From 0c7bf7956da801632d341605d75e401860b41847 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 15 Oct 2024 21:25:09 +0200 Subject: [PATCH 01/32] define __slots__ for class ByteFifo --- app/src/byte_fifo.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/src/byte_fifo.py b/app/src/byte_fifo.py index af9cb09..27d2512 100644 --- a/app/src/byte_fifo.py +++ b/app/src/byte_fifo.py @@ -7,6 +7,8 @@ else: # pragma: no cover class ByteFifo: """ a byte FIFO buffer with trigger callback """ + __slots__ = ('__buf', '__trigger_cb') + def __init__(self): self.__buf = bytearray() self.__trigger_cb = None From bbda66e455b49a76c71082aad9e6dd64998d631f Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 15 Oct 2024 21:28:57 +0200 Subject: [PATCH 02/32] disable set-timezone action --- .github/workflows/python-app.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index a9b5fcb..da77d60 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -30,12 +30,12 @@ jobs: runs-on: ubuntu-latest steps: - - name: Set timezone - uses: szenius/set-timezone@v2.0 - with: - timezoneLinux: "Europe/Berlin" - timezoneMacos: "Europe/Berlin" - timezoneWindows: "Europe/Berlin" + # - name: Set timezone + # uses: szenius/set-timezone@v2.0 + # with: + # timezoneLinux: "Europe/Berlin" + # timezoneMacos: "Europe/Berlin" + # timezoneWindows: "Europe/Berlin" # - name: Start Mosquitto # uses: namoshek/mosquitto-github-action@v1 # with: From 5d61a261b1dba15ae9a9a38f7cd37f10c0b1d22b Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 15 Oct 2024 21:37:01 +0200 Subject: [PATCH 03/32] set set-timezone to UTC --- .github/workflows/python-app.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index da77d60..ce8849c 100644 --- a/.github/workflows/python-app.yml +++ 
b/.github/workflows/python-app.yml @@ -30,12 +30,12 @@ jobs: runs-on: ubuntu-latest steps: - # - name: Set timezone - # uses: szenius/set-timezone@v2.0 - # with: - # timezoneLinux: "Europe/Berlin" - # timezoneMacos: "Europe/Berlin" - # timezoneWindows: "Europe/Berlin" + - name: Set timezone + uses: szenius/set-timezone@v2.0 + with: + timezoneLinux: "utc" + timezoneMacos: "utc" + timezoneWindows: "utc" # - name: Start Mosquitto # uses: namoshek/mosquitto-github-action@v1 # with: From 5775cb1ce368455bfdfc9de0047ea5d15947fbc1 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 15 Oct 2024 21:53:11 +0200 Subject: [PATCH 04/32] try MathRobin/timezone-action@v1.1 --- .github/workflows/python-app.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index ce8849c..7b63632 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -31,11 +31,11 @@ jobs: steps: - name: Set timezone - uses: szenius/set-timezone@v2.0 + uses: MathRobin/timezone-action@v1.1 with: - timezoneLinux: "utc" - timezoneMacos: "utc" - timezoneWindows: "utc" + timezoneLinux: "Europe/Berlin" + timezoneMacos: "Europe/Berlin" + timezoneWindows: "Europe/Berlin" # - name: Start Mosquitto # uses: namoshek/mosquitto-github-action@v1 # with: From 3863454a84bd6f2cc5442227cd02d88a7d10d877 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 15 Oct 2024 21:59:32 +0200 Subject: [PATCH 05/32] set TZ to "Europe/Berlin" --- .github/workflows/python-app.yml | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 7b63632..1a9a38d 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -24,18 +24,15 @@ permissions: contents: read pull-requests: read # allows SonarCloud to decorate PRs with analysis results +env: + TZ: "Europe/Berlin" + jobs: build: runs-on: ubuntu-latest 
steps: - - name: Set timezone - uses: MathRobin/timezone-action@v1.1 - with: - timezoneLinux: "Europe/Berlin" - timezoneMacos: "Europe/Berlin" - timezoneWindows: "Europe/Berlin" # - name: Start Mosquitto # uses: namoshek/mosquitto-github-action@v1 # with: From db06d8c8e673e165a6da0493cab278ce0e728ba1 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 15 Oct 2024 22:11:19 +0200 Subject: [PATCH 06/32] define __slots__ --- app/src/gen3/infos_g3.py | 3 +++ app/src/gen3plus/infos_g3p.py | 4 +++- app/src/infos.py | 2 ++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index c39bed9..7c62eac 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -10,6 +10,8 @@ else: # pragma: no cover class RegisterMap: + __slots__ = () + map = { 0x00092ba8: {'reg': Register.COLLECTOR_FW_VERSION}, 0x000927c0: {'reg': Register.CHIP_TYPE}, @@ -91,6 +93,7 @@ class RegisterMap: class InfosG3(Infos): + __slots__ = () def ha_confs(self, ha_prfx: str, node_id: str, snr: str, sug_area: str = '') \ diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index 135aa3d..f31f17b 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -10,8 +10,8 @@ else: # pragma: no cover class RegisterMap: # make the class read/only by using __slots__ - __slots__ = () + map = { # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': ' Date: Wed, 16 Oct 2024 23:20:23 +0200 Subject: [PATCH 07/32] improve setting the node_id in the modbus --- app/src/gen3/talent.py | 5 +++-- app/src/gen3plus/solarman_v5.py | 2 ++ app/src/modbus.py | 7 ++++--- app/tests/test_modbus.py | 35 +++++++++++++++++++++------------ 4 files changed, 31 insertions(+), 18 deletions(-) diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 522f4d0..0081742 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -135,6 +135,8 @@ class Talent(Message): self.modbus_polling = inv['modbus_polling'] 
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501 self.db.set_pv_module_details(inv) + if self.mb: + self.mb.set_node_id(self.node_id) else: self.node_id = '' self.sug_area = '' @@ -590,8 +592,7 @@ class Talent(Message): return for key, update, _ in self.mb.recv_resp(self.db, data[ - hdr_len:], - self.node_id): + hdr_len:]): if update: self._set_mqtt_timestamp(key, self._utc()) self.new_data[key] = True diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 51eb3a1..0a64764 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -208,6 +208,8 @@ class SolarmanV5(Message): self.sug_area = inv['suggested_area'] self.modbus_polling = inv['modbus_polling'] self.sensor_list = inv['sensor_list'] + if self.mb: + self.mb.set_node_id(self.node_id) def __set_serial_no(self, snr: int): '''check the serial number and configure the inverter connection''' diff --git a/app/src/modbus.py b/app/src/modbus.py index 028699d..d186ba7 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -117,6 +117,9 @@ class Modbus(): while not self.que.empty(): self.que.get_nowait() + def set_node_id(self, node_id: str): + self.node_id = node_id + def build_msg(self, addr: int, func: int, reg: int, val: int, log_lvl=logging.DEBUG) -> None: """Build MODBUS RTU request frame and add it to the tx queue @@ -160,14 +163,13 @@ class Modbus(): return True - def recv_resp(self, info_db, buf: bytes, node_id: str) -> \ + def recv_resp(self, info_db, buf: bytes) -> \ Generator[tuple[str, bool, int | float | str], None, None]: """Generator which check and parse a received MODBUS response. 
Keyword arguments: info_db: database for info lockups buf: received Modbus RTU response frame - node_id: string for logging which identifies the slave Returns on error and set Self.err to: 1: CRC error @@ -177,7 +179,6 @@ class Modbus(): 5: No MODBUS request pending """ # logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}') - self.node_id = node_id fcode = buf[1] data_available = self.last_addr == self.INV_ADDR and \ diff --git a/app/tests/test_modbus.py b/app/tests/test_modbus.py index d0e321e..53e735f 100644 --- a/app/tests/test_modbus.py +++ b/app/tests/test_modbus.py @@ -77,9 +77,10 @@ def test_recv_resp_crc_err(): mb.last_fcode = 3 mb.last_reg = 0x300e mb.last_len = 2 + mb.set_node_id('test') # check matching response, but with CRC error call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf3', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf3'): call += 1 assert mb.err == 1 assert 0 == call @@ -97,10 +98,11 @@ def test_recv_resp_invalid_addr(): mb.last_fcode = 3 mb.last_reg = 0x300e mb.last_len = 2 + mb.set_node_id('test') # check not matching response, with wrong server addr call = 0 - for key, update in mb.recv_resp(mb.db, b'\x02\x03\x04\x01\x2c\x00\x46\x88\xf4', 'test'): + for key, update in mb.recv_resp(mb.db, b'\x02\x03\x04\x01\x2c\x00\x46\x88\xf4'): call += 1 assert mb.err == 2 assert 0 == call @@ -120,7 +122,8 @@ def test_recv_recv_fcode(): # check not matching response, with wrong function code call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + mb.set_node_id('test') + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 3 @@ -142,7 +145,8 @@ def test_recv_resp_len(): # check not matching response, with wrong data length call = 0 - for key, update, _ in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + 
mb.set_node_id('test') + for key, update, _ in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 4 @@ -161,7 +165,8 @@ def test_recv_unexpect_resp(): # check unexpected response, which must be dropped call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + mb.set_node_id('test') + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 5 @@ -177,8 +182,9 @@ def test_parse_resp(): assert mb.req_pend call = 0 + mb.set_node_id('test') exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30] - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): if key == 'grid': assert update == True elif key == 'inverter': @@ -226,8 +232,9 @@ def test_queue2(): assert mb.send_calls == 1 assert mb.pdu == b'\x01\x030\x07\x00\x06{\t' call = 0 + mb.set_node_id('test') exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30] - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): if key == 'grid': assert update == True elif key == 'inverter': @@ -245,14 +252,14 @@ def test_queue2(): assert mb.send_calls == 2 assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b' - for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b'): pass # call generator mb.recv_resp() assert mb.que.qsize() == 0 assert mb.send_calls == 3 assert mb.pdu == b'\x01\x030\x07\x00\x06{\t' call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 
'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): call += 1 assert 0 == mb.err assert 5 == call @@ -276,8 +283,9 @@ def test_queue3(): assert mb.recv_responses == 0 call = 0 + mb.set_node_id('test') exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30] - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): if key == 'grid': assert update == True elif key == 'inverter': @@ -296,7 +304,7 @@ def test_queue3(): assert mb.send_calls == 2 assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b' - for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b'): pass # no code in loop is OK; calling the generator is the purpose assert 0 == mb.err assert mb.recv_responses == 2 @@ -305,7 +313,7 @@ def test_queue3(): assert mb.send_calls == 3 assert mb.pdu == b'\x01\x030\x07\x00\x06{\t' call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): call += 1 assert 0 == mb.err assert mb.recv_responses == 2 @@ -373,7 +381,8 @@ def test_recv_unknown_data(): # check matching response, but with CRC error call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + mb.set_node_id('test') + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 0 assert 0 == call From 6122f4071868e50cc07d610ff203cf3f10af56a5 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Wed, 16 Oct 2024 23:25:18 +0200 Subject: [PATCH 08/32] fix recv_resp 
method call --- app/src/gen3plus/solarman_v5.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 0a64764..889ef15 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -654,8 +654,7 @@ class SolarmanV5(Message): # logger.info(f'first byte modbus:{data[14]}') inv_update = False self.modbus_elms = 0 - for key, update, _ in self.mb.recv_resp(self.db, data[14:], - self.node_id): + for key, update, _ in self.mb.recv_resp(self.db, data[14:]): self.modbus_elms += 1 if update: if key == 'inverter': From ce5bd6eb0a116b53eb5f371d8a3cce700320bfa4 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Thu, 17 Oct 2024 21:51:26 +0200 Subject: [PATCH 09/32] reduce code duplications --- app/src/gen3/talent.py | 10 ---------- app/src/gen3plus/solarman_v5.py | 10 ---------- app/src/messages.py | 10 ++++++++++ 3 files changed, 10 insertions(+), 20 deletions(-) diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 0081742..a0af3cd 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -205,16 +205,6 @@ class Talent(Message): self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:') self.ifc.tx_flush() - def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - if self.state != State.up: - logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' - ' as the state is not UP') - return - self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) - - async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - self._send_modbus_cmd(func, addr, val, log_lvl) - def mb_timout_cb(self, exp_cnt): self.mb_timer.start(self.mb_timeout) diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 889ef15..bae555c 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -435,16 +435,6 @@ class SolarmanV5(Message): self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:') 
self.ifc.tx_flush() - def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - if self.state != State.up: - logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' - ' as the state is not UP') - return - self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) - - async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - self._send_modbus_cmd(func, addr, val, log_lvl) - def mb_timout_cb(self, exp_cnt): self.mb_timer.start(self.mb_timeout) diff --git a/app/src/messages.py b/app/src/messages.py index bbff315..ccb8f3d 100644 --- a/app/src/messages.py +++ b/app/src/messages.py @@ -152,6 +152,16 @@ class Message(ProtocolIfc): to = self.MAX_DEF_IDLE_TIME return to + def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: + if self.state != State.up: + logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' + ' as the state is not UP') + return + self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) + + async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: + self._send_modbus_cmd(func, addr, val, log_lvl) + ''' Our puplic methods ''' From 7d0ea41728a077ce78eb8401b4208deb00895155 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Thu, 17 Oct 2024 23:20:13 +0200 Subject: [PATCH 10/32] reduce code duplications --- app/src/gen3/talent.py | 30 +++----------------------- app/src/gen3plus/solarman_v5.py | 32 ++------------------------- app/src/messages.py | 38 +++++++++++++++++++++++++++++---- app/tests/test_async_stream.py | 9 +++++--- 4 files changed, 45 insertions(+), 64 deletions(-) diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index a0af3cd..57f875c 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -8,7 +8,6 @@ if __name__ == "app.src.gen3.talent": from app.src.async_ifc import AsyncIfc from app.src.messages import Message, State from app.src.modbus import Modbus - from app.src.my_timer import Timer from app.src.config import Config from app.src.gen3.infos_g3 import InfosG3 from 
app.src.infos import Register @@ -16,7 +15,6 @@ else: # pragma: no cover from async_ifc import AsyncIfc from messages import Message, State from modbus import Modbus - from my_timer import Timer from config import Config from gen3.infos_g3 import InfosG3 from infos import Register @@ -42,19 +40,18 @@ class Control: class Talent(Message): - MB_START_TIMEOUT = 40 - MB_REGULAR_TIMEOUT = 60 TXT_UNKNOWN_CTRL = 'Unknown Ctrl' def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, client_mode: bool = False, id_str=b''): - super().__init__(server_side, self.send_modbus_cb, mb_timeout=15) + super().__init__('G3', ifc, server_side, self.send_modbus_cb, + mb_timeout=15) ifc.rx_set_cb(self.read) ifc.prot_set_timeout_cb(self._timeout) ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn) ifc.prot_set_update_header_cb(self._update_header) + self.addr = addr - self.ifc = ifc self.conn_no = ifc.get_conn_no() self.await_conn_resp_cnt = 0 self.id_str = id_str @@ -86,38 +83,17 @@ class Talent(Message): 0x87: self.get_modbus_log_lvl, 0x04: logging.INFO, } - self.modbus_elms = 0 # for unit tests - self.node_id = 'G3' # will be overwritten in __set_serial_no - self.mb_timer = Timer(self.mb_timout_cb, self.node_id) - self.mb_timeout = self.MB_REGULAR_TIMEOUT - self.mb_first_timeout = self.MB_START_TIMEOUT - self.modbus_polling = False ''' Our puplic methods ''' def close(self) -> None: logging.debug('Talent.close()') - if self.server_side: - # set inverter state to offline, if output power is very low - logging.debug('close power: ' - f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') - if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: - self.db.set_db_def_value(Register.INVERTER_STATUS, 0) - self.new_data['env'] = True - # we have references to methods of this class in self.switch # so we have to erase self.switch, otherwise this instance can't be # deallocated by the garbage collector ==> we get a memory leak self.switch.clear() self.log_lvl.clear() - 
self.state = State.closed - self.mb_timer.close() - self.ifc.rx_set_cb(None) - self.ifc.prot_set_timeout_cb(None) - self.ifc.prot_set_init_new_client_conn_cb(None) - self.ifc.prot_set_update_header_cb(None) - self.ifc = None super().close() def __set_serial_no(self, serial_no: str): diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index bae555c..191dcf9 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -8,7 +8,6 @@ if __name__ == "app.src.gen3plus.solarman_v5": from app.src.async_ifc import AsyncIfc from app.src.messages import hex_dump_memory, Message, State from app.src.modbus import Modbus - from app.src.my_timer import Timer from app.src.config import Config from app.src.gen3plus.infos_g3p import InfosG3P from app.src.infos import Register @@ -17,7 +16,6 @@ else: # pragma: no cover from messages import hex_dump_memory, Message, State from config import Config from modbus import Modbus - from my_timer import Timer from gen3plus.infos_g3p import InfosG3P from infos import Register @@ -53,10 +51,6 @@ class Sequence(): class SolarmanV5(Message): AT_CMD = 1 MB_RTU_CMD = 2 - MB_START_TIMEOUT = 40 - '''start delay for Modbus polling in server mode''' - MB_REGULAR_TIMEOUT = 60 - '''regular Modbus polling time in server mode''' MB_CLIENT_DATA_UP = 30 '''Data up time in client mode''' HDR_FMT = ' None: logging.debug('Solarman.close()') - if self.server_side: - # set inverter state to offline, if output power is very low - logging.debug('close power: ' - f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') - if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: - self.db.set_db_def_value(Register.INVERTER_STATUS, 0) - self.new_data['env'] = True - # we have references to methods of this class in self.switch # so we have to erase self.switch, otherwise this instance can't be # deallocated by the garbage collector ==> we get a memory leak self.switch.clear() self.log_lvl.clear() - self.state = 
State.closed - self.mb_timer.close() - self.ifc.rx_set_cb(None) - self.ifc.prot_set_timeout_cb(None) - self.ifc.prot_set_init_new_client_conn_cb(None) - self.ifc.prot_set_update_header_cb(None) - self.ifc = None super().close() async def send_start_cmd(self, snr: int, host: str, diff --git a/app/src/messages.py b/app/src/messages.py index ccb8f3d..7b691f2 100644 --- a/app/src/messages.py +++ b/app/src/messages.py @@ -5,13 +5,17 @@ from enum import Enum if __name__ == "app.src.messages": + from app.src.async_ifc import AsyncIfc from app.src.protocol_ifc import ProtocolIfc from app.src.infos import Infos, Register from app.src.modbus import Modbus + from app.src.my_timer import Timer else: # pragma: no cover + from async_ifc import AsyncIfc from protocol_ifc import ProtocolIfc from infos import Infos, Register from modbus import Modbus + from my_timer import Timer logger = logging.getLogger('msg') @@ -89,9 +93,14 @@ class Message(ProtocolIfc): '''maximum time without a received msg from the inverter in sec''' MAX_DEF_IDLE_TIME = 360 '''maximum default time without a received msg in sec''' + MB_START_TIMEOUT = 40 + '''start delay for Modbus polling in server mode''' + MB_REGULAR_TIMEOUT = 60 + '''regular Modbus polling time in server mode''' - def __init__(self, server_side: bool, send_modbus_cb: - Callable[[bytes, int, str], None], mb_timeout: int): + def __init__(self, node_id, ifc: "AsyncIfc", server_side: bool, + send_modbus_cb: Callable[[bytes, int, str], None], + mb_timeout: int): self._registry.append(weakref.ref(self)) self.server_side = server_side @@ -99,16 +108,22 @@ class Message(ProtocolIfc): self.mb = Modbus(send_modbus_cb, mb_timeout) else: self.mb = None - + self.ifc = ifc + self.node_id = node_id self.header_valid = False self.header_len = 0 self.data_len = 0 self.unique_id = 0 - self._node_id = '' self.sug_area = '' self.new_data = {} self.state = State.init self.shutdown_started = False + self.modbus_elms = 0 # for unit tests + self.mb_timer = 
Timer(self.mb_timout_cb, self.node_id) + self.mb_timeout = self.MB_REGULAR_TIMEOUT + self.mb_first_timeout = self.MB_START_TIMEOUT + '''timer value for next Modbus polling request''' + self.modbus_polling = False @property def node_id(self): @@ -166,6 +181,21 @@ class Message(ProtocolIfc): Our puplic methods ''' def close(self) -> None: + if self.server_side: + # set inverter state to offline, if output power is very low + logging.debug('close power: ' + f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') + if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: + self.db.set_db_def_value(Register.INVERTER_STATUS, 0) + self.new_data['env'] = True + self.state = State.closed + self.mb_timer.close() + self.ifc.rx_set_cb(None) + self.ifc.prot_set_timeout_cb(None) + self.ifc.prot_set_init_new_client_conn_cb(None) + self.ifc.prot_set_update_header_cb(None) + self.ifc = None + if self.mb: self.mb.close() self.mb = None diff --git a/app/tests/test_async_stream.py b/app/tests/test_async_stream.py index d7dcf12..d1d5911 100644 --- a/app/tests/test_async_stream.py +++ b/app/tests/test_async_stream.py @@ -17,10 +17,13 @@ pytest_plugins = ('pytest_asyncio',) Infos.static_init() class FakeProto(Message): - def __init__(self, server_side): - super().__init__(server_side, None, 10) + def __init__(self, ifc, server_side): + super().__init__('G3F', ifc, server_side, None, 10) self.conn_no = 0 + def mb_timout_cb(self, exp_cnt): + pass # empty callback + def fake_reader_fwd(): reader = FakeReader() reader.test = FakeReader.RD_TEST_13_BYTES @@ -349,7 +352,7 @@ def create_remote(remote, test_type, with_close_hdr:bool = False): FakeReader(), FakeWriter(), StreamPtr(None), close_hndl) remote.ifc.prot_set_update_header_cb(update_hdr) remote.ifc.prot_set_init_new_client_conn_cb(callback) - remote.stream = FakeProto(False) + remote.stream = FakeProto(remote.ifc, False) @pytest.mark.asyncio async def test_forward(): From 6c6109d421d8cbf88848f24c43be6ffa3456e7ec Mon Sep 17 00:00:00 2001 
From: Stefan Allius Date: Fri, 18 Oct 2024 23:49:23 +0200 Subject: [PATCH 11/32] update class diagramms --- app/proxy_2.svg | 657 +++++++++++++++++++++-------------------------- app/proxy_2.yuml | 34 +-- app/proxy_3.svg | 364 ++++++++++++++++++++++++++ app/proxy_3.yuml | 42 +++ 4 files changed, 717 insertions(+), 380 deletions(-) create mode 100644 app/proxy_3.svg create mode 100644 app/proxy_3.yuml diff --git a/app/proxy_2.svg b/app/proxy_2.svg index 6a6fb51..232983f 100644 --- a/app/proxy_2.svg +++ b/app/proxy_2.svg @@ -4,429 +4,368 @@ - - + + G - + A0 - - - -You can stick notes -on diagrams too! + + + +Example of +instantiation for a +GEN3 inverter! A1 - -<<AbstractIterMeta>> - - -__iter__() + +<<AbstractIterMeta>> + + +__iter__() A14 - -<<ProtocolIfc>> - -_registry - -close() + +<<ProtocolIfc>> + +_registry + +close() - + A1->A14 - - + + A2 - -InverterG3 - -addr -remote:StreamPtr -local:StreamPtr - -create_remote() -close() - - - -A7 - -AsyncStreamServer - -create_remote - -<async>server_loop() -<async>_async_forward() -<async>publish_outstanding_mqtt() -close() - - - -A2->A7 - - - -local - - - -A8 - -AsyncStreamClient - - -<async>client_loop() -<async>_async_forward()) - - - -A2->A8 - - -remote + +InverterG3 + +addr +remote:StreamPtr +local:StreamPtr + +create_remote() +close() A3 - -InverterG3P - -addr -remote:StreamPtr -local:StreamPtr - -create_remote() -close() + +local:StreamPtr - - -A3->A7 - - - -local - - - -A3->A8 - - -remote + + +A2->A3 + + + A4 - -<<AsyncIfc>> - - -set_node_id() -get_conn_no() -tx_add() -tx_flush() -tx_get() -tx_peek() -tx_log() -tx_clear() -tx_len() -fwd_add() -fwd_log() -rx_get() -rx_peek() -rx_log() -rx_clear() -rx_len() -rx_set_cb() -prot_set_timeout_cb() + +remote:StreamPtr - - -A5 - -AsyncIfcImpl - -fwd_fifo:ByteFifo -tx_fifo:ByteFifo -rx_fifo:ByteFifo -conn_no:Count -node_id -timeout_cb - - - -A4->A5 - - - - - -A6 - -AsyncStream - -reader -writer -addr -r_addr -l_addr - -<async>loop -disc() -close() -healthy() -__async_read() 
-__async_write() -__async_forward() - - + -A5->A6 - - +A2->A4 + + + - - -A6->A7 - - + + +A8 + +AsyncStreamServer + +create_remote + +<async>server_loop() +<async>_async_forward() +<async>publish_outstanding_mqtt() +close() - - -A6->A8 - - + + +A3->A8 + + + A9 - -Talent - -ifc:AsyncIfc -conn_no -addr -await_conn_resp_cnt -id_str -contact_name -contact_mail -db:InfosG3 -mb:Modbus -switch - -msg_contact_info() -msg_ota_update() -msg_get_time() -msg_collector_data() -msg_inverter_data() -msg_unknown() -healthy() -close() + +AsyncStreamClient + + +<async>client_loop() +<async>_async_forward()) - + + +A4->A9 + + +0..1 + + + +A5 + +<<AsyncIfc>> + + +set_node_id() +get_conn_no() +tx_add() +tx_flush() +tx_get() +tx_peek() +tx_log() +tx_clear() +tx_len() +fwd_add() +fwd_log() +rx_get() +rx_peek() +rx_log() +rx_clear() +rx_len() +rx_set_cb() +prot_set_timeout_cb() + + + +A6 + +AsyncIfcImpl + +fwd_fifo:ByteFifo +tx_fifo:ByteFifo +rx_fifo:ByteFifo +conn_no:Count +node_id +timeout_cb + + + +A5->A6 + + + + + +A7 + +AsyncStream + +reader +writer +addr +r_addr +l_addr + +<async>loop +disc() +close() +healthy() +__async_read() +__async_write() +__async_forward() + + + +A6->A7 + + + + -A9->A2 - - -remote +A7->A8 + + - - -A9->A2 - - - -local - - - -A9->A4 - - -use - - - -A12 - -InfosG3 - - -ha_confs() -parse() - - - -A9->A12 - - + + +A7->A9 + + A10 - -SolarmanV5 - -ifc:AsyncIfc -conn_no -addr -control -serial -snr -db:InfosG3P -mb:Modbus -switch - -msg_unknown() -healthy() -close() + +Talent + +conn_no +addr +await_conn_resp_cnt +id_str +contact_name +contact_mail +db:InfosG3 +mb:Modbus +switch + +msg_contact_info() +msg_ota_update() +msg_get_time() +msg_collector_data() +msg_inverter_data() +msg_unknown() +healthy() +close() - + A10->A3 - - -remote - - - -A10->A3 - - - -local + + + - + A10->A4 - - -use + + +0..1 - - -A13 - -InfosG3P - - -ha_confs() -parse() + + +A12 + +InfosG3 + + +ha_confs() +parse() - - -A10->A13 - - + + +A10->A12 + + A11 - -Infos - -stat -new_stat_data -info_dev - 
-static_init() -dev_value() -inc_counter() -dec_counter() -ha_proxy_conf -ha_conf -ha_remove -update_db -set_db_def_value -get_db_value -ignore_this_device + +Infos + +stat +new_stat_data +info_dev + +static_init() +dev_value() +inc_counter() +dec_counter() +ha_proxy_conf +ha_conf +ha_remove +update_db +set_db_def_value +get_db_value +ignore_this_device - + A11->A12 - - + + - - -A11->A13 - - + + +A13 + +Message + +server_side:bool +mb:Modbus +ifc:AsyncIfc +node_id +header_valid:bool +header_len +data_len +unique_id +sug_area:str +new_data:dict +state:State +shutdown_started:bool +modbus_elms +mb_timer:Timer +mb_timeout +mb_first_timeout +modbus_polling:bool + +_set_mqtt_timestamp() +_timeout() +_send_modbus_cmd() +<async> end_modbus_cmd() +close() +inc_counter() +dec_counter() + + + +A13->A5 + + +use + + + +A13->A10 + + + + + +A14->A13 + + A15 - -Message - -node_id - -inc_counter() -dec_counter() + +Modbus + +que +snd_handler +rsp_handler +timeout +max_retires +last_xxx +err +retry_cnt +req_pend +tim + +build_msg() +recv_req() +recv_resp() +close() - - -A14->A15 - - - - - -A15->A9 - - - - - -A15->A10 - - - - - -A16 - -Modbus - -que -snd_handler -rsp_handler -timeout -max_retires -last_xxx -err -retry_cnt -req_pend -tim - -build_msg() -recv_req() -recv_resp() -close() - - - -A16->A9 - - -has -1 - - - -A16->A10 - - -has -1 + + +A15->A13 + + +has +0..1 diff --git a/app/proxy_2.yuml b/app/proxy_2.yuml index 39a399e..5138428 100644 --- a/app/proxy_2.yuml +++ b/app/proxy_2.yuml @@ -2,11 +2,12 @@ // {direction:topDown} // {generate:true} -[note: You can stick notes on diagrams too!{bg:cornsilk}] +[note: Example of instantiation for a GEN3 inverter!{bg:cornsilk}] [<>||__iter__()] [InverterG3|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] -[InverterG3P|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] +[InverterG3]++->[local:StreamPtr] +[InverterG3]++->[remote:StreamPtr] 
[<>||set_node_id();get_conn_no();;tx_add();tx_flush();tx_get();tx_peek();tx_log();tx_clear();tx_len();;fwd_add();fwd_log();rx_get();rx_peek();rx_log();rx_clear();rx_len();rx_set_cb();;prot_set_timeout_cb()] [AsyncIfcImpl|fwd_fifo:ByteFifo;tx_fifo:ByteFifo;rx_fifo:ByteFifo;conn_no:Count;node_id;timeout_cb] @@ -19,33 +20,24 @@ [AsyncStream]^[AsyncStreamClient] -[Talent|ifc:AsyncIfc;conn_no;addr;;await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;healthy();close()] -[Talent][AsyncStreamClient] -[Talent]<-local++[InverterG3] -[InverterG3]++local->[AsyncStreamServer] - -[SolarmanV5|ifc:AsyncIfc;conn_no;addr;;control;serial;snr;db:InfosG3P;mb:Modbus;switch|msg_unknown();;healthy();close()] -[SolarmanV5][AsyncStreamClient] -[SolarmanV5]<-local++[InverterG3P] -[InverterG3P]++local->[AsyncStreamServer] +[Talent|conn_no;addr;;await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;healthy();close()] +[Talent]<-++[local:StreamPtr] +[local:StreamPtr]++->[AsyncStreamServer] +[Talent]<-0..1[remote:StreamPtr] +[remote:StreamPtr]0..1->[AsyncStreamClient] [Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device] [Infos]^[InfosG3||ha_confs();parse()] -[Infos]^[InfosG3P||ha_confs();parse()] -[Talent]use->[<>] [Talent]->[InfosG3] -[SolarmanV5]use->[<>] -[SolarmanV5]->[InfosG3P] + +[Message|server_side:bool;mb:Modbus;ifc:AsyncIfc;node_id;header_valid:bool;header_len;data_len;unique_id;sug_area:str;new_data:dict;state:State;shutdown_started:bool;modbus_elms;mb_timer:Timer;mb_timeout;mb_first_timeout;modbus_polling:bool|_set_mqtt_timestamp();_timeout();_send_modbus_cmd(); 
end_modbus_cmd();close();inc_counter();dec_counter()] +[Message]use->[<>] [<>|_registry|close()] [<>]^-.-[<>] -[<>]^-.-[Message|node_id|inc_counter();dec_counter()] +[<>]^-.-[Message] [Message]^[Talent] -[Message]^[SolarmanV5] [Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()] -[Modbus]<1-has[SolarmanV5] -[Modbus]<1-has[Talent] +[Modbus]<0..1-has[Message] diff --git a/app/proxy_3.svg b/app/proxy_3.svg new file mode 100644 index 0000000..37fc587 --- /dev/null +++ b/app/proxy_3.svg @@ -0,0 +1,364 @@ + + + + + + +G + + + +A0 + + + +Example of +instantiation for a +GEN3PLUS inverter! + + + +A1 + +<<AbstractIterMeta>> + + +__iter__() + + + +A14 + +<<ProtocolIfc>> + +_registry + +close() + + + +A1->A14 + + + + + +A2 + +InverterG3P + +addr +remote:StreamPtr +local:StreamPtr + +create_remote() +close() + + + +A3 + +local:StreamPtr + + + +A2->A3 + + + + + + +A4 + +remote:StreamPtr + + + +A2->A4 + + + + + + +A8 + +AsyncStreamServer + +create_remote + +<async>server_loop() +<async>_async_forward() +<async>publish_outstanding_mqtt() +close() + + + +A3->A8 + + + + + + +A9 + +AsyncStreamClient + + +<async>client_loop() +<async>_async_forward()) + + + +A4->A9 + + +0..1 + + + +A5 + +<<AsyncIfc>> + + +set_node_id() +get_conn_no() +tx_add() +tx_flush() +tx_get() +tx_peek() +tx_log() +tx_clear() +tx_len() +fwd_add() +fwd_log() +rx_get() +rx_peek() +rx_log() +rx_clear() +rx_len() +rx_set_cb() +prot_set_timeout_cb() + + + +A6 + +AsyncIfcImpl + +fwd_fifo:ByteFifo +tx_fifo:ByteFifo +rx_fifo:ByteFifo +conn_no:Count +node_id +timeout_cb + + + +A5->A6 + + + + + +A7 + +AsyncStream + +reader +writer +addr +r_addr +l_addr + +<async>loop +disc() +close() +healthy() +__async_read() +__async_write() +__async_forward() + + + +A6->A7 + + + + + +A7->A8 + + + + + +A7->A9 + + + + + +A10 + +SolarmanV5 + +conn_no +addr +control +serial +snr +db:InfosG3P +switch + +msg_unknown() +healthy() +close() + + + +A10->A3 + + + 
+ + + +A10->A4 + + +0..1 + + + +A12 + +InfosG3P + + +ha_confs() +parse() + + + +A10->A12 + + + + + +A11 + +Infos + +stat +new_stat_data +info_dev + +static_init() +dev_value() +inc_counter() +dec_counter() +ha_proxy_conf +ha_conf +ha_remove +update_db +set_db_def_value +get_db_value +ignore_this_device + + + +A11->A12 + + + + + +A13 + +Message + +server_side:bool +mb:Modbus +ifc:AsyncIfc +node_id +header_valid:bool +header_len +data_len +unique_id +sug_area:str +new_data:dict +state:State +shutdown_started:bool +modbus_elms +mb_timer:Timer +mb_timeout +mb_first_timeout +modbus_polling:bool + +_set_mqtt_timestamp() +_timeout() +_send_modbus_cmd() +<async> end_modbus_cmd() +close() +inc_counter() +dec_counter() + + + +A13->A5 + + +use + + + +A13->A10 + + + + + +A14->A13 + + + + + +A15 + +Modbus + +que +snd_handler +rsp_handler +timeout +max_retires +last_xxx +err +retry_cnt +req_pend +tim + +build_msg() +recv_req() +recv_resp() +close() + + + +A15->A13 + + +has +0..1 + + + diff --git a/app/proxy_3.yuml b/app/proxy_3.yuml new file mode 100644 index 0000000..499c93f --- /dev/null +++ b/app/proxy_3.yuml @@ -0,0 +1,42 @@ +// {type:class} +// {direction:topDown} +// {generate:true} + +[note: Example of instantiation for a GEN3PLUS inverter!{bg:cornsilk}] +[<>||__iter__()] + +[InverterG3P|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] +[InverterG3P]++->[local:StreamPtr] +[InverterG3P]++->[remote:StreamPtr] + +[<>||set_node_id();get_conn_no();;tx_add();tx_flush();tx_get();tx_peek();tx_log();tx_clear();tx_len();;fwd_add();fwd_log();rx_get();rx_peek();rx_log();rx_clear();rx_len();rx_set_cb();;prot_set_timeout_cb()] +[AsyncIfcImpl|fwd_fifo:ByteFifo;tx_fifo:ByteFifo;rx_fifo:ByteFifo;conn_no:Count;node_id;timeout_cb] +[AsyncStream|reader;writer;addr;r_addr;l_addr|;loop;disc();close();healthy();;__async_read();__async_write();__async_forward()] +[AsyncStreamServer|create_remote|server_loop();_async_forward();publish_outstanding_mqtt();close()] 
+[AsyncStreamClient||client_loop();_async_forward())] +[<>]^-.-[AsyncIfcImpl] +[AsyncIfcImpl]^[AsyncStream] +[AsyncStream]^[AsyncStreamServer] +[AsyncStream]^[AsyncStreamClient] + +[SolarmanV5|conn_no;addr;;control;serial;snr;db:InfosG3P;switch|msg_unknown();;healthy();close()] +[SolarmanV5]<-++[local:StreamPtr] +[local:StreamPtr]++->[AsyncStreamServer] +[SolarmanV5]<-0..1[remote:StreamPtr] +[remote:StreamPtr]0..1->[AsyncStreamClient] + +[Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device] +[Infos]^[InfosG3P||ha_confs();parse()] + +[SolarmanV5]->[InfosG3P] + +[Message|server_side:bool;mb:Modbus;ifc:AsyncIfc;node_id;header_valid:bool;header_len;data_len;unique_id;sug_area:str;new_data:dict;state:State;shutdown_started:bool;modbus_elms;mb_timer:Timer;mb_timeout;mb_first_timeout;modbus_polling:bool|_set_mqtt_timestamp();_timeout();_send_modbus_cmd(); end_modbus_cmd();close();inc_counter();dec_counter()] +[Message]use->[<>] + +[<>|_registry|close()] +[<>]^-.-[<>] +[<>]^-.-[Message] +[Message]^[SolarmanV5] + +[Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()] +[Modbus]<0..1-has[Message] From 9eb7c7fbe082dd9d49964c675dc3d5dbfd8936e2 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 19 Oct 2024 01:23:16 +0200 Subject: [PATCH 12/32] increase test coverage --- app/tests/test_async_stream.py | 37 ++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/app/tests/test_async_stream.py b/app/tests/test_async_stream.py index d1d5911..3aaf35e 100644 --- a/app/tests/test_async_stream.py +++ b/app/tests/test_async_stream.py @@ -340,6 +340,7 @@ def create_remote(remote, test_type, with_close_hdr:bool = False): elif test_type == TestType.FWD_RUNTIME_ERROR_NO_STREAM: remote.stream = None raise RuntimeError("Peer closed") + return True def 
close(): return @@ -533,3 +534,39 @@ async def test_forward_runtime_error3(): await ifc.server_loop() assert cnt == 1 del ifc + +@pytest.mark.asyncio +async def test_forward_resp(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _close_cb(): + nonlocal cnt, remote, ifc + cnt += 1 + + cnt = 0 + ifc = AsyncStreamClient(fake_reader_fwd(), FakeWriter(), remote, _close_cb) + create_remote(remote, TestType.FWD_NO_EXCPT) + ifc.fwd_add(b'test-forward_msg') + await ifc.client_loop('') + assert cnt == 0 + del ifc + +@pytest.mark.asyncio +async def test_forward_resp2(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _close_cb(): + nonlocal cnt, remote, ifc + cnt += 1 + + cnt = 0 + ifc = AsyncStreamClient(fake_reader_fwd(), FakeWriter(), None, _close_cb) + create_remote(remote, TestType.FWD_NO_EXCPT) + ifc.fwd_add(b'test-forward_msg') + await ifc.client_loop('') + assert cnt == 0 + del ifc From 8d67f1745d23634a32cf76e94f95ad2770504249 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Fri, 25 Oct 2024 20:36:53 +0200 Subject: [PATCH 13/32] update SonarSource/sonarcloud-github-action --- .github/workflows/python-app.yml | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 1a9a38d..3661e5a 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -33,15 +33,6 @@ jobs: runs-on: ubuntu-latest steps: - # - name: Start Mosquitto - # uses: namoshek/mosquitto-github-action@v1 - # with: - # version: '1.6' - # ports: '1883:1883 8883:8883' - # certificates: ${{ github.workspace }}/.ci/tls-certificates - # config: ${{ github.workspace }}/.ci/mosquitto.conf - # password-file: ${{ github.workspace}}/.ci/mosquitto.passwd - # container-name: 'mqtt' - uses: actions/checkout@v4 with: fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis @@ -65,7 +56,7 @@ 
jobs: python -m pytest app --cov=app/src --cov-report=xml coverage report - name: Analyze with SonarCloud - uses: SonarSource/sonarcloud-github-action@v2.2.0 + uses: SonarSource/sonarcloud-github-action@v3.1.0 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} From 10a18237c7cd4d2516fa466f2f5dd071d4bb1d94 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Fri, 25 Oct 2024 21:38:36 +0200 Subject: [PATCH 14/32] replace some eval calls --- app/src/gen3plus/infos_g3p.py | 8 ++++++-- app/tests/test_infos_g3p.py | 5 ++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index f31f17b..25eb86c 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -15,7 +15,7 @@ class RegisterMap: map = { # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': ' Date: Fri, 25 Oct 2024 23:41:25 +0200 Subject: [PATCH 15/32] remove all eval() calls --- app/src/gen3/infos_g3.py | 7 ++----- app/src/gen3plus/infos_g3p.py | 32 +++++++------------------------- app/src/infos.py | 35 +++++++++++++++++++++++++++++++++++ app/src/modbus.py | 21 +++++---------------- app/tests/test_infos_g3.py | 12 ------------ app/tests/test_infos_g3p.py | 12 +++++++----- 6 files changed, 56 insertions(+), 63 deletions(-) diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 7c62eac..88f9207 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -183,11 +183,8 @@ class InfosG3(Infos): i += 1 def __modify_val(self, row, result): - if row: - if 'eval' in row: - result = eval(row['eval']) - if 'ratio' in row: - result = round(result * row['ratio'], 2) + if row and 'ratio' in row: + result = round(result * row['ratio'], 2) return result def __store_result(self, addr, result, info_id, node_id): diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index 25eb86c..58c53c9 100644 --- a/app/src/gen3plus/infos_g3p.py +++ 
b/app/src/gen3plus/infos_g3p.py @@ -1,11 +1,10 @@ -import struct from typing import Generator if __name__ == "app.src.gen3plus.infos_g3p": - from app.src.infos import Infos, Register, ProxyMode + from app.src.infos import Infos, Register, ProxyMode, Fmt else: # pragma: no cover - from infos import Infos, Register, ProxyMode + from infos import Infos, Register, ProxyMode, Fmt class RegisterMap: @@ -19,16 +18,16 @@ class RegisterMap: 0x4102001a: {'reg': Register.HEARTBEAT_INTERVAL, 'fmt': '>12)}.{(result>>8)&0xf}.{(result>>4)&0xf}{result&0xf}'"}, # noqa: E501 + 0x420100d0: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501 0x420100d2: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 0x420100d4: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 0x420100d6: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 @@ -123,7 +122,7 @@ class InfosG3P(Infos): if not isinstance(row, dict): continue info_id = row['reg'] - result = self.__get_value(buf, addr, row) + result = Fmt.get_value(buf, addr, row) keys, level, unit, must_incr = self._key_obj(info_id) @@ -137,20 +136,3 @@ class InfosG3P(Infos): if update: self.tracer.log(level, f'[{node_id}] GEN3PLUS: {name}' f' : {result}{unit}') - - def __get_value(self, buf, idx, row): - '''Get a value from buf and interpret as in row''' - fmt = row['fmt'] - res = struct.unpack_from(fmt, buf, idx) - result = res[0] - if isinstance(result, (bytearray, bytes)): - result = result.decode().split('\x00')[0] - if 'eval' in row: - result = eval(row['eval']) - if 'ratio' in row: - result = round(result * row['ratio'], 2) - if 'quotient' in row: - result = round(result/row['quotient']) - if 'offset' in row: - result = result + row['offset'] - return result diff --git a/app/src/infos.py b/app/src/infos.py index 88e15c8..c94e219 100644 --- a/app/src/infos.py +++ b/app/src/infos.py @@ -1,5 +1,6 @@ import logging import json +import struct import os from enum 
import Enum from typing import Generator @@ -123,6 +124,40 @@ class Register(Enum): TEST_REG2 = 10001 +class Fmt: + @staticmethod + def get_value(buf: bytes, idx: int, row: dict): + '''Get a value from buf and interpret as in row defined''' + fmt = row['fmt'] + res = struct.unpack_from(fmt, buf, idx) + result = res[0] + if isinstance(result, (bytearray, bytes)): + result = result.decode().split('\x00')[0] + if 'func' in row: + result = row['func'](res) + if 'ratio' in row: + result = round(result * row['ratio'], 2) + if 'quotient' in row: + result = round(result/row['quotient']) + if 'offset' in row: + result = result + row['offset'] + return result + + @staticmethod + def hex4(val): + return f'{val[0]:04x}' + + @staticmethod + def mac(val): + return "%02x:%02x:%02x:%02x:%02x:%02x" % val + + @staticmethod + def version(val): + x = val[0] + return f'V{(x>>12)}.{(x>>8)&0xf}.{(x>>4)&0xf}{x&0xf:1X}' + # return f'V{x>>12}.{(x>>8)&0xf}.{(x>>4)&0xf}{val[0]&0xf}' + + class ClrAtMidnight: __clr_at_midnight = [Register.PV1_DAILY_GENERATION, Register.PV2_DAILY_GENERATION, Register.PV3_DAILY_GENERATION, Register.PV4_DAILY_GENERATION, Register.PV5_DAILY_GENERATION, Register.PV6_DAILY_GENERATION, Register.DAILY_GENERATION] # noqa: E501 db = {} diff --git a/app/src/modbus.py b/app/src/modbus.py index d186ba7..eec6d17 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -17,9 +17,9 @@ import asyncio from typing import Generator, Callable if __name__ == "app.src.modbus": - from app.src.infos import Register + from app.src.infos import Register, Fmt else: # pragma: no cover - from infos import Register + from infos import Register, Fmt logger = logging.getLogger('data') @@ -44,11 +44,11 @@ class Modbus(): 0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501 - 0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'eval': 
"f'V{(result>>12)}.{(result>>8)&0xf}.{(result>>4)&0xf}{result&0xf:1X}'"}, # noqa: E501 + 0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501 0x3009: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 0x300a: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 0x300b: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'eval': 'result-40'}, # noqa: E501 + 0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501 # 0x300d 0x300e: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 0x300f: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 @@ -229,17 +229,6 @@ class Modbus(): return False - def __get_value(self, buf: bytes, idx: int, row: dict): - '''get a value from the received buffer''' - val = struct.unpack_from(row['fmt'], buf, idx) - result = val[0] - - if 'eval' in row: - result = eval(row['eval']) - if 'ratio' in row: - result = round(result * row['ratio'], 2) - return result - def __process_data(self, info_db, buf: bytes, first_reg, elmlen): '''Generator over received registers, updates the db''' for i in range(0, elmlen): @@ -249,7 +238,7 @@ class Modbus(): info_id = row['reg'] keys, level, unit, must_incr = info_db._key_obj(info_id) if keys: - result = self.__get_value(buf, 3+2*i, row) + result = Fmt.get_value(buf, 3+2*i, row) name, update = info_db.update_db(keys, must_incr, result) yield keys[0], update, result diff --git a/app/tests/test_infos_g3.py b/app/tests/test_infos_g3.py index 6fad692..0c84c20 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -520,15 +520,3 @@ def test_invalid_data_type(invalid_data_seq): val = i.dev_value(Register.INVALID_DATA_TYPE) # check invalid data type counter assert val == 1 - -def test_result_eval(inv_data_seq2: bytes): - - # add eval to convert temperature from °F to °C - 
RegisterMap.map[0x00000514]['eval'] = '(result-32)/1.8' - - i = InfosG3() - - for _, _ in i.parse (inv_data_seq2): - pass # side effect is calling generator i.parse() - assert math.isclose(-5.0, round (i.get_db_value(Register.INVERTER_TEMP, 0),4), rel_tol=1e-09, abs_tol=1e-09) - del RegisterMap.map[0x00000514]['eval'] # remove eval diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index c9ceb4e..095a29b 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -256,12 +256,12 @@ def test_build_ha_conf4(): assert tests==1 -def test_exception_and_eval(inverter_data: bytes): +def test_exception_and_calc(inverter_data: bytes): - # add eval to convert temperature from °F to °C + # patch table to convert temperature from °F to °C ofs = RegisterMap.map[0x420100d8]['offset'] - del RegisterMap.map[0x420100d8]['offset'] - RegisterMap.map[0x420100d8]['eval'] = '(result-32)/1.8' + RegisterMap.map[0x420100d8]['quotient'] = 1.8 + RegisterMap.map[0x420100d8]['offset'] = -32/1.8 # map PV1_VOLTAGE to invalid register RegisterMap.map[0x420100e0]['reg'] = Register.TEST_REG2 # set invalid maping entry for OUTPUT_POWER (string instead of dict type) @@ -274,7 +274,9 @@ def test_exception_and_eval(inverter_data: bytes): for key, update in i.parse (inverter_data, 0x42, 1): pass # side effect is calling generator i.parse() assert math.isclose(12.2222, round (i.get_db_value(Register.INVERTER_TEMP, 0),4), rel_tol=1e-09, abs_tol=1e-09) - del RegisterMap.map[0x420100d8]['eval'] # remove eval + del RegisterMap.map[0x420100d8]['quotient'] + del RegisterMap.map[0x420100d8]['offset'] + RegisterMap.map[0x420100e0]['reg'] = Register.PV1_VOLTAGE # reset mapping RegisterMap.map[0x420100de] = backup # reset mapping From a6ad3d4f0d961d90f0a30d6f63a23f17259a2beb Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Fri, 25 Oct 2024 23:49:35 +0200 Subject: [PATCH 16/32] fix linter warnings --- app/src/infos.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) 
diff --git a/app/src/infos.py b/app/src/infos.py index c94e219..4358969 100644 --- a/app/src/infos.py +++ b/app/src/infos.py @@ -154,8 +154,7 @@ class Fmt: @staticmethod def version(val): x = val[0] - return f'V{(x>>12)}.{(x>>8)&0xf}.{(x>>4)&0xf}{x&0xf:1X}' - # return f'V{x>>12}.{(x>>8)&0xf}.{(x>>4)&0xf}{val[0]&0xf}' + return f'V{(x >> 12)}.{(x >> 8) & 0xf}.{(x >> 4) & 0xf}{x & 0xf:1X}' class ClrAtMidnight: From 9b22fe354c3420fe4e0a07f21e44c6fbe17ee415 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 26 Oct 2024 17:30:00 +0200 Subject: [PATCH 17/32] clear remote ptr on disc only for client ifcs --- app/src/async_stream.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/app/src/async_stream.py b/app/src/async_stream.py index 2650235..b023c23 100644 --- a/app/src/async_stream.py +++ b/app/src/async_stream.py @@ -221,7 +221,6 @@ class AsyncStream(AsyncIfcImpl): async def disc(self) -> None: """Async disc handler for graceful disconnect""" - self.remote = None if self._writer.is_closing(): return logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}') @@ -370,6 +369,11 @@ class AsyncStreamClient(AsyncStream): AsyncStream.__init__(self, reader, writer, rstream) self.close_cb = close_cb + async def disc(self) -> None: + logging.debug('AsyncStreamClient.disc()') + self.remote = None + await super().disc() + def close(self) -> None: logging.debug('AsyncStreamClient.close()') self.close_cb = None From 78a35b5513837a1941f126944df5b2ddbf8664c6 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sat, 2 Nov 2024 15:09:10 +0100 Subject: [PATCH 18/32] report alarm and fault bitfield to ha (#204) * report alarm and fault bitfield to home assistant * initial verson of message builder for SolarmanV5 - for SolarmaV5 we build he param field for the device and inverter message from the internal database - added param description to the info table for constant values, which are not parsed and stored 
in internal database * define constants for often used format strings * update changelog --- CHANGELOG.md | 3 + app/src/gen3/infos_g3.py | 20 +--- app/src/gen3plus/infos_g3p.py | 70 +++++++++++- app/src/infos.py | 204 +++++++++++++++++++++++++++------- app/src/modbus.py | 10 ++ app/tests/test_infos.py | 23 +++- app/tests/test_infos_g3.py | 4 +- app/tests/test_infos_g3p.py | 64 +++++++++-- 8 files changed, 328 insertions(+), 70 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c2bd297..a3bc961 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- add SolarmanV5 messages builder +- report inverter alarms and faults per MQTT [#7](https://github.com/s-allius/tsun-gen3-proxy/issues/7) + ## [0.11.0] - 2024-10-13 - fix healthcheck on infrastructure with IPv6 support [#196](https://github.com/s-allius/tsun-gen3-proxy/issues/196) diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 88f9207..480fc94 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -70,22 +70,10 @@ class RegisterMap: 0x000d0020: {'reg': Register.COLLECT_INTERVAL}, 0x000cf850: {'reg': Register.DATA_UP_INTERVAL}, 0x000c7f38: {'reg': Register.COMMUNICATION_TYPE}, - 0x00000191: {'reg': Register.EVENT_401}, - 0x00000192: {'reg': Register.EVENT_402}, - 0x00000193: {'reg': Register.EVENT_403}, - 0x00000194: {'reg': Register.EVENT_404}, - 0x00000195: {'reg': Register.EVENT_405}, - 0x00000196: {'reg': Register.EVENT_406}, - 0x00000197: {'reg': Register.EVENT_407}, - 0x00000198: {'reg': Register.EVENT_408}, - 0x00000199: {'reg': Register.EVENT_409}, - 0x0000019a: {'reg': Register.EVENT_410}, - 0x0000019b: {'reg': Register.EVENT_411}, - 0x0000019c: {'reg': Register.EVENT_412}, - 0x0000019d: {'reg': Register.EVENT_413}, - 0x0000019e: {'reg': Register.EVENT_414}, - 0x0000019f: {'reg': Register.EVENT_415}, - 0x000001a0: {'reg': Register.EVENT_416}, + 0x00000190: 
{'reg': Register.EVENT_ALARM}, + 0x000001f4: {'reg': Register.EVENT_FAULT}, + 0x00000258: {'reg': Register.EVENT_BF1}, + 0x000002bc: {'reg': Register.EVENT_BF2}, 0x00000064: {'reg': Register.INVERTER_STATUS}, 0x0000125c: {'reg': Register.MAX_DESIGNED_POWER}, 0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024}, diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index 58c53c9..443dfac 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -11,28 +11,48 @@ class RegisterMap: # make the class read/only by using __slots__ __slots__ = () + FMT_2_16BIT_VAL = '!HH' + FMT_3_16BIT_VAL = '!HHH' + FMT_4_16BIT_VAL = '!HHHH' + map = { # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': '> 16) & 0xff + mtype = (idx >> 24) & 0xff + if ftype != rcv_ftype or mtype != msg_type: + continue + if not isinstance(row, dict): + continue + if 'const' in row: + val = row['const'] + else: + info_id = row['reg'] + val = self.get_db_value(info_id) + if not val: + continue + Fmt.set_value(buf, addr, row, val) + return buf diff --git a/app/src/infos.py b/app/src/infos.py index 4358969..92609a1 100644 --- a/app/src/infos.py +++ b/app/src/infos.py @@ -43,6 +43,8 @@ class Register(Enum): RATED_POWER = 84 INVERTER_TEMP = 85 INVERTER_STATUS = 86 + DETECT_STATUS_1 = 87 + DETECT_STATUS_2 = 88 PV1_VOLTAGE = 100 PV1_CURRENT = 101 PV1_POWER = 102 @@ -85,6 +87,10 @@ class Register(Enum): PV5_TOTAL_GENERATION = 241 PV6_DAILY_GENERATION = 250 PV6_TOTAL_GENERATION = 251 + INV_UNKNOWN_1 = 252 + BOOT_STATUS = 253 + DSP_STATUS = 254 + GRID_VOLTAGE = 300 GRID_CURRENT = 301 GRID_FREQUENCY = 302 @@ -100,22 +106,11 @@ class Register(Enum): IP_ADDRESS = 407 POLLING_INTERVAL = 408 SENSOR_LIST = 409 - EVENT_401 = 500 - EVENT_402 = 501 - EVENT_403 = 502 - EVENT_404 = 503 - EVENT_405 = 504 - EVENT_406 = 505 - EVENT_407 = 506 - EVENT_408 = 507 - EVENT_409 = 508 - EVENT_410 = 509 - EVENT_411 = 510 - EVENT_412 = 511 - EVENT_413 = 512 - EVENT_414 = 513 - 
EVENT_415 = 514 - EVENT_416 = 515 + SSID = 410 + EVENT_ALARM = 500 + EVENT_FAULT = 501 + EVENT_BF1 = 502 + EVENT_BF2 = 503 TS_INPUT = 600 TS_GRID = 601 TS_TOTAL = 602 @@ -144,17 +139,54 @@ class Fmt: return result @staticmethod - def hex4(val): - return f'{val[0]:04x}' + def hex4(val: tuple | str, reverse=False) -> str | int: + if not reverse: + return f'{val[0]:04x}' + else: + return int(val, 16) @staticmethod - def mac(val): - return "%02x:%02x:%02x:%02x:%02x:%02x" % val + def mac(val: tuple | str, reverse=False) -> str | tuple: + if not reverse: + return "%02x:%02x:%02x:%02x:%02x:%02x" % val + else: + return ( + int(val[0:2], 16), int(val[3:5], 16), + int(val[6:8], 16), int(val[9:11], 16), + int(val[12:14], 16), int(val[15:], 16)) @staticmethod - def version(val): - x = val[0] - return f'V{(x >> 12)}.{(x >> 8) & 0xf}.{(x >> 4) & 0xf}{x & 0xf:1X}' + def version(val: tuple | str, reverse=False) -> str | int: + if not reverse: + x = val[0] + return f'V{(x >> 12)}.{(x >> 8) & 0xf}' \ + f'.{(x >> 4) & 0xf}{x & 0xf:1X}' + else: + arr = val[1:].split('.') + return int(arr[0], 10) << 12 | \ + int(arr[1], 10) << 8 | \ + int(arr[2][:-1], 10) << 4 | \ + int(arr[2][-1:], 16) + + @staticmethod + def set_value(buf: bytearray, idx: int, row: dict, val): + '''Get a value from buf and interpret as in row defined''' + fmt = row['fmt'] + if 'offset' in row: + val = val - row['offset'] + if 'quotient' in row: + val = round(val * row['quotient']) + if 'ratio' in row: + val = round(val / row['ratio']) + if 'func' in row: + val = row['func'](val, reverse=True) + if isinstance(val, str): + val = bytes(val, 'UTF8') + + if isinstance(val, tuple): + struct.pack_into(fmt, buf, idx, *val) + else: + struct.pack_into(fmt, buf, idx, val) class ClrAtMidnight: @@ -251,6 +283,99 @@ class Infos: {{ this.state }} {% endif %} ''' + __inv_alarm_val_tpl = ''' +{% if 'Inverter_Alarm' in value_json and + value_json['Inverter_Alarm'] != None %} + {% set val_int = value_json['Inverter_Alarm'] | int %} + 
{% if val_int == 0 %} + {% set result = 'noAlarm'%} + {%else%} + {% set result = '' %} + {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} + {% endif %} + {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} + {% endif %} + {% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%} + {% endif %} + {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} + {% endif %} + {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} + {% endif %} + {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} + {% endif %} + {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} + {% endif %} + {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} + {% endif %} + {% if val_int | bitwise_and(9)%}{% set result = result + 'noUtility, '%} + {% endif %} + {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} + {% endif %} + {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} + {% endif %} + {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} + {% endif %} + {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} + {% endif %} + {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} + {% endif %} + {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} + {% endif %} + {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} + {% endif %} + {% endif %} + {{ result }} +{% else %} + {{ this.state }} +{% endif %} +''' + __inv_fault_val_tpl = ''' +{% if 'Inverter_Fault' in value_json and + value_json['Inverter_Fault'] != None %} + {% set val_int = value_json['Inverter_Fault'] | int %} + {% if val_int == 0 %} + {% set result = 'noFault'%} + {%else%} + {% set result = '' %} + {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} + {% endif %} + {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} + {% endif %} + {% if val_int | bitwise_and(3)%}{% set 
result = result + 'Bit3, '%} + {% endif %} + {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} + {% endif %} + {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} + {% endif %} + {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} + {% endif %} + {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} + {% endif %} + {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} + {% endif %} + {% if val_int | bitwise_and(9)%}{% set result = result + 'Bit9, '%} + {% endif %} + {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} + {% endif %} + {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} + {% endif %} + {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} + {% endif %} + {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} + {% endif %} + {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} + {% endif %} + {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} + {% endif %} + {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} + {% endif %} + {% endif %} + {{ result }} +{% else %} + {{ this.state }} +{% endif %} +''' + __output_coef_val_tpl = "{% if 'Output_Coefficient' in value_json and value_json['Output_Coefficient'] != None %}{{value_json['Output_Coefficient']|string() +' %'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 __info_defs = { @@ -286,7 +411,8 @@ class Infos: Register.PV5_MODEL: {'name': ['inverter', 'PV5_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.PV6_MANUFACTURER: {'name': ['inverter', 'PV6_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.PV6_MODEL: {'name': ['inverter', 'PV6_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - + Register.BOOT_STATUS: {'name': ['inverter', 'BOOT_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.DSP_STATUS: {'name': ['inverter', 'DSP_STATUS'], 
'level': logging.DEBUG, 'unit': ''}, # noqa: E501 # proxy: Register.INVERTER_CNT: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': FMT_INT, 'name': 'Active Inverter Connections', 'icon': COUNTER}}, # noqa: E501 Register.UNKNOWN_SNR: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': FMT_INT, 'name': 'Unknown Serial No', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 @@ -303,22 +429,12 @@ class Infos: # 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':FMT_FLOAT,'name': 'Grid Voltage'}}, # noqa: E501 # events - Register.EVENT_401: {'name': ['events', '401_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_402: {'name': ['events', '402_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_403: {'name': ['events', '403_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_404: {'name': ['events', '404_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_405: {'name': ['events', '405_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_406: {'name': ['events', '406_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_407: {'name': ['events', '407_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_408: {'name': ['events', '408_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_409: {'name': ['events', '409_No_Utility'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_410: {'name': ['events', '410_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_411: {'name': ['events', '411_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - 
Register.EVENT_412: {'name': ['events', '412_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_413: {'name': ['events', '413_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_414: {'name': ['events', '414_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_416: {'name': ['events', '416_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.EVENT_ALARM: {'name': ['events', 'Inverter_Alarm'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_alarm_', 'name': 'Inverter Alarm', 'val_tpl': __inv_alarm_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 + Register.EVENT_FAULT: {'name': ['events', 'Inverter_Fault'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_fault_', 'name': 'Inverter Fault', 'val_tpl': __inv_fault_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 + Register.EVENT_BF1: {'name': ['events', 'Inverter_Bitfield_1'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.EVENT_BF2: {'name': ['events', 'Inverter_bitfield_2'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + # Register.EVENT_409: {'name': ['events', '409_No_Utility'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + # Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 # grid measures: Register.TS_GRID: {'name': ['grid', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 @@ -328,6 +444,8 @@ class Infos: Register.OUTPUT_POWER: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': FMT_FLOAT, 'name': 'Power'}}, # noqa: E501 Register.INVERTER_TEMP: {'name': 
['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': FMT_INT, 'name': 'Temperature'}}, # noqa: E501 Register.INVERTER_STATUS: {'name': ['env', 'Inverter_Status'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_status_', 'name': 'Inverter Status', 'val_tpl': __status_type_val_tpl, 'icon': 'mdi:power'}}, # noqa: E501 + Register.DETECT_STATUS_1: {'name': ['env', 'Detect_Status_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.DETECT_STATUS_2: {'name': ['env', 'Detect_Status_2'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 # input measures: Register.TS_INPUT: {'name': ['input', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 @@ -377,6 +495,10 @@ class Infos: Register.IP_ADDRESS: {'name': ['controller', 'IP_Address'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'ip_address_', 'fmt': '| string', 'name': 'IP Address', 'icon': WIFI, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.POLLING_INTERVAL: {'name': ['controller', 'Polling_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'polling_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Polling Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.SENSOR_LIST: {'name': ['controller', 'Sensor_List'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.SSID: {'name': ['controller', 'WiFi_SSID'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + + Register.INV_UNKNOWN_1: {'name': ['inv_unknown', 'Unknown_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + } @property @@ -686,6 +808,8 @@ class Infos: def get_db_value(self, id: Register, not_found_result: any = None): '''get database value''' + if id not in self.info_defs: + return not_found_result 
row = self.info_defs[id] if isinstance(row, dict): keys = row['name'] diff --git a/app/src/modbus.py b/app/src/modbus.py index eec6d17..c3a1d67 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -40,10 +40,19 @@ class Modbus(): __crc_tab = [] mb_reg_mapping = { + 0x2000: {'reg': Register.BOOT_STATUS, 'fmt': '!H'}, # noqa: E501 + 0x2001: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, # noqa: E501 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501 + 0x3001: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501 + 0x3002: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501 + 0x3003: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501 + 0x3004: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501 + 0x3005: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501 + 0x3006: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501 + 0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501 0x3009: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 0x300a: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 @@ -74,6 +83,7 @@ class Modbus(): 0x3026: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 0x3028: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 0x3029: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 + # 0x302a } def __init__(self, snd_handler: Callable[[bytes, int, str], None], diff --git a/app/tests/test_infos.py b/app/tests/test_infos.py index 8d0c268..18eb5e4 100644 --- a/app/tests/test_infos.py +++ b/app/tests/test_infos.py @@ -3,7 +3,7 @@ import pytest import json, math import logging from app.src.infos import Register, ClrAtMidnight -from app.src.infos import Infos +from app.src.infos 
import Infos, Fmt def test_statistic_counter(): i = Infos() @@ -256,3 +256,24 @@ def test_key_obj(): assert level == logging.DEBUG assert unit == 'kWh' assert must_incr == True + +def test_hex4_cnv(): + tst_val = (0x12ef, ) + string = Fmt.hex4(tst_val) + assert string == '12ef' + val = Fmt.hex4(string, reverse=True) + assert val == tst_val[0] + +def test_mac_cnv(): + tst_val = (0x12, 0x34, 0x67, 0x89, 0xcd, 0xef) + string = Fmt.mac(tst_val) + assert string == '12:34:67:89:cd:ef' + val = Fmt.mac(string, reverse=True) + assert val == tst_val + +def test_version_cnv(): + tst_val = (0x123f, ) + string = Fmt.version(tst_val) + assert string == 'V1.2.3F' + val = Fmt.version(string, reverse=True) + assert val == tst_val[0] diff --git a/app/tests/test_infos_g3.py b/app/tests/test_infos_g3.py index 0c84c20..1bab29e 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -501,10 +501,10 @@ def test_new_data_types(inv_data_new): else: assert False - assert tests==15 + assert tests==5 assert json.dumps(i.db['inverter']) == json.dumps({"Manufacturer": 0}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {}}) - assert json.dumps(i.db['events']) == json.dumps({"401_": 0, "404_": 0, "405_": 0, "408_": 0, "409_No_Utility": 0, "406_": 0, "416_": 0}) + assert json.dumps(i.db['events']) == json.dumps({"Inverter_Alarm": 0, "Inverter_Fault": 0}) def test_invalid_data_type(invalid_data_seq): i = InfosG3() diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index 095a29b..3fbefa9 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -57,6 +57,7 @@ def inverter_data(): # 0x4210 ftype: 0x01 msg += b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd' msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04' msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75' + msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00' msg += 
b'\x00\x00\x00\x00\xff\xff\x07\xd0\x00\x03\x04\x00\x04\x00\x04\x00' msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00' @@ -85,10 +86,21 @@ def test_parse_4110(str_test_ip, device_data: bytes): pass # side effect is calling generator i.parse() assert json.dumps(i.db) == json.dumps({ - 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": str_test_ip, "Sensor_List": "02b0"}, + 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": str_test_ip, "Sensor_List": "02b0", "WiFi_SSID": "Allius-Home"}, 'collector': {"Chip_Model": "LSW5BLE_17_02B0_1.05", "MAC-Addr": "40:2a:8f:4f:51:54", "Collector_Fw_Version": "V1.1.00.0B"}, }) +def test_build_4110(str_test_ip, device_data: bytes): + i = InfosG3P(client_mode=False) + i.db.clear() + for key, update in i.parse (device_data, 0x41, 2): + pass # side effect is calling generator i.parse() + + build_msg = i.build(len(device_data), 0x41, 2) + for i in range(11, 20): + build_msg[i] = device_data[i] + assert device_data == build_msg + def test_parse_4210(inverter_data: bytes): i = InfosG3P(client_mode=False) i.db.clear() @@ -98,16 +110,30 @@ def test_parse_4210(inverter_data: bytes): assert json.dumps(i.db) == json.dumps({ "controller": {"Sensor_List": "02b0", "Power_On_Time": 2051}, - "inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "Max_Designed_Power": 2000, "Output_Coefficient": 100.0}, - "env": {"Inverter_Status": 1, "Inverter_Temp": 14}, + "inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Max_Designed_Power": 2000, "Output_Coefficient": 100.0}, + "env": {"Inverter_Status": 1, "Detect_Status_1": 2, "Detect_Status_2": 0, "Inverter_Temp": 14}, + "events": {"Inverter_Alarm": 0, "Inverter_Fault": 0, "Inverter_Bitfield_1": 0, 
"Inverter_bitfield_2": 0}, "grid": {"Voltage": 224.8, "Current": 0.73, "Frequency": 50.05, "Output_Power": 165.8}, "input": {"pv1": {"Voltage": 35.3, "Current": 1.68, "Power": 59.6, "Daily_Generation": 0.04, "Total_Generation": 30.76}, "pv2": {"Voltage": 34.6, "Current": 1.38, "Power": 48.4, "Daily_Generation": 0.03, "Total_Generation": 27.91}, "pv3": {"Voltage": 34.6, "Current": 1.89, "Power": 65.5, "Daily_Generation": 0.05, "Total_Generation": 31.89}, "pv4": {"Voltage": 1.7, "Current": 0.01, "Power": 0.0, "Total_Generation": 15.58}}, - "total": {"Daily_Generation": 0.11, "Total_Generation": 101.36} + "total": {"Daily_Generation": 0.11, "Total_Generation": 101.36}, + "inv_unknown": {"Unknown_1": 512} }) + +def test_build_4210(inverter_data: bytes): + i = InfosG3P(client_mode=False) + i.db.clear() + for key, update in i.parse (inverter_data, 0x42, 1): + pass # side effect is calling generator i.parse() + + build_msg = i.build(len(inverter_data), 0x42, 1) + for i in range(11, 31): + build_msg[i] = inverter_data[i] + assert inverter_data == build_msg + def test_build_ha_conf1(): i = InfosG3P(client_mode=False) i.static_init() # initialize counter @@ -269,19 +295,43 @@ def test_exception_and_calc(inverter_data: bytes): RegisterMap.map[0x420100de] = 'invalid_entry' i = InfosG3P(client_mode=False) - # i.db.clear() + i.db.clear() for key, update in i.parse (inverter_data, 0x42, 1): pass # side effect is calling generator i.parse() assert math.isclose(12.2222, round (i.get_db_value(Register.INVERTER_TEMP, 0),4), rel_tol=1e-09, abs_tol=1e-09) + + build_msg = i.build(len(inverter_data), 0x42, 1) + assert build_msg[32:0xde] == inverter_data[32:0xde] + assert build_msg[0xde:0xe2] == b'\x00\x00\x00\x00' + assert build_msg[0xe2:-1] == inverter_data[0xe2:-1] + + + # remove a table entry and test parsing and building del RegisterMap.map[0x420100d8]['quotient'] del RegisterMap.map[0x420100d8]['offset'] - RegisterMap.map[0x420100e0]['reg'] = Register.PV1_VOLTAGE # reset mapping - 
RegisterMap.map[0x420100de] = backup # reset mapping + i.db.clear() for key, update in i.parse (inverter_data, 0x42, 1): pass # side effect is calling generator i.parse() assert 54 == i.get_db_value(Register.INVERTER_TEMP, 0) + build_msg = i.build(len(inverter_data), 0x42, 1) + assert build_msg[32:0xd8] == inverter_data[32:0xd8] + assert build_msg[0xd8:0xe2] == b'\x006\x00\x00\x02X\x00\x00\x00\x00' + assert build_msg[0xe2:-1] == inverter_data[0xe2:-1] + + # test restore table RegisterMap.map[0x420100d8]['offset'] = ofs + RegisterMap.map[0x420100e0]['reg'] = Register.PV1_VOLTAGE # reset mapping + RegisterMap.map[0x420100de] = backup # reset mapping + + # test orginial table + i.db.clear() + for key, update in i.parse (inverter_data, 0x42, 1): + pass # side effect is calling generator i.parse() + assert 14 == i.get_db_value(Register.INVERTER_TEMP, 0) + + build_msg = i.build(len(inverter_data), 0x42, 1) + assert build_msg[32:-1] == inverter_data[32:-1] From 5ced5ff06ab64e92fcfdbb28ff093becf179d634 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Wed, 13 Nov 2024 22:03:28 +0100 Subject: [PATCH 19/32] S allius/issue205 (#207) * Add SolarmanEmu class * Forward a device ind to establish the EMU connection * Move SolarmanEmu class into a dedicated file * Add cloud connection counter * Send inverter data in emulator mode * Improve emulator mode - parse more values from MQTT register - differ between inverter and logger serial no * Add some unit tests for SolarmanEmu class * Send seconds since last sync in data packets * Increase test coverage --- app/src/async_stream.py | 20 +- app/src/config.py | 3 +- app/src/gen3/infos_g3.py | 8 + app/src/gen3plus/infos_g3p.py | 26 +- app/src/gen3plus/inverter_g3p.py | 7 +- app/src/gen3plus/solarman_emu.py | 144 ++++++++++ app/src/gen3plus/solarman_v5.py | 458 +++++++++++++++++-------------- app/src/infos.py | 20 +- app/src/inverter_base.py | 10 +- app/src/messages.py | 13 +- 
app/src/modbus.py | 6 + app/src/modbus_tcp.py | 8 +- app/tests/test_infos.py | 4 +- app/tests/test_infos_g3.py | 8 +- app/tests/test_infos_g3p.py | 5 +- app/tests/test_inverter_g3p.py | 6 +- app/tests/test_modbus_tcp.py | 7 +- app/tests/test_solarman.py | 40 ++- app/tests/test_solarman_emu.py | 230 ++++++++++++++++ 19 files changed, 767 insertions(+), 256 deletions(-) create mode 100644 app/src/gen3plus/solarman_emu.py create mode 100644 app/tests/test_solarman_emu.py diff --git a/app/src/async_stream.py b/app/src/async_stream.py index b023c23..ca642b2 100644 --- a/app/src/async_stream.py +++ b/app/src/async_stream.py @@ -305,6 +305,14 @@ class AsyncStream(AsyncIfcImpl): f"Fwd Exception for {self.r_addr}:\n" f"{traceback.format_exc()}") + async def publish_outstanding_mqtt(self): + '''Publish all outstanding MQTT topics''' + try: + await self.async_publ_mqtt() + await Proxy._async_publ_mqtt_proxy_stat('proxy') + except Exception: + pass + class AsyncStreamServer(AsyncStream): def __init__(self, reader: StreamReader, writer: StreamWriter, @@ -354,14 +362,6 @@ class AsyncStreamServer(AsyncStream): self.remote.ifc._writer.write(self.fwd_fifo.get()) await self.remote.ifc._writer.drain() - async def publish_outstanding_mqtt(self): - '''Publish all outstanding MQTT topics''' - try: - await self.async_publ_mqtt() - await Proxy._async_publ_mqtt_proxy_stat('proxy') - except Exception: - pass - class AsyncStreamClient(AsyncStream): def __init__(self, reader: StreamReader, writer: StreamWriter, @@ -381,7 +381,11 @@ class AsyncStreamClient(AsyncStream): async def client_loop(self, _: str) -> None: '''Loop for receiving messages from the TSUN cloud (client-side)''' + Infos.inc_counter('Cloud_Conn_Cnt') + await self.publish_outstanding_mqtt() await self.loop() + Infos.dec_counter('Cloud_Conn_Cnt') + await self.publish_outstanding_mqtt() logger.info(f'[{self.node_id}:{self.conn_no}] ' 'Client loop stopped for' f' l{self.l_addr}') diff --git a/app/src/config.py b/app/src/config.py 
index 02138e7..3424bd9 100644 --- a/app/src/config.py +++ b/app/src/config.py @@ -57,7 +57,8 @@ class Config(): Optional('client_mode'): { 'host': Use(str), Optional('port', default=8899): - And(Use(int), lambda n: 1024 <= n <= 65535) + And(Use(int), lambda n: 1024 <= n <= 65535), + Optional('forward', default=False): Use(bool), }, Optional('modbus_polling', default=True): Use(bool), Optional('suggested_area', default=""): Use(str), diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 480fc94..950a956 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -75,7 +75,15 @@ class RegisterMap: 0x00000258: {'reg': Register.EVENT_BF1}, 0x000002bc: {'reg': Register.EVENT_BF2}, 0x00000064: {'reg': Register.INVERTER_STATUS}, + + 0x00000fa0: {'reg': Register.BOOT_STATUS}, + 0x00001004: {'reg': Register.DSP_STATUS}, + 0x000010cc: {'reg': Register.WORK_MODE}, + 0x000011f8: {'reg': Register.OUTPUT_SHUTDOWN}, 0x0000125c: {'reg': Register.MAX_DESIGNED_POWER}, + 0x000012c0: {'reg': Register.RATED_LEVEL}, + 0x00001324: {'reg': Register.INPUT_COEFFICIENT, 'ratio': 100/1024}, + 0x00001388: {'reg': Register.GRID_VOLT_CAL_COEF}, 0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024}, } diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index 443dfac..aec8bfb 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -17,17 +17,17 @@ class RegisterMap: map = { # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': ' None: + logging.info('SolarmanEmu.close()') + # we have references to methods of this class in self.switch + # so we have to erase self.switch, otherwise this instance can't be + # deallocated by the garbage collector ==> we get a memory leak + self.switch.clear() + self.log_lvl.clear() + self.hb_timer.close() + self.data_timer.close() + self.db = None + super().close() + + def _set_serial_no(self, snr: int): + logging.debug(f'SolarmanEmu._set_serial_no, snr: {snr}') + self.unique_id = 
str(snr) + + def _init_new_client_conn(self) -> bool: + logging.debug('SolarmanEmu.init_new()') + self.data_timer.start(self.data_up_inv) + return False + + def next_pkt_cnt(self): + '''get the next packet number''' + self.pkt_cnt = (self.pkt_cnt + 1) & 0xffffffff + return self.pkt_cnt + + def seconds_since_last_sync(self): + '''get seconds since last 0x4110 message was sent''' + return self._emu_timestamp() - self.last_sync + + def send_heartbeat_cb(self, exp_cnt): + '''send a heartbeat to the TSUN cloud''' + self._build_header(0x4710) + self.ifc.tx_add(struct.pack(' float: + '''process all received messages in the _recv_buffer''' + self._read() + while True: + if not self.header_valid: + self.__parse_header(self.ifc.rx_peek(), + self.ifc.rx_len()) + + if self.header_valid and self.ifc.rx_len() >= \ + (self.header_len + self.data_len+2): + self.__process_complete_received_msg() + self.__flush_recv_msg() + else: + return 0 # wait 0s before sending a response + ''' + Our public methods + ''' + def _flow_str(self, server_side: bool, type: str): # noqa: F821 + switch = { + 'rx': ' <', + 'tx': ' >', + 'forwrd': '<< ', + 'drop': ' xx', + 'rxS': '> ', + 'txS': '< ', + 'forwrdS': ' >>', + 'dropS': 'xx ', + } + if server_side: + type += 'S' + return switch.get(type, '???') + + def get_fnc_handler(self, ctrl): + fnc = self.switch.get(ctrl, self.msg_unknown) + if callable(fnc): + return fnc, repr(fnc.__name__) + else: + return self.msg_unknown, repr(fnc) + + def _build_header(self, ctrl) -> None: + '''build header for new transmit message''' + self.send_msg_ofs = self.ifc.tx_len() + + self.ifc.tx_add(struct.pack( + ' None: + '''finish the transmit message, set lenght and checksum''' + _len = self.ifc.tx_len() - self.send_msg_ofs + struct.pack_into(' None: + + if (buf_len < self.header_len): # enough bytes for complete header? 
+ return + + result = struct.unpack_from(' bool: + crc = buf[self.data_len+11] + stop = buf[self.data_len+12] + if stop != 0x15: + hex_dump_memory(logging.ERROR, + 'Drop packet w invalid stop byte from ' + f'{self.addr}:', buf, buf_len) + self.inc_counter('Invalid_Msg_Format') + if self.ifc.rx_len() > (self.data_len+13): + next_start = buf[self.data_len+13] + if next_start != 0xa5: + # erase broken recv buffer + self.ifc.rx_clear() + + return False + + check = sum(buf[1:buf_len-2]) & 0xff + if check != crc: + self.inc_counter('Invalid_Msg_Format') + logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}' + f' Stop:{int(stop):#02x}') + # start & stop byte are valid, discard only this message + return False + + return True + + def __flush_recv_msg(self) -> None: + self.ifc.rx_get(self.header_len + self.data_len+2) + self.header_valid = False + + def __dispatch_msg(self) -> None: + _fnc, _str = self.get_fnc_handler(self.control) + if self.unique_id: + logger.info(self._flow_str(self.server_side, 'rx') + + f' Ctl: {int(self.control):#04x}' + + f' Msg: {_str}') + _fnc() + else: + logger.info(self._flow_str(self.server_side, 'drop') + + f' Ctl: {int(self.control):#04x}' + + f' Msg: {_str}') + + ''' + Message handler methods + ''' + def msg_response(self): + data = self.ifc.rx_peek()[self.header_len:] + result = struct.unpack_from(' float: - '''process all received messages in the _recv_buffer''' - self._read() - while True: - if not self.header_valid: - self.__parse_header(self.ifc.rx_peek(), - self.ifc.rx_len()) - - if self.header_valid and self.ifc.rx_len() >= \ - (self.header_len + self.data_len+2): - self.__process_complete_received_msg() - self.__flush_recv_msg() - else: - return 0 # wait 0s before sending a response - - def __process_complete_received_msg(self): - log_lvl = self.log_lvl.get(self.control, logging.WARNING) - if callable(log_lvl): - log_lvl = log_lvl() - self.ifc.rx_log(log_lvl, f'Received from {self.addr}:') - # self._recv_buffer, self.header_len + - # 
self.data_len+2) - if self.__trailer_is_ok(self.ifc.rx_peek(), self.header_len - + self.data_len + 2): - if self.state == State.init: - self.state = State.received - self.__set_serial_no(self.snr) - self.__dispatch_msg() - def forward(self, buffer, buflen) -> None: '''add the actual receive msg to the forwarding queue''' if self.no_forwarding: @@ -252,158 +434,34 @@ class SolarmanV5(Message): self.ifc.fwd_add(buffer[:buflen]) self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') - fnc = self.switch.get(self.control, self.msg_unknown) - logger.info(self.__flow_str(self.server_side, 'forwrd') + + _, _str = self.get_fnc_handler(self.control) + logger.info(self._flow_str(self.server_side, 'forwrd') + f' Ctl: {int(self.control):#04x}' - f' Msg: {fnc.__name__!r}') + f' Msg: {_str}') def _init_new_client_conn(self) -> bool: return False - ''' - Our private methods - ''' - def __flow_str(self, server_side: bool, type: str): # noqa: F821 - switch = { - 'rx': ' <', - 'tx': ' >', - 'forwrd': '<< ', - 'drop': ' xx', - 'rxS': '> ', - 'txS': '< ', - 'forwrdS': ' >>', - 'dropS': 'xx ', - } - if server_side: - type += 'S' - return switch.get(type, '???') - - def _timestamp(self): - # utc as epoche - return int(time.time()) # pragma: no cover - def _heartbeat(self) -> int: return 60 # pragma: no cover - def __parse_header(self, buf: bytes, buf_len: int) -> None: - - if (buf_len < self.header_len): # enough bytes for complete header? 
- return - - result = struct.unpack_from(' bool: - crc = buf[self.data_len+11] - stop = buf[self.data_len+12] - if stop != 0x15: - hex_dump_memory(logging.ERROR, - 'Drop packet w invalid stop byte from ' - f'{self.addr}:', buf, buf_len) - self.inc_counter('Invalid_Msg_Format') - if self.ifc.rx_len() > (self.data_len+13): - next_start = buf[self.data_len+13] - if next_start != 0xa5: - # erase broken recv buffer - self.ifc.rx_clear() - - return False - - check = sum(buf[1:buf_len-2]) & 0xff - if check != crc: - self.inc_counter('Invalid_Msg_Format') - logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}' - f' Stop:{int(stop):#02x}') - # start & stop byte are valid, discard only this message - return False - - return True - - def __build_header(self, ctrl) -> None: - '''build header for new transmit message''' - self.send_msg_ofs = self.ifc.tx_len() - - self.ifc.tx_add(struct.pack( - ' None: - '''finish the transmit message, set lenght and checksum''' - _len = self.ifc.tx_len() - self.send_msg_ofs - struct.pack_into(' None: - fnc = self.switch.get(self.control, self.msg_unknown) - if self.unique_id: - logger.info(self.__flow_str(self.server_side, 'rx') + - f' Ctl: {int(self.control):#04x}' + - f' Msg: {fnc.__name__!r}') - fnc() - else: - logger.info(self.__flow_str(self.server_side, 'drop') + - f' Ctl: {int(self.control):#04x}' + - f' Msg: {fnc.__name__!r}') - - def __flush_recv_msg(self) -> None: - self.ifc.rx_get(self.header_len + self.data_len+2) - self.header_valid = False - def __send_ack_rsp(self, msgtype, ftype, ack=1): - self.__build_header(msgtype) + self._build_header(msgtype) self.ifc.tx_add(struct.pack(' 4: # logger.info(f'first byte modbus:{data[14]}') - inv_update = False - self.modbus_elms = 0 - for key, update, _ in self.mb.recv_resp(self.db, data[14:]): - self.modbus_elms += 1 - if update: - if key == 'inverter': - inv_update = True - self._set_mqtt_timestamp(key, self._timestamp()) - self.new_data[key] = True + inv_update = 
self.__parse_modbus_rsp(data) if inv_update: self.__build_model_name() + if self.establish_inv_emu and not self.ifc.remote.stream: + self.establish_emu() + def msg_hbeat_ind(self): data = self.ifc.rx_peek()[self.header_len:] result = struct.unpack_from(' None: + async def modbus_loop(self, host, port, + snr: int, forward: bool) -> None: '''Loop for receiving messages from the TSUN cloud (client-side)''' while True: try: async with ModbusConn(host, port) as inverter: stream = inverter.local.stream - await stream.send_start_cmd(snr, host) + await stream.send_start_cmd(snr, host, forward) await stream.ifc.loop() logger.info(f'[{stream.node_id}:{stream.conn_no}] ' f'Connection closed - Shutdown: ' diff --git a/app/tests/test_infos.py b/app/tests/test_infos.py index 18eb5e4..ed4e293 100644 --- a/app/tests/test_infos.py +++ b/app/tests/test_infos.py @@ -17,13 +17,13 @@ def test_statistic_counter(): assert val == None or val == 0 i.static_init() # initialize counter - assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}}) + assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}}) val = i.dev_value(Register.INVERTER_CNT) # valid and initiliazed addr assert val == 0 i.inc_counter('Inverter_Cnt') - assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, 
"Modbus_Command": 0}}) + assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}}) val = i.dev_value(Register.INVERTER_CNT) assert val == 1 diff --git a/app/tests/test_infos_g3.py b/app/tests/test_infos_g3.py index 1bab29e..18d5854 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -421,7 +421,7 @@ def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero): if key == 'total' or key == 'inverter' or key == 'env': assert update == True tests +=1 - assert tests==8 + assert tests==12 assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}}) assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 23}) @@ -435,7 +435,7 @@ def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero): assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}}) assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 23}) - 
assert json.dumps(i.db['inverter']) == json.dumps({"Rated_Power": 600, "Max_Designed_Power": -1, "Output_Coefficient": 100.0, "No_Inputs": 2}) + assert json.dumps(i.db['inverter']) == json.dumps({"Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Work_Mode": 0, "Max_Designed_Power": -1, "Input_Coefficient": -0.1, "Output_Coefficient": 100.0, "No_Inputs": 2}) tests = 0 for key, update in i.parse (inv_data_seq2_zero): @@ -501,8 +501,8 @@ def test_new_data_types(inv_data_new): else: assert False - assert tests==5 - assert json.dumps(i.db['inverter']) == json.dumps({"Manufacturer": 0}) + assert tests==7 + assert json.dumps(i.db['inverter']) == json.dumps({"Manufacturer": 0, "DSP_STATUS": 0}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {}}) assert json.dumps(i.db['events']) == json.dumps({"Inverter_Alarm": 0, "Inverter_Fault": 0}) diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index 3fbefa9..51af74d 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -110,7 +110,7 @@ def test_parse_4210(inverter_data: bytes): assert json.dumps(i.db) == json.dumps({ "controller": {"Sensor_List": "02b0", "Power_On_Time": 2051}, - "inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Max_Designed_Power": 2000, "Output_Coefficient": 100.0}, + "inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Work_Mode": 0, "Max_Designed_Power": 2000, "Input_Coefficient": 100.0, "Output_Coefficient": 100.0}, "env": {"Inverter_Status": 1, "Detect_Status_1": 2, "Detect_Status_2": 0, "Inverter_Temp": 14}, "events": {"Inverter_Alarm": 0, "Inverter_Fault": 0, "Inverter_Bitfield_1": 0, "Inverter_bitfield_2": 0}, "grid": {"Voltage": 224.8, "Current": 0.73, "Frequency": 50.05, "Output_Power": 165.8}, @@ -119,7 +119,8 @@ def test_parse_4210(inverter_data: bytes): "pv3": {"Voltage": 34.6, 
"Current": 1.89, "Power": 65.5, "Daily_Generation": 0.05, "Total_Generation": 31.89}, "pv4": {"Voltage": 1.7, "Current": 0.01, "Power": 0.0, "Total_Generation": 15.58}}, "total": {"Daily_Generation": 0.11, "Total_Generation": 101.36}, - "inv_unknown": {"Unknown_1": 512} + "inv_unknown": {"Unknown_1": 512}, + "other": {"Output_Shutdown": 65535, "Rated_Level": 3, "Grid_Volt_Cal_Coef": 1024} }) def test_build_4210(inverter_data: bytes): diff --git a/app/tests/test_inverter_g3p.py b/app/tests/test_inverter_g3p.py index 0f47cbe..d06ee80 100644 --- a/app/tests/test_inverter_g3p.py +++ b/app/tests/test_inverter_g3p.py @@ -144,7 +144,7 @@ async def test_mqtt_publish(config_conn, patch_open_connection): with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: stream = inverter.local.stream await inverter.async_publ_mqtt() # check call with invalid unique_id - stream._SolarmanV5__set_serial_no(snr= 123344) + stream._set_serial_no(snr= 123344) stream.new_data['inverter'] = True stream.db.db['inverter'] = {} @@ -171,7 +171,7 @@ async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err): with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: stream = inverter.local.stream - stream._SolarmanV5__set_serial_no(snr= 123344) + stream._set_serial_no(snr= 123344) stream.new_data['inverter'] = True stream.db.db['inverter'] = {} await inverter.async_publ_mqtt() @@ -188,7 +188,7 @@ async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: stream = inverter.local.stream - stream._SolarmanV5__set_serial_no(snr= 123344) + stream._set_serial_no(snr= 123344) stream.new_data['inverter'] = True stream.db.db['inverter'] = {} diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index 93ecfa0..029a6f3 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -52,6 +52,10 @@ def 
config_conn(test_hostname, test_port): 'proxy_node_id': 'test_1', 'proxy_unique_id': '' }, + 'solarman':{ + 'host': 'access1.solarmanpv.com', + 'port': 10000 + }, 'inverters':{ 'allow_all': True, "R170000000000001":{ @@ -65,7 +69,8 @@ def config_conn(test_hostname, test_port): 'sensor_list': 0x2b0, 'client_mode':{ 'host': '192.168.0.1', - 'port': 8899 + 'port': 8899, + 'forward': True } } } diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index a980744..52b94ca 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -6,7 +6,7 @@ import logging import random from math import isclose from app.src.async_stream import AsyncIfcImpl, StreamPtr -from app.src.gen3plus.solarman_v5 import SolarmanV5 +from app.src.gen3plus.solarman_v5 import SolarmanV5, SolarmanBase from app.src.config import Config from app.src.infos import Infos, Register from app.src.modbus import Modbus @@ -37,6 +37,9 @@ class FakeIfc(AsyncIfcImpl): super().__init__() self.remote = StreamPtr(None) + async def create_remote(self): + await asyncio.sleep(0) + class MemoryStream(SolarmanV5): def __init__(self, msg, chunks = (0,), server_side: bool = True): _ifc = FakeIfc() @@ -109,7 +112,7 @@ class MemoryStream(SolarmanV5): c.ifc.remote.stream = self return c - def _SolarmanV5__flush_recv_msg(self) -> None: + def _SolarmanBase__flush_recv_msg(self) -> None: self.msg_recvd.append( { 'control': self.control, @@ -117,7 +120,7 @@ class MemoryStream(SolarmanV5): 'data_len': self.data_len } ) - super()._SolarmanV5__flush_recv_msg() + super()._SolarmanBase__flush_recv_msg() self.msg_count += 1 @@ -1102,7 +1105,7 @@ def test_sync_start_ind(config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.seq.server_side = False # simulate forawding to TSUN cloud - m._update_header(m.ifc.fwd_fifo.peek()) + m._SolarmanBase__update_header(m.ifc.fwd_fifo.peek()) assert str(m.seq) == '0d:0e' # value after forwarding indication assert 
m.ifc.fwd_fifo.get()==sync_start_fwd_msg @@ -1768,7 +1771,7 @@ async def test_start_client_mode(config_tsun_inv1, str_test_ip): assert m.no_forwarding == False assert m.mb_timer.tim == None assert asyncio.get_running_loop() == m.mb_timer.loop - await m.send_start_cmd(get_sn_int(), str_test_ip, m.mb_first_timeout) + await m.send_start_cmd(get_sn_int(), str_test_ip, False, m.mb_first_timeout) assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15') assert m.db.get_db_value(Register.IP_ADDRESS) == str_test_ip assert isclose(m.db.get_db_value(Register.POLLING_INTERVAL), 0.5) @@ -1803,3 +1806,30 @@ def test_timeout(config_tsun_inv1): assert SolarmanV5.MAX_DEF_IDLE_TIME == m._timeout() m.state = State.closed m.close() + +def test_fnc_dispatch(): + def msg(): + return + + _ = config_tsun_inv1 + m = MemoryStream(b'') + m.switch[1] = msg + m.switch[2] = "msg" + + _obj, _str = m.get_fnc_handler(1) + assert _obj == msg + assert _str == "'msg'" + + _obj, _str = m.get_fnc_handler(2) + assert _obj == m.msg_unknown + assert _str == "'msg'" + + _obj, _str = m.get_fnc_handler(3) + assert _obj == m.msg_unknown + assert _str == "'msg_unknown'" + +def test_timestamp(): + m = MemoryStream(b'') + ts = m._timestamp() + ts_emu = m._emu_timestamp() + assert ts == ts_emu + 24*60*60 \ No newline at end of file diff --git a/app/tests/test_solarman_emu.py b/app/tests/test_solarman_emu.py new file mode 100644 index 0000000..32787ab --- /dev/null +++ b/app/tests/test_solarman_emu.py @@ -0,0 +1,230 @@ +import pytest +import asyncio +from app.src.async_stream import AsyncIfcImpl, StreamPtr +from app.src.gen3plus.solarman_v5 import SolarmanV5, SolarmanBase +from app.src.gen3plus.solarman_emu import SolarmanEmu +from app.src.infos import Infos, Register +from app.tests.test_solarman import FakeIfc, MemoryStream, get_sn_int, get_sn, correct_checksum, config_tsun_inv1, msg_modbus_rsp +from 
app.tests.test_infos_g3p import str_test_ip, bytes_test_ip + +timestamp = 0x3224c8bc + +class InvStream(MemoryStream): + def __init__(self, msg=b''): + super().__init__(msg) + + def _emu_timestamp(self): + return timestamp + +class CldStream(SolarmanEmu): + def __init__(self, inv: InvStream): + _ifc = FakeIfc() + _ifc.remote.stream = inv + super().__init__(('test.local', 1234), _ifc, server_side=False, client_mode=False) + self.__msg = b'' + self.__msg_len = 0 + self.__offs = 0 + self.msg_count = 0 + self.msg_recvd = [] + + def _emu_timestamp(self): + return timestamp + + def append_msg(self, msg): + self.__msg += msg + self.__msg_len += len(msg) + + def _read(self) -> int: + copied_bytes = 0 + try: + if (self.__offs < self.__msg_len): + self.ifc.rx_fifo += self.__msg[self.__offs:] + copied_bytes = self.__msg_len - self.__offs + self.__offs = self.__msg_len + except Exception: + pass # ignore exceptions here + return copied_bytes + + def _SolarmanBase__flush_recv_msg(self) -> None: + self.msg_recvd.append( + { + 'control': self.control, + 'seq': str(self.seq), + 'data_len': self.data_len + } + ) + super()._SolarmanBase__flush_recv_msg() + self.msg_count += 1 + +@pytest.fixture +def device_ind_msg(bytes_test_ip): # 0x4110 + msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xbc\xc8\x24\x32' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x00\x01\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + bytes_test_ip + msg += b'\x0f\x00\x01\xb0' + msg += b'\x02\x0f\x00\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += 
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += correct_checksum(msg) + msg += b'\x15' + return msg + +@pytest.fixture +def inverter_ind_msg(): # 0x4210 + msg = b'\xa5\x99\x01\x10\x42\x00\x01' +get_sn() +b'\x01\xb0\x02\xbc\xc8' + msg += b'\x24\x32\x3c\x00\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00' + msg += b'\x59\x31\x37\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x31' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00' + msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00' + msg += 
b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41' + msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c' + msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05' + msg += b'\x00\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00' + msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00' + msg += b'\x00\x00\x00\x00' + msg += correct_checksum(msg) + msg += b'\x15' + return msg + +@pytest.fixture +def inverter_rsp_msg(): # 0x1210 + msg = b'\xa5\x0a\x00\x10\x12\x02\02' +get_sn() +b'\x01\x01' + msg += b'\x00\x00\x00\x00' + msg += b'\x3c\x00\x00\x00' + msg += correct_checksum(msg) + msg += b'\x15' + return msg + +@pytest.fixture +def heartbeat_ind(): + msg = b'\xa5\x01\x00\x10G\x00\x01\x00\x00\x00\x00\x00Y\x15' + return msg + +def test_emu_init_close(): + # received a message with wrong start byte plus an valid message + # the complete receive buffer must be cleared to + # find the next valid message + inv = InvStream() + cld = CldStream(inv) + cld.close() + + +@pytest.mark.asyncio +async def test_emu_start(config_tsun_inv1, msg_modbus_rsp, str_test_ip, device_ind_msg): + _ = config_tsun_inv1 + assert asyncio.get_running_loop() + inv = InvStream(msg_modbus_rsp) + + assert asyncio.get_running_loop() == inv.mb_timer.loop + await inv.send_start_cmd(get_sn_int(), str_test_ip, True, inv.mb_first_timeout) + inv.read() # read complete msg, and dispatch msg + assert not inv.header_valid # must be invalid, since msg was handled and buffer flushed + assert inv.msg_count == 1 + assert inv.control == 0x1510 + + cld = CldStream(inv) + cld.ifc.update_header_cb(inv.ifc.fwd_fifo.peek()) + assert inv.ifc.fwd_fifo.peek() == device_ind_msg + cld.close() + +def test_snd_hb(config_tsun_inv1, heartbeat_ind): + _ = config_tsun_inv1 + inv = InvStream() + cld = CldStream(inv) + + # await inv.send_start_cmd(get_sn_int(), str_test_ip, 
False, inv.mb_first_timeout) + cld.send_heartbeat_cb(0) + assert cld.ifc.tx_fifo.peek() == heartbeat_ind + cld.close() + +@pytest.mark.asyncio +async def test_snd_inv_data(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg): + _ = config_tsun_inv1 + inv = InvStream() + inv.db.set_db_def_value(Register.INVERTER_STATUS, 1) + inv.db.set_db_def_value(Register.DETECT_STATUS_1, 2) + inv.db.set_db_def_value(Register.VERSION, 'V4.0.10') + inv.db.set_db_def_value(Register.GRID_VOLTAGE, 224.8) + inv.db.set_db_def_value(Register.GRID_CURRENT, 0.73) + inv.db.set_db_def_value(Register.GRID_FREQUENCY, 50.05) + assert asyncio.get_running_loop() == inv.mb_timer.loop + await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout) + inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value + + cld = CldStream(inv) + cld.time_ofs = 0x33e447a0 + cld.last_sync = cld._emu_timestamp() - 60 + cld.pkt_cnt = 0x802 + assert cld.data_up_inv == 17 # check test value + cld.data_up_inv = 0.1 # speedup test first data msg + cld._init_new_client_conn() + cld.data_up_inv = 0.5 # timeout for second data msg + await asyncio.sleep(0.2) + assert cld.ifc.tx_fifo.get() == inverter_ind_msg + + cld.append_msg(inverter_rsp_msg) + cld.read() # read complete msg, and dispatch msg + + assert not cld.header_valid # must be invalid, since msg was handled and buffer flushed + assert cld.msg_count == 1 + assert cld.header_len==11 + assert cld.snr == 2070233889 + assert cld.unique_id == '2070233889' + assert cld.msg_recvd[0]['control']==0x1210 + assert cld.msg_recvd[0]['seq']=='02:02' + assert cld.msg_recvd[0]['data_len']==0x0a + assert '02b0' == cld.db.get_db_value(Register.SENSOR_LIST, None) + assert cld.db.stat['proxy']['Unknown_Msg'] == 0 + + cld.close() + +@pytest.mark.asyncio +async def test_rcv_invalid(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg): + _ = config_tsun_inv1 + inv = InvStream() + assert asyncio.get_running_loop() == inv.mb_timer.loop + await 
inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout) + inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value + + cld = CldStream(inv) + cld._init_new_client_conn() + + cld.append_msg(inverter_ind_msg) + cld.read() # read complete msg, and dispatch msg + + assert not cld.header_valid # must be invalid, since msg was handled and buffer flushed + assert cld.msg_count == 1 + assert cld.header_len==11 + assert cld.snr == 2070233889 + assert cld.unique_id == '2070233889' + assert cld.msg_recvd[0]['control']==0x4210 + assert cld.msg_recvd[0]['seq']=='00:01' + assert cld.msg_recvd[0]['data_len']==0x199 + assert '02b0' == cld.db.get_db_value(Register.SENSOR_LIST, None) + assert cld.db.stat['proxy']['Unknown_Msg'] == 1 + + + cld.close() From 211a9580801ba4bd25977dab0540d7ff4b260e9a Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Wed, 20 Nov 2024 20:08:20 +0100 Subject: [PATCH 20/32] add PROD_COMPL_TYPE to trace --- CHANGELOG.md | 1 + app/src/gen3/infos_g3.py | 1 + app/src/gen3plus/infos_g3p.py | 3 ++- app/src/infos.py | 2 ++ app/src/modbus.py | 4 ++-- app/tests/test_infos_g3p.py | 2 +- app/tests/test_solarman_emu.py | 1 + 7 files changed, 10 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3bc961..84b7d3d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- add PROD_COMPL_TYPE to trace - add SolarmanV5 messages builder - report inverter alarms and faults per MQTT [#7](https://github.com/s-allius/tsun-gen3-proxy/issues/7) diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 950a956..97a4f5e 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -84,6 +84,7 @@ class RegisterMap: 0x000012c0: {'reg': Register.RATED_LEVEL}, 0x00001324: {'reg': Register.INPUT_COEFFICIENT, 'ratio': 100/1024}, 0x00001388: {'reg': Register.GRID_VOLT_CAL_COEF}, + 0x00002710: {'reg': 
Register.PROD_COMPL_TYPE}, 0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024}, } diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index aec8bfb..b866fea 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -91,7 +91,8 @@ class RegisterMap: 0x4201012c: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'}, 0x4201012e: {'reg': None, 'fmt': '!H', 'const': 1024}, # noqa: E501 0x42010130: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1024, 1, 0xffff, 1)}, # noqa: E501 - 0x42010138: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (6, 0x68, 0x68, 0x500)}, # noqa: E501 + 0x42010138: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'}, + 0x4201013a: {'reg': None, 'fmt': FMT_3_16BIT_VAL, 'const': (0x68, 0x68, 0x500)}, # noqa: E501 0x42010140: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x9cd, 0x7b6, 0x139c, 0x1324)}, # noqa: E501 0x42010148: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1, 0x7ae, 0x40f, 0x41)}, # noqa: E501 0x42010150: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0xf, 0xa64, 0xa64, 0x6)}, # noqa: E501 diff --git a/app/src/infos.py b/app/src/infos.py index e899f00..fb8f0d1 100644 --- a/app/src/infos.py +++ b/app/src/infos.py @@ -30,6 +30,7 @@ class Register(Enum): INPUT_COEFFICIENT = 28 GRID_VOLT_CAL_COEF = 29 OUTPUT_COEFFICIENT = 30 + PROD_COMPL_TYPE = 31 INVERTER_CNT = 50 UNKNOWN_SNR = 51 UNKNOWN_MSG = 52 @@ -511,6 +512,7 @@ class Infos: Register.OUTPUT_SHUTDOWN: {'name': ['other', 'Output_Shutdown'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.RATED_LEVEL: {'name': ['other', 'Rated_Level'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.GRID_VOLT_CAL_COEF: {'name': ['other', 'Grid_Volt_Cal_Coef'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PROD_COMPL_TYPE: {'name': ['other', 'Prod_Compliance_Type'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 Register.INV_UNKNOWN_1: {'name': ['inv_unknown', 'Unknown_1'], 'level': logging.DEBUG, 'unit': 
''}, # noqa: E501 } diff --git a/app/src/modbus.py b/app/src/modbus.py index ace14fe..8c88537 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -44,11 +44,11 @@ class Modbus(): 0x2001: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, # noqa: E501 0x2003: {'reg': Register.WORK_MODE, 'fmt': '!H'}, 0x2006: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'}, - 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 + 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 0x2008: {'reg': Register.RATED_LEVEL, 'fmt': '!H'}, 0x2009: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 0x200a: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'}, - + 0x2010: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'}, 0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501 diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index 51af74d..150a666 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -120,7 +120,7 @@ def test_parse_4210(inverter_data: bytes): "pv4": {"Voltage": 1.7, "Current": 0.01, "Power": 0.0, "Total_Generation": 15.58}}, "total": {"Daily_Generation": 0.11, "Total_Generation": 101.36}, "inv_unknown": {"Unknown_1": 512}, - "other": {"Output_Shutdown": 65535, "Rated_Level": 3, "Grid_Volt_Cal_Coef": 1024} + "other": {"Output_Shutdown": 65535, "Rated_Level": 3, "Grid_Volt_Cal_Coef": 1024, "Prod_Compliance_Type": 6} }) def test_build_4210(inverter_data: bytes): diff --git a/app/tests/test_solarman_emu.py b/app/tests/test_solarman_emu.py index 32787ab..0eadbc6 100644 --- a/app/tests/test_solarman_emu.py +++ b/app/tests/test_solarman_emu.py @@ -170,6 +170,7 @@ async def test_snd_inv_data(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg inv.db.set_db_def_value(Register.GRID_VOLTAGE, 224.8) inv.db.set_db_def_value(Register.GRID_CURRENT, 0.73) 
inv.db.set_db_def_value(Register.GRID_FREQUENCY, 50.05) + inv.db.set_db_def_value(Register.PROD_COMPL_TYPE, 6) assert asyncio.get_running_loop() == inv.mb_timer.loop await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout) inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value From 84231c034cfc023744ef80e4945166a3cbfe3744 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 23 Nov 2024 16:31:44 +0100 Subject: [PATCH 21/32] specify more offset of the 0x4110 message --- app/src/gen3plus/infos_g3p.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index b866fea..e438fa9 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -29,9 +29,11 @@ class RegisterMap: 0x4102005c: {'reg': None, 'fmt': ' Date: Sun, 24 Nov 2024 22:07:43 +0100 Subject: [PATCH 22/32] S allius/pytest (#211) * - fix pytest setup that can be startet from the rootdir - support python venv environment - add pytest.ini - move common settings from .vscode/settings.json into pytest.ini - add missing requirements - fix import paths for pytests * - support python venv environment * initial version * - add missing requirements python-dotenv * fix import paths for pytests * fix pytest warnings * initial version * report 5 slowest test durations * add more vscode settings for python --- .gitignore | 1 + .python-version | 1 + .vscode/settings.json | 15 +++++++++++---- CHANGELOG.md | 6 ++++++ app/requirements-test.txt | 1 + app/src/async_stream.py | 14 ++++---------- app/src/byte_fifo.py | 6 +----- app/src/gen3/infos_g3.py | 5 +---- app/src/gen3/inverter_g3.py | 9 +++------ app/src/gen3/talent.py | 20 ++++++-------------- app/src/gen3plus/infos_g3p.py | 5 +---- app/src/gen3plus/inverter_g3p.py | 11 +++-------- app/src/gen3plus/solarman_emu.py | 14 ++++---------- app/src/gen3plus/solarman_v5.py | 20 ++++++-------------- app/src/inverter_base.py | 23 
+++++++---------------- app/src/inverter_ifc.py | 5 +---- app/src/messages.py | 18 +++++------------- app/src/modbus.py | 5 +---- app/src/modbus_tcp.py | 11 +++-------- app/src/mqtt.py | 15 +++++---------- app/src/protocol_ifc.py | 8 ++------ app/src/proxy.py | 11 +++-------- app/tests/test_async_stream.py | 21 +++++++++++---------- app/tests/test_byte_fifo.py | 2 +- app/tests/test_config.py | 2 +- app/tests/test_infos.py | 4 ++-- app/tests/test_infos_g3.py | 4 ++-- app/tests/test_infos_g3p.py | 6 +++--- app/tests/test_inverter_base.py | 22 +++++++++++----------- app/tests/test_inverter_g3.py | 28 ++++++++++++++-------------- app/tests/test_inverter_g3p.py | 26 +++++++++++++------------- app/tests/test_modbus.py | 4 ++-- app/tests/test_modbus_tcp.py | 16 ++++++++-------- app/tests/test_mqtt.py | 12 ++++++------ app/tests/test_proxy.py | 10 +++++----- app/tests/test_singleton.py | 8 ++++---- app/tests/test_solarman.py | 12 ++++++------ app/tests/test_solarman_emu.py | 14 ++++++++------ app/tests/test_talent.py | 12 ++++++------ pytest.ini | 8 ++++++++ 40 files changed, 187 insertions(+), 248 deletions(-) create mode 100644 .python-version create mode 100644 pytest.ini diff --git a/.gitignore b/.gitignore index 1da811a..0c6871d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ __pycache__ .pytest_cache +.venv/** bin/** mosquitto/** homeassistant/** diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..56bb660 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.12.7 diff --git a/.vscode/settings.json b/.vscode/settings.json index 57033c8..2c024dd 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,10 +1,12 @@ { + "python.analysis.extraPaths": [ + "app/src", + ".venv/lib" ], "python.testing.pytestArgs": [ - "-vv", - "app", + "-v", "--cov=app/src", "--cov-report=xml", - "--cov-report=html", + "app", "system_tests" ], "python.testing.unittestEnabled": false, @@ -18,5 +20,10 @@ }, "files.exclude": { "**/*.pyi": true - 
} + }, + "python.analysis.typeEvaluation.deprecateTypingAliases": true, + "python.autoComplete.extraPaths": [ + ".venv/lib" + ], + "coverage-gutters.coverageBaseDir": "tsun" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 80edecd..09063c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [0.11.1] - 2024-11-20 +- fix pytest setup that can be startet from the rootdir + - support python venv environment + - add pytest.ini + - move common settings from .vscode/settings.json into pytest.ini + - add missing requirements + - fix import paths for pytests - Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.10.5 to 3.10.11. ## [0.11.0] - 2024-10-13 diff --git a/app/requirements-test.txt b/app/requirements-test.txt index d07ed29..bbf4e68 100644 --- a/app/requirements-test.txt +++ b/app/requirements-test.txt @@ -2,5 +2,6 @@ pytest pytest-asyncio pytest-cov + python-dotenv mock coverage \ No newline at end of file diff --git a/app/src/async_stream.py b/app/src/async_stream.py index ca642b2..ec060b2 100644 --- a/app/src/async_stream.py +++ b/app/src/async_stream.py @@ -6,16 +6,10 @@ from asyncio import StreamReader, StreamWriter from typing import Self from itertools import count -if __name__ == "app.src.async_stream": - from app.src.proxy import Proxy - from app.src.byte_fifo import ByteFifo - from app.src.async_ifc import AsyncIfc - from app.src.infos import Infos -else: # pragma: no cover - from proxy import Proxy - from byte_fifo import ByteFifo - from async_ifc import AsyncIfc - from infos import Infos +from proxy import Proxy +from byte_fifo import ByteFifo +from async_ifc import AsyncIfc +from infos import Infos import gc diff --git a/app/src/byte_fifo.py b/app/src/byte_fifo.py index 27d2512..959eab2 100644 --- a/app/src/byte_fifo.py +++ b/app/src/byte_fifo.py @@ -1,8 +1,4 @@ - -if __name__ == "app.src.byte_fifo": - from app.src.messages 
import hex_dump_str, hex_dump_memory -else: # pragma: no cover - from messages import hex_dump_str, hex_dump_memory +from messages import hex_dump_str, hex_dump_memory class ByteFifo: diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 97a4f5e..efa220c 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -3,10 +3,7 @@ import struct import logging from typing import Generator -if __name__ == "app.src.gen3.infos_g3": - from app.src.infos import Infos, Register -else: # pragma: no cover - from infos import Infos, Register +from infos import Infos, Register class RegisterMap: diff --git a/app/src/gen3/inverter_g3.py b/app/src/gen3/inverter_g3.py index fd09a8f..efaeca0 100644 --- a/app/src/gen3/inverter_g3.py +++ b/app/src/gen3/inverter_g3.py @@ -1,10 +1,7 @@ from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.gen3.inverter_g3": - from app.src.inverter_base import InverterBase - from app.src.gen3.talent import Talent -else: # pragma: no cover - from inverter_base import InverterBase - from gen3.talent import Talent + +from inverter_base import InverterBase +from gen3.talent import Talent class InverterG3(InverterBase): diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 57f875c..da3ebc8 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -4,20 +4,12 @@ from zoneinfo import ZoneInfo from datetime import datetime from tzlocal import get_localzone -if __name__ == "app.src.gen3.talent": - from app.src.async_ifc import AsyncIfc - from app.src.messages import Message, State - from app.src.modbus import Modbus - from app.src.config import Config - from app.src.gen3.infos_g3 import InfosG3 - from app.src.infos import Register -else: # pragma: no cover - from async_ifc import AsyncIfc - from messages import Message, State - from modbus import Modbus - from config import Config - from gen3.infos_g3 import InfosG3 - from infos import Register +from async_ifc import AsyncIfc +from messages 
import Message, State +from modbus import Modbus +from config import Config +from gen3.infos_g3 import InfosG3 +from infos import Register logger = logging.getLogger('msg') diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index e438fa9..417487a 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -1,10 +1,7 @@ from typing import Generator -if __name__ == "app.src.gen3plus.infos_g3p": - from app.src.infos import Infos, Register, ProxyMode, Fmt -else: # pragma: no cover - from infos import Infos, Register, ProxyMode, Fmt +from infos import Infos, Register, ProxyMode, Fmt class RegisterMap: diff --git a/app/src/gen3plus/inverter_g3p.py b/app/src/gen3plus/inverter_g3p.py index 74a025a..f3680c9 100644 --- a/app/src/gen3plus/inverter_g3p.py +++ b/app/src/gen3plus/inverter_g3p.py @@ -1,13 +1,8 @@ from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.gen3plus.inverter_g3p": - from app.src.inverter_base import InverterBase - from app.src.gen3plus.solarman_v5 import SolarmanV5 - from app.src.gen3plus.solarman_emu import SolarmanEmu -else: # pragma: no cover - from inverter_base import InverterBase - from gen3plus.solarman_v5 import SolarmanV5 - from gen3plus.solarman_emu import SolarmanEmu +from inverter_base import InverterBase +from gen3plus.solarman_v5 import SolarmanV5 +from gen3plus.solarman_emu import SolarmanEmu class InverterG3P(InverterBase): diff --git a/app/src/gen3plus/solarman_emu.py b/app/src/gen3plus/solarman_emu.py index 7865d5c..66035bb 100644 --- a/app/src/gen3plus/solarman_emu.py +++ b/app/src/gen3plus/solarman_emu.py @@ -1,16 +1,10 @@ import logging import struct -if __name__ == "app.src.gen3plus.solarman_emu": - from app.src.async_ifc import AsyncIfc - from app.src.gen3plus.solarman_v5 import SolarmanBase - from app.src.my_timer import Timer - from app.src.infos import Register -else: # pragma: no cover - from async_ifc import AsyncIfc - from gen3plus.solarman_v5 import SolarmanBase 
- from my_timer import Timer - from infos import Register +from async_ifc import AsyncIfc +from gen3plus.solarman_v5 import SolarmanBase +from my_timer import Timer +from infos import Register logger = logging.getLogger('msg') diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 70e1939..f95894e 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -4,20 +4,12 @@ import time import asyncio from datetime import datetime -if __name__ == "app.src.gen3plus.solarman_v5": - from app.src.async_ifc import AsyncIfc - from app.src.messages import hex_dump_memory, Message, State - from app.src.modbus import Modbus - from app.src.config import Config - from app.src.gen3plus.infos_g3p import InfosG3P - from app.src.infos import Register, Fmt -else: # pragma: no cover - from async_ifc import AsyncIfc - from messages import hex_dump_memory, Message, State - from config import Config - from modbus import Modbus - from gen3plus.infos_g3p import InfosG3P - from infos import Register, Fmt +from async_ifc import AsyncIfc +from messages import hex_dump_memory, Message, State +from config import Config +from modbus import Modbus +from gen3plus.infos_g3p import InfosG3P +from infos import Register, Fmt logger = logging.getLogger('msg') diff --git a/app/src/inverter_base.py b/app/src/inverter_base.py index 689a506..757b883 100644 --- a/app/src/inverter_base.py +++ b/app/src/inverter_base.py @@ -7,22 +7,13 @@ import gc from aiomqtt import MqttCodeError from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.inverter_base": - from app.src.inverter_ifc import InverterIfc - from app.src.proxy import Proxy - from app.src.async_stream import StreamPtr - from app.src.async_stream import AsyncStreamClient - from app.src.async_stream import AsyncStreamServer - from app.src.config import Config - from app.src.infos import Infos -else: # pragma: no cover - from inverter_ifc import InverterIfc - from proxy import Proxy - 
from async_stream import StreamPtr - from async_stream import AsyncStreamClient - from async_stream import AsyncStreamServer - from config import Config - from infos import Infos +from inverter_ifc import InverterIfc +from proxy import Proxy +from async_stream import StreamPtr +from async_stream import AsyncStreamClient +from async_stream import AsyncStreamServer +from config import Config +from infos import Infos logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/inverter_ifc.py b/app/src/inverter_ifc.py index 55fc1b9..11bd5e8 100644 --- a/app/src/inverter_ifc.py +++ b/app/src/inverter_ifc.py @@ -2,10 +2,7 @@ from abc import abstractmethod import logging from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.inverter_ifc": - from app.src.iter_registry import AbstractIterMeta -else: # pragma: no cover - from iter_registry import AbstractIterMeta +from iter_registry import AbstractIterMeta logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/messages.py b/app/src/messages.py index 9c4a4aa..eecfc80 100644 --- a/app/src/messages.py +++ b/app/src/messages.py @@ -3,19 +3,11 @@ import weakref from typing import Callable from enum import Enum - -if __name__ == "app.src.messages": - from app.src.async_ifc import AsyncIfc - from app.src.protocol_ifc import ProtocolIfc - from app.src.infos import Infos, Register - from app.src.modbus import Modbus - from app.src.my_timer import Timer -else: # pragma: no cover - from async_ifc import AsyncIfc - from protocol_ifc import ProtocolIfc - from infos import Infos, Register - from modbus import Modbus - from my_timer import Timer +from async_ifc import AsyncIfc +from protocol_ifc import ProtocolIfc +from infos import Infos, Register +from modbus import Modbus +from my_timer import Timer logger = logging.getLogger('msg') diff --git a/app/src/modbus.py b/app/src/modbus.py index 8c88537..5c64086 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -16,10 +16,7 @@ import logging import asyncio from 
typing import Generator, Callable -if __name__ == "app.src.modbus": - from app.src.infos import Register, Fmt -else: # pragma: no cover - from infos import Register, Fmt +from infos import Register, Fmt logger = logging.getLogger('data') diff --git a/app/src/modbus_tcp.py b/app/src/modbus_tcp.py index abd1332..f3788d4 100644 --- a/app/src/modbus_tcp.py +++ b/app/src/modbus_tcp.py @@ -2,14 +2,9 @@ import logging import traceback import asyncio -if __name__ == "app.src.modbus_tcp": - from app.src.config import Config - from app.src.gen3plus.inverter_g3p import InverterG3P - from app.src.infos import Infos -else: # pragma: no cover - from config import Config - from gen3plus.inverter_g3p import InverterG3P - from infos import Infos +from config import Config +from gen3plus.inverter_g3p import InverterG3P +from infos import Infos logger = logging.getLogger('conn') diff --git a/app/src/mqtt.py b/app/src/mqtt.py index 83e0fd4..f52b797 100644 --- a/app/src/mqtt.py +++ b/app/src/mqtt.py @@ -2,16 +2,11 @@ import asyncio import logging import aiomqtt import traceback -if __name__ == "app.src.mqtt": - from app.src.modbus import Modbus - from app.src.messages import Message - from app.src.config import Config - from app.src.singleton import Singleton -else: # pragma: no cover - from modbus import Modbus - from messages import Message - from config import Config - from singleton import Singleton + +from modbus import Modbus +from messages import Message +from config import Config +from singleton import Singleton logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/protocol_ifc.py b/app/src/protocol_ifc.py index 46795e4..3b6c886 100644 --- a/app/src/protocol_ifc.py +++ b/app/src/protocol_ifc.py @@ -1,11 +1,7 @@ from abc import abstractmethod -if __name__ == "app.src.protocol_ifc": - from app.src.iter_registry import AbstractIterMeta - from app.src.async_ifc import AsyncIfc -else: # pragma: no cover - from iter_registry import AbstractIterMeta - from async_ifc import 
AsyncIfc +from async_ifc import AsyncIfc +from iter_registry import AbstractIterMeta class ProtocolIfc(metaclass=AbstractIterMeta): diff --git a/app/src/proxy.py b/app/src/proxy.py index 9b75c37..eadc3ac 100644 --- a/app/src/proxy.py +++ b/app/src/proxy.py @@ -2,14 +2,9 @@ import asyncio import logging import json -if __name__ == "app.src.proxy": - from app.src.config import Config - from app.src.mqtt import Mqtt - from app.src.infos import Infos -else: # pragma: no cover - from config import Config - from mqtt import Mqtt - from infos import Infos +from config import Config +from mqtt import Mqtt +from infos import Infos logger_mqtt = logging.getLogger('mqtt') diff --git a/app/tests/test_async_stream.py b/app/tests/test_async_stream.py index 3aaf35e..3f4db93 100644 --- a/app/tests/test_async_stream.py +++ b/app/tests/test_async_stream.py @@ -4,12 +4,13 @@ import asyncio import gc import time -from app.src.infos import Infos -from app.src.inverter_base import InverterBase -from app.src.async_stream import AsyncStreamServer, AsyncStreamClient, StreamPtr -from app.src.messages import Message -from app.tests.test_modbus_tcp import FakeReader, FakeWriter -from app.tests.test_inverter_base import config_conn, patch_open_connection +from infos import Infos +from inverter_base import InverterBase +from async_stream import AsyncStreamServer, AsyncStreamClient, StreamPtr +from messages import Message + +from test_modbus_tcp import FakeReader, FakeWriter +from test_inverter_base import config_conn, patch_open_connection pytest_plugins = ('pytest_asyncio',) @@ -541,7 +542,7 @@ async def test_forward_resp(): remote = StreamPtr(None) cnt = 0 - async def _close_cb(): + def _close_cb(): nonlocal cnt, remote, ifc cnt += 1 @@ -550,7 +551,7 @@ async def test_forward_resp(): create_remote(remote, TestType.FWD_NO_EXCPT) ifc.fwd_add(b'test-forward_msg') await ifc.client_loop('') - assert cnt == 0 + assert cnt == 1 del ifc @pytest.mark.asyncio @@ -559,7 +560,7 @@ async def 
test_forward_resp2(): remote = StreamPtr(None) cnt = 0 - async def _close_cb(): + def _close_cb(): nonlocal cnt, remote, ifc cnt += 1 @@ -568,5 +569,5 @@ async def test_forward_resp2(): create_remote(remote, TestType.FWD_NO_EXCPT) ifc.fwd_add(b'test-forward_msg') await ifc.client_loop('') - assert cnt == 0 + assert cnt == 1 del ifc diff --git a/app/tests/test_byte_fifo.py b/app/tests/test_byte_fifo.py index 1544cc0..f1392db 100644 --- a/app/tests/test_byte_fifo.py +++ b/app/tests/test_byte_fifo.py @@ -1,6 +1,6 @@ # test_with_pytest.py -from app.src.byte_fifo import ByteFifo +from byte_fifo import ByteFifo def test_fifo(): read = ByteFifo() diff --git a/app/tests/test_config.py b/app/tests/test_config.py index 5ceb1b3..aaac45c 100644 --- a/app/tests/test_config.py +++ b/app/tests/test_config.py @@ -1,7 +1,7 @@ # test_with_pytest.py import tomllib from schema import SchemaMissingKeyError -from app.src.config import Config +from config import Config class TstConfig(Config): diff --git a/app/tests/test_infos.py b/app/tests/test_infos.py index ed4e293..43c0050 100644 --- a/app/tests/test_infos.py +++ b/app/tests/test_infos.py @@ -2,8 +2,8 @@ import pytest import json, math import logging -from app.src.infos import Register, ClrAtMidnight -from app.src.infos import Infos, Fmt +from infos import Register, ClrAtMidnight +from infos import Infos, Fmt def test_statistic_counter(): i = Infos() diff --git a/app/tests/test_infos_g3.py b/app/tests/test_infos_g3.py index 18d5854..da3eaed 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -1,7 +1,7 @@ # test_with_pytest.py import pytest, json, math -from app.src.infos import Register -from app.src.gen3.infos_g3 import InfosG3, RegisterMap +from infos import Register +from gen3.infos_g3 import InfosG3, RegisterMap @pytest.fixture def contr_data_seq(): # Get Time Request message diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index 150a666..e0cac05 100644 --- 
a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -1,9 +1,9 @@ # test_with_pytest.py import pytest, json, math, random -from app.src.infos import Register -from app.src.gen3plus.infos_g3p import InfosG3P -from app.src.gen3plus.infos_g3p import RegisterMap +from infos import Register +from gen3plus.infos_g3p import InfosG3P +from gen3plus.infos_g3p import RegisterMap @pytest.fixture(scope="session") def str_test_ip(): diff --git a/app/tests/test_inverter_base.py b/app/tests/test_inverter_base.py index 054d729..5962a49 100644 --- a/app/tests/test_inverter_base.py +++ b/app/tests/test_inverter_base.py @@ -5,14 +5,14 @@ import gc from mock import patch from enum import Enum -from app.src.infos import Infos -from app.src.config import Config -from app.src.gen3.talent import Talent -from app.src.inverter_base import InverterBase -from app.src.singleton import Singleton -from app.src.async_stream import AsyncStream, AsyncStreamClient +from infos import Infos +from config import Config +from gen3.talent import Talent +from inverter_base import InverterBase +from singleton import Singleton +from async_stream import AsyncStream, AsyncStreamClient -from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname +from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname pytest_plugins = ('pytest_asyncio',) @@ -69,13 +69,13 @@ class FakeWriter(): async def wait_closed(self): return -class TestType(Enum): +class MockType(Enum): RD_TEST_0_BYTES = 1 RD_TEST_TIMEOUT = 2 RD_TEST_EXCEPT = 3 -test = TestType.RD_TEST_0_BYTES +test = MockType.RD_TEST_0_BYTES @pytest.fixture def patch_open_connection(): @@ -85,9 +85,9 @@ def patch_open_connection(): def new_open(host: str, port: int): global test - if test == TestType.RD_TEST_TIMEOUT: + if test == MockType.RD_TEST_TIMEOUT: raise ConnectionRefusedError - elif test == TestType.RD_TEST_EXCEPT: + elif test == MockType.RD_TEST_EXCEPT: raise ValueError("Value 
cannot be negative") # Compliant return new_conn(None) diff --git a/app/tests/test_inverter_g3.py b/app/tests/test_inverter_g3.py index 45438bb..a841dbc 100644 --- a/app/tests/test_inverter_g3.py +++ b/app/tests/test_inverter_g3.py @@ -5,15 +5,15 @@ import sys,gc from mock import patch from enum import Enum -from app.src.infos import Infos -from app.src.config import Config -from app.src.proxy import Proxy -from app.src.inverter_base import InverterBase -from app.src.singleton import Singleton -from app.src.gen3.inverter_g3 import InverterG3 -from app.src.async_stream import AsyncStream +from infos import Infos +from config import Config +from proxy import Proxy +from inverter_base import InverterBase +from singleton import Singleton +from gen3.inverter_g3 import InverterG3 +from async_stream import AsyncStream -from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname +from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname pytest_plugins = ('pytest_asyncio',) @@ -70,13 +70,13 @@ class FakeWriter(): async def wait_closed(self): return -class TestType(Enum): +class MockType(Enum): RD_TEST_0_BYTES = 1 RD_TEST_TIMEOUT = 2 RD_TEST_EXCEPT = 3 -test = TestType.RD_TEST_0_BYTES +test = MockType.RD_TEST_0_BYTES @pytest.fixture def patch_open_connection(): @@ -86,9 +86,9 @@ def patch_open_connection(): def new_open(host: str, port: int): global test - if test == TestType.RD_TEST_TIMEOUT: + if test == MockType.RD_TEST_TIMEOUT: raise ConnectionRefusedError - elif test == TestType.RD_TEST_EXCEPT: + elif test == MockType.RD_TEST_EXCEPT: raise ValueError("Value cannot be negative") # Compliant return new_conn(None) @@ -144,14 +144,14 @@ async def test_remote_except(config_conn, patch_open_connection): assert asyncio.get_running_loop() global test - test = TestType.RD_TEST_TIMEOUT + test = MockType.RD_TEST_TIMEOUT with InverterG3(FakeReader(), FakeWriter()) as inverter: await inverter.create_remote() await 
asyncio.sleep(0) assert inverter.remote.stream==None - test = TestType.RD_TEST_EXCEPT + test = MockType.RD_TEST_EXCEPT await inverter.create_remote() await asyncio.sleep(0) assert inverter.remote.stream==None diff --git a/app/tests/test_inverter_g3p.py b/app/tests/test_inverter_g3p.py index d06ee80..307018b 100644 --- a/app/tests/test_inverter_g3p.py +++ b/app/tests/test_inverter_g3p.py @@ -4,14 +4,14 @@ import asyncio from mock import patch from enum import Enum -from app.src.infos import Infos -from app.src.config import Config -from app.src.proxy import Proxy -from app.src.inverter_base import InverterBase -from app.src.singleton import Singleton -from app.src.gen3plus.inverter_g3p import InverterG3P +from infos import Infos +from config import Config +from proxy import Proxy +from inverter_base import InverterBase +from singleton import Singleton +from gen3plus.inverter_g3p import InverterG3P -from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname +from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname pytest_plugins = ('pytest_asyncio',) @@ -69,13 +69,13 @@ class FakeWriter(): async def wait_closed(self): return -class TestType(Enum): +class MockType(Enum): RD_TEST_0_BYTES = 1 RD_TEST_TIMEOUT = 2 RD_TEST_EXCEPT = 3 -test = TestType.RD_TEST_0_BYTES +test = MockType.RD_TEST_0_BYTES @pytest.fixture def patch_open_connection(): @@ -85,9 +85,9 @@ def patch_open_connection(): def new_open(host: str, port: int): global test - if test == TestType.RD_TEST_TIMEOUT: + if test == MockType.RD_TEST_TIMEOUT: raise ConnectionRefusedError - elif test == TestType.RD_TEST_EXCEPT: + elif test == MockType.RD_TEST_EXCEPT: raise ValueError("Value cannot be negative") # Compliant return new_conn(None) @@ -121,14 +121,14 @@ async def test_remote_except(config_conn, patch_open_connection): assert asyncio.get_running_loop() global test - test = TestType.RD_TEST_TIMEOUT + test = MockType.RD_TEST_TIMEOUT with 
InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: await inverter.create_remote() await asyncio.sleep(0) assert inverter.remote.stream==None - test = TestType.RD_TEST_EXCEPT + test = MockType.RD_TEST_EXCEPT await inverter.create_remote() await asyncio.sleep(0) assert inverter.remote.stream==None diff --git a/app/tests/test_modbus.py b/app/tests/test_modbus.py index 53e735f..b6914b0 100644 --- a/app/tests/test_modbus.py +++ b/app/tests/test_modbus.py @@ -1,8 +1,8 @@ # test_with_pytest.py import pytest import asyncio -from app.src.modbus import Modbus -from app.src.infos import Infos, Register +from modbus import Modbus +from infos import Infos, Register pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index 029a6f3..e901e96 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -5,14 +5,14 @@ from aiomqtt import MqttCodeError from mock import patch from enum import Enum -from app.src.singleton import Singleton -from app.src.config import Config -from app.src.infos import Infos -from app.src.mqtt import Mqtt -from app.src.inverter_base import InverterBase -from app.src.messages import Message, State -from app.src.proxy import Proxy -from app.src.modbus_tcp import ModbusConn, ModbusTcp +from singleton import Singleton +from config import Config +from infos import Infos +from mqtt import Mqtt +from inverter_base import InverterBase +from messages import Message, State +from proxy import Proxy +from modbus_tcp import ModbusConn, ModbusTcp pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 9fb857e..80bf436 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -5,12 +5,12 @@ import aiomqtt import logging from mock import patch, Mock -from app.src.async_stream import AsyncIfcImpl -from app.src.singleton import Singleton -from app.src.mqtt import Mqtt -from app.src.modbus import Modbus -from 
app.src.gen3plus.solarman_v5 import SolarmanV5 -from app.src.config import Config +from async_stream import AsyncIfcImpl +from singleton import Singleton +from mqtt import Mqtt +from modbus import Modbus +from gen3plus.solarman_v5 import SolarmanV5 +from config import Config pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_proxy.py b/app/tests/test_proxy.py index bee01fd..aa6c739 100644 --- a/app/tests/test_proxy.py +++ b/app/tests/test_proxy.py @@ -5,11 +5,11 @@ import aiomqtt import logging from mock import patch, Mock -from app.src.singleton import Singleton -from app.src.proxy import Proxy -from app.src.mqtt import Mqtt -from app.src.gen3plus.solarman_v5 import SolarmanV5 -from app.src.config import Config +from singleton import Singleton +from proxy import Proxy +from mqtt import Mqtt +from gen3plus.solarman_v5 import SolarmanV5 +from config import Config pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_singleton.py b/app/tests/test_singleton.py index d645e08..9d9eb94 100644 --- a/app/tests/test_singleton.py +++ b/app/tests/test_singleton.py @@ -1,16 +1,16 @@ # test_with_pytest.py import pytest -from app.src.singleton import Singleton +from singleton import Singleton -class Test(metaclass=Singleton): +class Example(metaclass=Singleton): def __init__(self): pass # is a dummy test class def test_singleton_metaclass(): Singleton._instances.clear() - a = Test() + a = Example() assert 1 == len(Singleton._instances) - b = Test() + b = Example() assert 1 == len(Singleton._instances) assert a is b del a diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index 52b94ca..6d19688 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -5,12 +5,12 @@ import asyncio import logging import random from math import isclose -from app.src.async_stream import AsyncIfcImpl, StreamPtr -from app.src.gen3plus.solarman_v5 import SolarmanV5, SolarmanBase -from app.src.config import Config -from app.src.infos import 
Infos, Register -from app.src.modbus import Modbus -from app.src.messages import State, Message +from async_stream import AsyncIfcImpl, StreamPtr +from gen3plus.solarman_v5 import SolarmanV5, SolarmanBase +from config import Config +from infos import Infos, Register +from modbus import Modbus +from messages import State, Message pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_solarman_emu.py b/app/tests/test_solarman_emu.py index 0eadbc6..41e0e48 100644 --- a/app/tests/test_solarman_emu.py +++ b/app/tests/test_solarman_emu.py @@ -1,11 +1,13 @@ import pytest import asyncio -from app.src.async_stream import AsyncIfcImpl, StreamPtr -from app.src.gen3plus.solarman_v5 import SolarmanV5, SolarmanBase -from app.src.gen3plus.solarman_emu import SolarmanEmu -from app.src.infos import Infos, Register -from app.tests.test_solarman import FakeIfc, MemoryStream, get_sn_int, get_sn, correct_checksum, config_tsun_inv1, msg_modbus_rsp -from app.tests.test_infos_g3p import str_test_ip, bytes_test_ip + +from async_stream import AsyncIfcImpl, StreamPtr +from gen3plus.solarman_v5 import SolarmanV5, SolarmanBase +from gen3plus.solarman_emu import SolarmanEmu +from infos import Infos, Register + +from test_solarman import FakeIfc, MemoryStream, get_sn_int, get_sn, correct_checksum, config_tsun_inv1, msg_modbus_rsp +from test_infos_g3p import str_test_ip, bytes_test_ip timestamp = 0x3224c8bc diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index ae156bb..32fd6fe 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -1,12 +1,12 @@ # test_with_pytest.py import pytest, logging, asyncio from math import isclose -from app.src.async_stream import AsyncIfcImpl, StreamPtr -from app.src.gen3.talent import Talent, Control -from app.src.config import Config -from app.src.infos import Infos, Register -from app.src.modbus import Modbus -from app.src.messages import State +from async_stream import AsyncIfcImpl, StreamPtr +from gen3.talent import 
Talent, Control +from config import Config +from infos import Infos, Register +from modbus import Modbus +from messages import State pytest_plugins = ('pytest_asyncio',) diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..71e8374 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,8 @@ +# pytest.ini or .pytest.ini +[pytest] +minversion = 8.0 +addopts = -ra -q --durations=5 +pythonpath = app/src +testpaths = app/tests +asyncio_default_fixture_loop_scope = function +asyncio_mode = strict \ No newline at end of file From 2c69044bf86b232556fb78b7812e477c34f379a2 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 24 Nov 2024 22:26:55 +0100 Subject: [PATCH 23/32] initial test version --- proxy.c4 | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 proxy.c4 diff --git a/proxy.c4 b/proxy.c4 new file mode 100644 index 0000000..9ea402f --- /dev/null +++ b/proxy.c4 @@ -0,0 +1,20 @@ +model { + extend home.logger.proxy { + component webserver 'http server' + component inverter 'inverter' + component local 'local connection' + component remote 'remote connection' + component r-ifc 'async-ifc' + component l-ifc 'async-ifc' + component prot 'Protocol' 'SolarmanV5 or Talent' + component config 'config' 'reads the file confg.toml' + component mqtt + inverter -> local + inverter -> remote + remote -> r-ifc + remote -> prot + local -> l-ifc + local -> prot + prot -> mqtt + } +} \ No newline at end of file From 28cf875533e86bf06d7c23510f59659d0367a6da Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Mon, 2 Dec 2024 22:49:56 +0100 Subject: [PATCH 24/32] migrate paho.mqtt CallbackAPIVersion to VERSION2 (#225) --- CHANGELOG.md | 1 + app/tests/test_mqtt.py | 2 +- .../ha_addon/rootfs/home/proxy/async_ifc.py | 104 +++ .../rootfs/home/proxy/async_stream.py | 397 ++++++++ .../ha_addon/rootfs/home/proxy/byte_fifo.py | 52 ++ .../ha_addon/rootfs/home/proxy/config.py | 181 ++++ 
.../rootfs/home/proxy/default_config.toml | 177 ++++ .../rootfs/home/proxy/gen3/infos_g3.py | 194 ++++ .../rootfs/home/proxy/gen3/inverter_g3.py | 9 + .../ha_addon/rootfs/home/proxy/gen3/talent.py | 569 ++++++++++++ .../rootfs/home/proxy/gen3plus/infos_g3p.py | 204 ++++ .../home/proxy/gen3plus/inverter_g3p.py | 15 + .../home/proxy/gen3plus/solarman_emu.py | 138 +++ .../rootfs/home/proxy/gen3plus/solarman_v5.py | 706 ++++++++++++++ ha_addons/ha_addon/rootfs/home/proxy/infos.py | 871 ++++++++++++++++++ .../rootfs/home/proxy/inverter_base.py | 178 ++++ .../rootfs/home/proxy/inverter_ifc.py | 37 + .../rootfs/home/proxy/iter_registry.py | 9 + .../ha_addon/rootfs/home/proxy/logging.ini | 76 ++ .../ha_addon/rootfs/home/proxy/messages.py | 203 ++++ .../ha_addon/rootfs/home/proxy/modbus.py | 345 +++++++ .../ha_addon/rootfs/home/proxy/modbus_tcp.py | 88 ++ ha_addons/ha_addon/rootfs/home/proxy/mqtt.py | 182 ++++ .../ha_addon/rootfs/home/proxy/my_timer.py | 35 + .../rootfs/home/proxy/protocol_ifc.py | 17 + ha_addons/ha_addon/rootfs/home/proxy/proxy.py | 101 ++ .../ha_addon/rootfs/home/proxy/scheduler.py | 30 + .../ha_addon/rootfs/home/proxy/server.py | 191 ++++ .../ha_addon/rootfs/home/proxy/singleton.py | 14 + ha_addons/ha_addon/rootfs/requirements.txt | 4 + 30 files changed, 5129 insertions(+), 1 deletion(-) create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/async_stream.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/config.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/default_config.toml create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py create mode 100644 
ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_v5.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/infos.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/logging.ini create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/messages.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/modbus.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/mqtt.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/my_timer.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/proxy.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/scheduler.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/server.py create mode 100644 ha_addons/ha_addon/rootfs/home/proxy/singleton.py create mode 100644 ha_addons/ha_addon/rootfs/requirements.txt diff --git a/CHANGELOG.md b/CHANGELOG.md index 09063c7..5bb20c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- migrate paho.mqtt CallbackAPIVersion to VERSION2 [#224](https://github.com/s-allius/tsun-gen3-proxy/issues/224) - add PROD_COMPL_TYPE to trace - add SolarmanV5 messages builder - report inverter alarms and faults per MQTT [#7](https://github.com/s-allius/tsun-gen3-proxy/issues/7) diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 80bf436..b85d746 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -75,7 +75,7 @@ def 
test_native_client(test_hostname, test_port): import paho.mqtt.client as mqtt import threading - c = mqtt.Client() + c = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2) c.loop_start() try: # Just make sure the client connects successfully diff --git a/ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py b/ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py new file mode 100644 index 0000000..80af383 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py @@ -0,0 +1,104 @@ +from abc import ABC, abstractmethod + + +class AsyncIfc(ABC): + @abstractmethod + def get_conn_no(self): + pass # pragma: no cover + + @abstractmethod + def set_node_id(self, value: str): + pass # pragma: no cover + + # + # TX - QUEUE + # + @abstractmethod + def tx_add(self, data: bytearray): + ''' add data to transmit queue''' + pass # pragma: no cover + + @abstractmethod + def tx_flush(self): + ''' send transmit queue and clears it''' + pass # pragma: no cover + + @abstractmethod + def tx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + pass # pragma: no cover + + @abstractmethod + def tx_log(self, level, info): + ''' log the transmit queue''' + pass # pragma: no cover + + @abstractmethod + def tx_clear(self): + ''' clear transmit queue''' + pass # pragma: no cover + + @abstractmethod + def tx_len(self): + ''' get numner of bytes in the transmit queue''' + pass # pragma: no cover + + # + # FORWARD - QUEUE + # + @abstractmethod + def fwd_add(self, data: bytearray): + ''' add data to forward queue''' + pass # pragma: no cover + + @abstractmethod + def fwd_log(self, level, info): + ''' log the forward queue''' + pass # pragma: no cover + + # + # RX - QUEUE + # + @abstractmethod + def rx_get(self, size: int = None) -> bytearray: + '''removes size numbers of bytes and return them''' + pass # pragma: no cover + + @abstractmethod + def rx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + 
pass # pragma: no cover + + @abstractmethod + def rx_log(self, level, info): + ''' logs the receive queue''' + pass # pragma: no cover + + @abstractmethod + def rx_clear(self): + ''' clear receive queue''' + pass # pragma: no cover + + @abstractmethod + def rx_len(self): + ''' get numner of bytes in the receive queue''' + pass # pragma: no cover + + @abstractmethod + def rx_set_cb(self, callback): + pass # pragma: no cover + + # + # Protocol Callbacks + # + @abstractmethod + def prot_set_timeout_cb(self, callback): + pass # pragma: no cover + + @abstractmethod + def prot_set_init_new_client_conn_cb(self, callback): + pass # pragma: no cover + + @abstractmethod + def prot_set_update_header_cb(self, callback): + pass # pragma: no cover diff --git a/ha_addons/ha_addon/rootfs/home/proxy/async_stream.py b/ha_addons/ha_addon/rootfs/home/proxy/async_stream.py new file mode 100644 index 0000000..ec060b2 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/async_stream.py @@ -0,0 +1,397 @@ +import asyncio +import logging +import traceback +import time +from asyncio import StreamReader, StreamWriter +from typing import Self +from itertools import count + +from proxy import Proxy +from byte_fifo import ByteFifo +from async_ifc import AsyncIfc +from infos import Infos + + +import gc +logger = logging.getLogger('conn') + + +class AsyncIfcImpl(AsyncIfc): + _ids = count(0) + + def __init__(self) -> None: + logger.debug('AsyncIfcImpl.__init__') + self.fwd_fifo = ByteFifo() + self.tx_fifo = ByteFifo() + self.rx_fifo = ByteFifo() + self.conn_no = next(self._ids) + self.node_id = '' + self.timeout_cb = None + self.init_new_client_conn_cb = None + self.update_header_cb = None + + def close(self): + self.timeout_cb = None + self.fwd_fifo.reg_trigger(None) + self.tx_fifo.reg_trigger(None) + self.rx_fifo.reg_trigger(None) + + def set_node_id(self, value: str): + self.node_id = value + + def get_conn_no(self): + return self.conn_no + + def tx_add(self, data: bytearray): + ''' add data 
to transmit queue''' + self.tx_fifo += data + + def tx_flush(self): + ''' send transmit queue and clears it''' + self.tx_fifo() + + def tx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + return self.tx_fifo.peek(size) + + def tx_log(self, level, info): + ''' log the transmit queue''' + self.tx_fifo.logging(level, info) + + def tx_clear(self): + ''' clear transmit queue''' + self.tx_fifo.clear() + + def tx_len(self): + ''' get numner of bytes in the transmit queue''' + return len(self.tx_fifo) + + def fwd_add(self, data: bytearray): + ''' add data to forward queue''' + self.fwd_fifo += data + + def fwd_log(self, level, info): + ''' log the forward queue''' + self.fwd_fifo.logging(level, info) + + def rx_get(self, size: int = None) -> bytearray: + '''removes size numbers of bytes and return them''' + return self.rx_fifo.get(size) + + def rx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + return self.rx_fifo.peek(size) + + def rx_log(self, level, info): + ''' logs the receive queue''' + self.rx_fifo.logging(level, info) + + def rx_clear(self): + ''' clear receive queue''' + self.rx_fifo.clear() + + def rx_len(self): + ''' get numner of bytes in the receive queue''' + return len(self.rx_fifo) + + def rx_set_cb(self, callback): + self.rx_fifo.reg_trigger(callback) + + def prot_set_timeout_cb(self, callback): + self.timeout_cb = callback + + def prot_set_init_new_client_conn_cb(self, callback): + self.init_new_client_conn_cb = callback + + def prot_set_update_header_cb(self, callback): + self.update_header_cb = callback + + +class StreamPtr(): + '''Descr StreamPtr''' + def __init__(self, _stream, _ifc=None): + self.stream = _stream + self.ifc = _ifc + + @property + def ifc(self): + return self._ifc + + @ifc.setter + def ifc(self, value): + self._ifc = value + + @property + def stream(self): + return self._stream + + @stream.setter + def stream(self, value): + 
self._stream = value + + +class AsyncStream(AsyncIfcImpl): + MAX_PROC_TIME = 2 + '''maximum processing time for a received msg in sec''' + MAX_START_TIME = 400 + '''maximum time without a received msg in sec''' + MAX_INV_IDLE_TIME = 120 + '''maximum time without a received msg from the inverter in sec''' + MAX_DEF_IDLE_TIME = 360 + '''maximum default time without a received msg in sec''' + + def __init__(self, reader: StreamReader, writer: StreamWriter, + rstream: "StreamPtr") -> None: + AsyncIfcImpl.__init__(self) + + logger.debug('AsyncStream.__init__') + + self.remote = rstream + self.tx_fifo.reg_trigger(self.__write_cb) + self._reader = reader + self._writer = writer + self.r_addr = writer.get_extra_info('peername') + self.l_addr = writer.get_extra_info('sockname') + self.proc_start = None # start processing start timestamp + self.proc_max = 0 + self.async_publ_mqtt = None # will be set AsyncStreamServer only + + def __write_cb(self): + self._writer.write(self.tx_fifo.get()) + + def __timeout(self) -> int: + if self.timeout_cb: + return self.timeout_cb() + return 360 + + async def loop(self) -> Self: + """Async loop handler for precessing all received messages""" + self.proc_start = time.time() + while True: + try: + self.__calc_proc_time() + dead_conn_to = self.__timeout() + await asyncio.wait_for(self.__async_read(), + dead_conn_to) + + await self.__async_write() + await self.__async_forward() + if self.async_publ_mqtt: + await self.async_publ_mqtt() + + except asyncio.TimeoutError: + logger.warning(f'[{self.node_id}:{self.conn_no}] Dead ' + f'connection timeout ({dead_conn_to}s) ' + f'for {self.l_addr}') + await self.disc() + return self + + except OSError as error: + logger.error(f'[{self.node_id}:{self.conn_no}] ' + f'{error} for l{self.l_addr} | ' + f'r{self.r_addr}') + await self.disc() + return self + + except RuntimeError as error: + logger.info(f'[{self.node_id}:{self.conn_no}] ' + f'{error} for {self.l_addr}') + await self.disc() + return self + + 
except Exception: + Infos.inc_counter('SW_Exception') + logger.error( + f"Exception for {self.r_addr}:\n" + f"{traceback.format_exc()}") + await asyncio.sleep(0) # be cooperative to other task + + def __calc_proc_time(self): + if self.proc_start: + proc = time.time() - self.proc_start + if proc > self.proc_max: + self.proc_max = proc + self.proc_start = None + + async def disc(self) -> None: + """Async disc handler for graceful disconnect""" + if self._writer.is_closing(): + return + logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}') + self._writer.close() + await self._writer.wait_closed() + + def close(self) -> None: + logging.debug(f'AsyncStream.close() l{self.l_addr} | r{self.r_addr}') + """close handler for a no waiting disconnect + + hint: must be called before releasing the connection instance + """ + super().close() + self._reader.feed_eof() # abort awaited read + if self._writer.is_closing(): + return + self._writer.close() + + def healthy(self) -> bool: + elapsed = 0 + if self.proc_start is not None: + elapsed = time.time() - self.proc_start + if elapsed > self.MAX_PROC_TIME: + logging.debug(f'[{self.node_id}:{self.conn_no}:' + f'{type(self).__name__}]' + f' act:{round(1000*elapsed)}ms' + f' max:{round(1000*self.proc_max)}ms') + logging.debug(f'Healthy()) refs: {gc.get_referrers(self)}') + return elapsed < 5 + + ''' + Our private methods + ''' + async def __async_read(self) -> None: + """Async read handler to read received data from TCP stream""" + data = await self._reader.read(4096) + if data: + self.proc_start = time.time() + self.rx_fifo += data + wait = self.rx_fifo() # call read in parent class + if wait and wait > 0: + await asyncio.sleep(wait) + else: + raise RuntimeError("Peer closed.") + + async def __async_write(self, headline: str = 'Transmit to ') -> None: + """Async write handler to transmit the send_buffer""" + if len(self.tx_fifo) > 0: + self.tx_fifo.logging(logging.INFO, f'{headline}{self.r_addr}:') + 
self._writer.write(self.tx_fifo.get()) + await self._writer.drain() + + async def __async_forward(self) -> None: + """forward handler transmits data over the remote connection""" + if len(self.fwd_fifo) == 0: + return + try: + await self._async_forward() + + except OSError as error: + if self.remote.stream: + rmt = self.remote + logger.error(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] ' + f'Fwd: {error} for ' + f'l{rmt.ifc.l_addr} | r{rmt.ifc.r_addr}') + await rmt.ifc.disc() + if rmt.ifc.close_cb: + rmt.ifc.close_cb() + + except RuntimeError as error: + if self.remote.stream: + rmt = self.remote + logger.info(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] ' + f'Fwd: {error} for {rmt.ifc.l_addr}') + await rmt.ifc.disc() + if rmt.ifc.close_cb: + rmt.ifc.close_cb() + + except Exception: + Infos.inc_counter('SW_Exception') + logger.error( + f"Fwd Exception for {self.r_addr}:\n" + f"{traceback.format_exc()}") + + async def publish_outstanding_mqtt(self): + '''Publish all outstanding MQTT topics''' + try: + await self.async_publ_mqtt() + await Proxy._async_publ_mqtt_proxy_stat('proxy') + except Exception: + pass + + +class AsyncStreamServer(AsyncStream): + def __init__(self, reader: StreamReader, writer: StreamWriter, + async_publ_mqtt, create_remote, + rstream: "StreamPtr") -> None: + AsyncStream.__init__(self, reader, writer, rstream) + self.create_remote = create_remote + self.async_publ_mqtt = async_publ_mqtt + + def close(self) -> None: + logging.debug('AsyncStreamServer.close()') + self.create_remote = None + self.async_publ_mqtt = None + super().close() + + async def server_loop(self) -> None: + '''Loop for receiving messages from the inverter (server-side)''' + logger.info(f'[{self.node_id}:{self.conn_no}] ' + f'Accept connection from {self.r_addr}') + Infos.inc_counter('Inverter_Cnt') + await self.publish_outstanding_mqtt() + await self.loop() + Infos.dec_counter('Inverter_Cnt') + await self.publish_outstanding_mqtt() + 
logger.info(f'[{self.node_id}:{self.conn_no}] Server loop stopped for' + f' r{self.r_addr}') + + # if the server connection closes, we also have to disconnect + # the connection to te TSUN cloud + if self.remote and self.remote.stream: + logger.info(f'[{self.node_id}:{self.conn_no}] disc client ' + f'connection: [{self.remote.ifc.node_id}:' + f'{self.remote.ifc.conn_no}]') + await self.remote.ifc.disc() + + async def _async_forward(self) -> None: + """forward handler transmits data over the remote connection""" + if not self.remote.stream: + await self.create_remote() + if self.remote.stream and \ + self.remote.ifc.init_new_client_conn_cb(): + await self.remote.ifc._AsyncStream__async_write() + if self.remote.stream: + self.remote.ifc.update_header_cb(self.fwd_fifo.peek()) + self.fwd_fifo.logging(logging.INFO, 'Forward to ' + f'{self.remote.ifc.r_addr}:') + self.remote.ifc._writer.write(self.fwd_fifo.get()) + await self.remote.ifc._writer.drain() + + +class AsyncStreamClient(AsyncStream): + def __init__(self, reader: StreamReader, writer: StreamWriter, + rstream: "StreamPtr", close_cb) -> None: + AsyncStream.__init__(self, reader, writer, rstream) + self.close_cb = close_cb + + async def disc(self) -> None: + logging.debug('AsyncStreamClient.disc()') + self.remote = None + await super().disc() + + def close(self) -> None: + logging.debug('AsyncStreamClient.close()') + self.close_cb = None + super().close() + + async def client_loop(self, _: str) -> None: + '''Loop for receiving messages from the TSUN cloud (client-side)''' + Infos.inc_counter('Cloud_Conn_Cnt') + await self.publish_outstanding_mqtt() + await self.loop() + Infos.dec_counter('Cloud_Conn_Cnt') + await self.publish_outstanding_mqtt() + logger.info(f'[{self.node_id}:{self.conn_no}] ' + 'Client loop stopped for' + f' l{self.l_addr}') + + if self.close_cb: + self.close_cb() + + async def _async_forward(self) -> None: + """forward handler transmits data over the remote connection""" + if self.remote.stream: 
+ self.remote.ifc.update_header_cb(self.fwd_fifo.peek()) + self.fwd_fifo.logging(logging.INFO, 'Forward to ' + f'{self.remote.ifc.r_addr}:') + self.remote.ifc._writer.write(self.fwd_fifo.get()) + await self.remote.ifc._writer.drain() diff --git a/ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py b/ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py new file mode 100644 index 0000000..959eab2 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py @@ -0,0 +1,52 @@ +from messages import hex_dump_str, hex_dump_memory + + +class ByteFifo: + """ a byte FIFO buffer with trigger callback """ + __slots__ = ('__buf', '__trigger_cb') + + def __init__(self): + self.__buf = bytearray() + self.__trigger_cb = None + + def reg_trigger(self, cb) -> None: + self.__trigger_cb = cb + + def __iadd__(self, data): + self.__buf.extend(data) + return self + + def __call__(self): + '''triggers the observer''' + if callable(self.__trigger_cb): + return self.__trigger_cb() + return None + + def get(self, size: int = None) -> bytearray: + '''removes size numbers of byte and return them''' + if not size: + data = self.__buf + self.clear() + else: + data = self.__buf[:size] + # The fast delete syntax + self.__buf[:size] = b'' + return data + + def peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + if not size: + return self.__buf + return self.__buf[:size] + + def clear(self): + self.__buf = bytearray() + + def __len__(self) -> int: + return len(self.__buf) + + def __str__(self) -> str: + return hex_dump_str(self.__buf, self.__len__()) + + def logging(self, level, info): + hex_dump_memory(level, info, self.__buf, self.__len__()) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/config.py b/ha_addons/ha_addon/rootfs/home/proxy/config.py new file mode 100644 index 0000000..3424bd9 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/config.py @@ -0,0 +1,181 @@ +'''Config module handles the proxy configuration in the config.toml 
file''' + +import shutil +import tomllib +import logging +from schema import Schema, And, Or, Use, Optional + + +class Config(): + '''Static class Config is reads and sanitize the config. + + Read config.toml file and sanitize it with read(). + Get named parts of the config with get()''' + + act_config = {} + def_config = {} + conf_schema = Schema({ + 'tsun': { + 'enabled': Use(bool), + 'host': Use(str), + 'port': And(Use(int), lambda n: 1024 <= n <= 65535) + }, + 'solarman': { + 'enabled': Use(bool), + 'host': Use(str), + 'port': And(Use(int), lambda n: 1024 <= n <= 65535) + }, + 'mqtt': { + 'host': Use(str), + 'port': And(Use(int), lambda n: 1024 <= n <= 65535), + 'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)), + 'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None)) + }, + 'ha': { + 'auto_conf_prefix': Use(str), + 'discovery_prefix': Use(str), + 'entity_prefix': Use(str), + 'proxy_node_id': Use(str), + 'proxy_unique_id': Use(str) + }, + 'gen3plus': { + 'at_acl': { + Or('mqtt', 'tsun'): { + 'allow': [str], + Optional('block', default=[]): [str] + } + } + }, + 'inverters': { + 'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): { + Optional('monitor_sn', default=0): Use(int), + Optional('node_id', default=""): And(Use(str), + Use(lambda s: s + '/' + if len(s) > 0 + and s[-1] != '/' + else s)), + Optional('client_mode'): { + 'host': Use(str), + Optional('port', default=8899): + And(Use(int), lambda n: 1024 <= n <= 65535), + Optional('forward', default=False): Use(bool), + }, + Optional('modbus_polling', default=True): Use(bool), + Optional('suggested_area', default=""): Use(str), + Optional('sensor_list', default=0x2b0): Use(int), + Optional('pv1'): { + Optional('type'): Use(str), + Optional('manufacturer'): Use(str), + }, + Optional('pv2'): { + Optional('type'): Use(str), + Optional('manufacturer'): Use(str), + }, + Optional('pv3'): { + Optional('type'): Use(str), + Optional('manufacturer'): Use(str), + }, + Optional('pv4'): 
{ + Optional('type'): Use(str), + Optional('manufacturer'): Use(str), + }, + Optional('pv5'): { + Optional('type'): Use(str), + Optional('manufacturer'): Use(str), + }, + Optional('pv6'): { + Optional('type'): Use(str), + Optional('manufacturer'): Use(str), + } + } + } + }, ignore_extra_keys=True + ) + + @classmethod + def class_init(cls) -> None | str: # pragma: no cover + try: + # make the default config transparaent by copying it + # in the config.example file + logging.debug('Copy Default Config to config.example.toml') + + shutil.copy2("default_config.toml", + "config/config.example.toml") + except Exception: + pass + err_str = cls.read() + del cls.conf_schema + return err_str + + @classmethod + def _read_config_file(cls) -> dict: # pragma: no cover + usr_config = {} + + try: + with open("config/config.toml", "rb") as f: + usr_config = tomllib.load(f) + except Exception as error: + err = f'Config.read: {error}' + logging.error(err) + logging.info( + '\n To create the missing config.toml file, ' + 'you can rename the template config.example.toml\n' + ' and customize it for your scenario.\n') + return usr_config + + @classmethod + def read(cls, path='') -> None | str: + '''Read config file, merge it with the default config + and sanitize the result''' + err = None + config = {} + logger = logging.getLogger('data') + + try: + # read example config file as default configuration + cls.def_config = {} + with open(f"{path}default_config.toml", "rb") as f: + def_config = tomllib.load(f) + cls.def_config = cls.conf_schema.validate(def_config) + + # overwrite the default values, with values from + # the config.toml file + usr_config = cls._read_config_file() + + # merge the default and the user config + config = def_config.copy() + for key in ['tsun', 'solarman', 'mqtt', 'ha', 'inverters', + 'gen3plus']: + if key in usr_config: + config[key] |= usr_config[key] + + try: + cls.act_config = cls.conf_schema.validate(config) + except Exception as error: + err = 
f'Config.read: {error}' + logging.error(err) + + # logging.debug(f'Readed config: "{cls.act_config}" ') + + except Exception as error: + err = f'Config.read: {error}' + logger.error(err) + cls.act_config = {} + + return err + + @classmethod + def get(cls, member: str = None): + '''Get a named attribute from the proxy config. If member == + None it returns the complete config dict''' + + if member: + return cls.act_config.get(member, {}) + else: + return cls.act_config + + @classmethod + def is_default(cls, member: str) -> bool: + '''Check if the member is the default value''' + + return cls.act_config.get(member) == cls.def_config.get(member) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/default_config.toml b/ha_addons/ha_addon/rootfs/home/proxy/default_config.toml new file mode 100644 index 0000000..57b2baf --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/default_config.toml @@ -0,0 +1,177 @@ +########################################################################################## +### +### T S U N - G E N 3 - P R O X Y +### +### from Stefan Allius +### +########################################################################################## +### +### The readme will give you an overview of the project: +### https://s-allius.github.io/tsun-gen3-proxy/ +### +### The proxy supports different operation modes. Select the proper mode +### which depends on your inverter type and you inverter firmware. +### Please read: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/Operation-Modes-Overview +### +### Here you will find a description of all configuration options: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details +### +### The configration uses the TOML format, which aims to be easy to read due to +### obvious semantics. 
You find more details here: https://toml.io/en/v1.0.0 +### +########################################################################################## + + +########################################################################################## +## +## MQTT broker configuration +## +## In this block, you must configure the connection to your MQTT broker and specify the +## required credentials. As the proxy does not currently support an encrypted connection +## to the MQTT broker, it is strongly recommended that you do not use a public broker. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#mqtt-broker-account +## + +mqtt.host = 'mqtt' # URL or IP address of the mqtt broker +mqtt.port = 1883 +mqtt.user = '' +mqtt.passwd = '' + + +########################################################################################## +## +## HOME ASSISTANT +## +## The proxy supports the MQTT autoconfiguration of Home Assistant (HA). The default +## values match the HA default configuration. If you need to change these or want to use +## a different MQTT client, you can adjust the prefixes of the MQTT topics below. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#home-assistant +## + +ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates +ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic +ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values +ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id +ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance + + +########################################################################################## +## +## GEN3 Proxy Mode Configuration +## +## In this block, you can configure an optional connection to the TSUN cloud for GEN3 +## inverters. 
This connection is only required if you want send data to the TSUN cloud +## to use the TSUN APPs or receive firmware updates. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#tsun-cloud-for-gen3-inverter-only +## + +tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates +tsun.host = 'logger.talent-monitoring.com' +tsun.port = 5005 + + +########################################################################################## +## +## GEN3PLUS Proxy Mode Configuration +## +## In this block, you can configure an optional connection to the TSUN cloud for GEN3PLUS +## inverters. This connection is only required if you want send data to the TSUN cloud +## to use the TSUN APPs or receive firmware updates. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#solarman-cloud-for-gen3plus-inverter-only +## + +solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates +solarman.host = 'iot.talent-monitoring.com' +solarman.port = 10000 + + +########################################################################################## +### +### Inverter Definitions +### +### The proxy supports the simultaneous operation of several inverters, even of different +### types. A configuration block must be defined for each inverter, in which all necessary +### parameters must be specified. These depend on the operation mode used and also differ +### slightly depending on the inverter type. +### +### In addition, the PV modules can be defined at the individual inputs for documentation +### purposes, whereby these are displayed in Home Assistant. +### +### The proxy only accepts connections from known inverters. This can be switched off for +### test purposes and unknown serial numbers are also accepted. 
+### + +inverters.allow_all = false # only allow known inverters + + +########################################################################################## +## +## For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT +## definition. To do this, the corresponding configuration block is started with +## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned +## to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set +## in the configuration block +## +## The serial numbers of all GEN3 inverters start with `R17`! +## + +[inverters."R170000000000001"] +node_id = '' # MQTT replacement for inverters serial number +suggested_area = '' # suggested installation area for home-assistant +modbus_polling = false # Disable optional MODBUS polling +pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr +pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr + + +########################################################################################## +## +## For each GEN3PLUS inverter, the serial number of the inverter must be mapped to an MQTT +## definition. To do this, the corresponding configuration block is started with +## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned +## to this inverter. Further inverter-specific parameters (e.g. polling mode, client mode) +## can be set in the configuration block +## +## The serial numbers of all GEN3PLUS inverters start with `Y17` or Y47! Each GEN3PLUS +## inverter is supplied with a “Monitoring SN:”. This can be found on a sticker enclosed +## with the inverter. 
+## + +[inverters."Y170000000000001"] +monitor_sn = 2000000000 # The GEN3PLUS "Monitoring SN:" +node_id = '' # MQTT replacement for inverters serial number +suggested_area = '' # suggested installation place for home-assistant +modbus_polling = true # Enable optional MODBUS polling + +# if your inverter supports SSL connections you must use the client_mode. Pls, uncomment +# the next line and configure the fixed IP of your inverter +#client_mode = {host = '192.168.0.1', port = 8899} + +pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr +pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr +pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr +pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr + + +########################################################################################## +### +### If the proxy mode is configured, commands from TSUN can be sent to the inverter via +### this connection or parameters (e.g. network credentials) can be queried. Filters can +### then be configured for the AT+ commands from the TSUN Cloud so that only certain +### accesses are permitted. 
+### +### An overview of all known AT+ commands can be found here: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/AT--commands +### + +[gen3plus.at_acl] +# filter for received commands from the internet +tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'] +tsun.block = [] +# filter for received commands from the MQTT broker +mqtt.allow = ['AT+'] +mqtt.block = [] diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py new file mode 100644 index 0000000..efa220c --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py @@ -0,0 +1,194 @@ + +import struct +import logging +from typing import Generator + +from infos import Infos, Register + + +class RegisterMap: + __slots__ = () + + map = { + 0x00092ba8: {'reg': Register.COLLECTOR_FW_VERSION}, + 0x000927c0: {'reg': Register.CHIP_TYPE}, + 0x00092f90: {'reg': Register.CHIP_MODEL}, + 0x00094ae8: {'reg': Register.MAC_ADDR}, + 0x00095a88: {'reg': Register.TRACE_URL}, + 0x00095aec: {'reg': Register.LOGGER_URL}, + 0x0000000a: {'reg': Register.PRODUCT_NAME}, + 0x00000014: {'reg': Register.MANUFACTURER}, + 0x0000001e: {'reg': Register.VERSION}, + 0x00000028: {'reg': Register.SERIAL_NUMBER}, + 0x00000032: {'reg': Register.EQUIPMENT_MODEL}, + 0x00013880: {'reg': Register.NO_INPUTS}, + 0xffffff00: {'reg': Register.INVERTER_CNT}, + 0xffffff01: {'reg': Register.UNKNOWN_SNR}, + 0xffffff02: {'reg': Register.UNKNOWN_MSG}, + 0xffffff03: {'reg': Register.INVALID_DATA_TYPE}, + 0xffffff04: {'reg': Register.INTERNAL_ERROR}, + 0xffffff05: {'reg': Register.UNKNOWN_CTRL}, + 0xffffff06: {'reg': Register.OTA_START_MSG}, + 0xffffff07: {'reg': Register.SW_EXCEPTION}, + 0xffffff08: {'reg': Register.POLLING_INTERVAL}, + 0xfffffffe: {'reg': Register.TEST_REG1}, + 0xffffffff: {'reg': Register.TEST_REG2}, + 0x00000640: {'reg': Register.OUTPUT_POWER}, + 0x000005dc: {'reg': Register.RATED_POWER}, + 0x00000514: {'reg': Register.INVERTER_TEMP}, + 0x000006a4: {'reg': 
Register.PV1_VOLTAGE}, + 0x00000708: {'reg': Register.PV1_CURRENT}, + 0x0000076c: {'reg': Register.PV1_POWER}, + 0x000007d0: {'reg': Register.PV2_VOLTAGE}, + 0x00000834: {'reg': Register.PV2_CURRENT}, + 0x00000898: {'reg': Register.PV2_POWER}, + 0x000008fc: {'reg': Register.PV3_VOLTAGE}, + 0x00000960: {'reg': Register.PV3_CURRENT}, + 0x000009c4: {'reg': Register.PV3_POWER}, + 0x00000a28: {'reg': Register.PV4_VOLTAGE}, + 0x00000a8c: {'reg': Register.PV4_CURRENT}, + 0x00000af0: {'reg': Register.PV4_POWER}, + 0x00000c1c: {'reg': Register.PV1_DAILY_GENERATION}, + 0x00000c80: {'reg': Register.PV1_TOTAL_GENERATION}, + 0x00000ce4: {'reg': Register.PV2_DAILY_GENERATION}, + 0x00000d48: {'reg': Register.PV2_TOTAL_GENERATION}, + 0x00000dac: {'reg': Register.PV3_DAILY_GENERATION}, + 0x00000e10: {'reg': Register.PV3_TOTAL_GENERATION}, + 0x00000e74: {'reg': Register.PV4_DAILY_GENERATION}, + 0x00000ed8: {'reg': Register.PV4_TOTAL_GENERATION}, + 0x00000b54: {'reg': Register.DAILY_GENERATION}, + 0x00000bb8: {'reg': Register.TOTAL_GENERATION}, + 0x000003e8: {'reg': Register.GRID_VOLTAGE}, + 0x0000044c: {'reg': Register.GRID_CURRENT}, + 0x000004b0: {'reg': Register.GRID_FREQUENCY}, + 0x000cfc38: {'reg': Register.CONNECT_COUNT}, + 0x000c3500: {'reg': Register.SIGNAL_STRENGTH}, + 0x000c96a8: {'reg': Register.POWER_ON_TIME}, + 0x000d0020: {'reg': Register.COLLECT_INTERVAL}, + 0x000cf850: {'reg': Register.DATA_UP_INTERVAL}, + 0x000c7f38: {'reg': Register.COMMUNICATION_TYPE}, + 0x00000190: {'reg': Register.EVENT_ALARM}, + 0x000001f4: {'reg': Register.EVENT_FAULT}, + 0x00000258: {'reg': Register.EVENT_BF1}, + 0x000002bc: {'reg': Register.EVENT_BF2}, + 0x00000064: {'reg': Register.INVERTER_STATUS}, + + 0x00000fa0: {'reg': Register.BOOT_STATUS}, + 0x00001004: {'reg': Register.DSP_STATUS}, + 0x000010cc: {'reg': Register.WORK_MODE}, + 0x000011f8: {'reg': Register.OUTPUT_SHUTDOWN}, + 0x0000125c: {'reg': Register.MAX_DESIGNED_POWER}, + 0x000012c0: {'reg': Register.RATED_LEVEL}, + 0x00001324: 
{'reg': Register.INPUT_COEFFICIENT, 'ratio': 100/1024}, + 0x00001388: {'reg': Register.GRID_VOLT_CAL_COEF}, + 0x00002710: {'reg': Register.PROD_COMPL_TYPE}, + 0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024}, + } + + +class InfosG3(Infos): + __slots__ = () + + def ha_confs(self, ha_prfx: str, node_id: str, snr: str, + sug_area: str = '') \ + -> Generator[tuple[dict, str], None, None]: + '''Generator function yields a json register struct for home-assistant + auto configuration and a unique entity string + + arguments: + prfx:str ==> MQTT prefix for the home assistant 'stat_t string + snr:str ==> serial number of the inverter, used to build unique + entity strings + sug_area:str ==> suggested area string from the config file''' + # iterate over RegisterMap.map and get the register values + for row in RegisterMap.map.values(): + reg = row['reg'] + res = self.ha_conf(reg, ha_prfx, node_id, snr, False, sug_area) # noqa: E501 + if res: + yield res + + def parse(self, buf, ind=0, node_id: str = '') -> \ + Generator[tuple[str, bool], None, None]: + '''parse a data sequence received from the inverter and + stores the values in Infos.db + + buf: buffer of the sequence to parse''' + result = struct.unpack_from('!l', buf, ind) + elms = result[0] + i = 0 + ind += 4 + while i < elms: + result = struct.unpack_from('!lB', buf, ind) + addr = result[0] + if addr not in RegisterMap.map: + row = None + info_id = -1 + else: + row = RegisterMap.map[addr] + info_id = row['reg'] + data_type = result[1] + ind += 5 + + if data_type == 0x54: # 'T' -> Pascal-String + str_len = buf[ind] + result = struct.unpack_from(f'!{str_len+1}p', buf, + ind)[0].decode(encoding='ascii', + errors='replace') + ind += str_len+1 + + elif data_type == 0x00: # 'Nul' -> end + i = elms # abort the loop + + elif data_type == 0x41: # 'A' -> Nop ?? 
+ ind += 0 + i += 1 + continue + + elif data_type == 0x42: # 'B' -> byte, int8 + result = struct.unpack_from('!B', buf, ind)[0] + ind += 1 + + elif data_type == 0x49: # 'I' -> int32 + result = struct.unpack_from('!l', buf, ind)[0] + ind += 4 + + elif data_type == 0x53: # 'S' -> short, int16 + result = struct.unpack_from('!h', buf, ind)[0] + ind += 2 + + elif data_type == 0x46: # 'F' -> float32 + result = round(struct.unpack_from('!f', buf, ind)[0], 2) + ind += 4 + + elif data_type == 0x4c: # 'L' -> long, int64 + result = struct.unpack_from('!q', buf, ind)[0] + ind += 8 + + else: + self.inc_counter('Invalid_Data_Type') + logging.error(f"Infos.parse: data_type: {data_type}" + f" @0x{addr:04x} No:{i}" + " not supported") + return + + result = self.__modify_val(row, result) + + yield from self.__store_result(addr, result, info_id, node_id) + i += 1 + + def __modify_val(self, row, result): + if row and 'ratio' in row: + result = round(result * row['ratio'], 2) + return result + + def __store_result(self, addr, result, info_id, node_id): + keys, level, unit, must_incr = self._key_obj(info_id) + if keys: + name, update = self.update_db(keys, must_incr, result) + yield keys[0], update + else: + update = False + name = str(f'info-id.0x{addr:x}') + if update: + self.tracer.log(level, f'[{node_id}] GEN3: {name} :' + f' {result}{unit}') diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py new file mode 100644 index 0000000..efaeca0 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py @@ -0,0 +1,9 @@ +from asyncio import StreamReader, StreamWriter + +from inverter_base import InverterBase +from gen3.talent import Talent + + +class InverterG3(InverterBase): + def __init__(self, reader: StreamReader, writer: StreamWriter): + super().__init__(reader, writer, 'tsun', Talent) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py 
b/ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py new file mode 100644 index 0000000..da3ebc8 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py @@ -0,0 +1,569 @@ +import struct +import logging +from zoneinfo import ZoneInfo +from datetime import datetime +from tzlocal import get_localzone + +from async_ifc import AsyncIfc +from messages import Message, State +from modbus import Modbus +from config import Config +from gen3.infos_g3 import InfosG3 +from infos import Register + +logger = logging.getLogger('msg') + + +class Control: + def __init__(self, ctrl: int): + self.ctrl = ctrl + + def __int__(self) -> int: + return self.ctrl + + def is_ind(self) -> bool: + return (self.ctrl == 0x91) + + def is_req(self) -> bool: + return (self.ctrl == 0x70) + + def is_resp(self) -> bool: + return (self.ctrl == 0x99) + + +class Talent(Message): + TXT_UNKNOWN_CTRL = 'Unknown Ctrl' + + def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, + client_mode: bool = False, id_str=b''): + super().__init__('G3', ifc, server_side, self.send_modbus_cb, + mb_timeout=15) + ifc.rx_set_cb(self.read) + ifc.prot_set_timeout_cb(self._timeout) + ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn) + ifc.prot_set_update_header_cb(self._update_header) + + self.addr = addr + self.conn_no = ifc.get_conn_no() + self.await_conn_resp_cnt = 0 + self.id_str = id_str + self.contact_name = b'' + self.contact_mail = b'' + self.ts_offset = 0 # time offset between tsun cloud and local + self.db = InfosG3() + self.switch = { + 0x00: self.msg_contact_info, + 0x13: self.msg_ota_update, + 0x22: self.msg_get_time, + 0x99: self.msg_heartbeat, + 0x71: self.msg_collector_data, + # 0x76: + 0x77: self.msg_modbus, + # 0x78: + 0x87: self.msg_modbus2, + 0x04: self.msg_inverter_data, + } + self.log_lvl = { + 0x00: logging.INFO, + 0x13: logging.INFO, + 0x22: logging.INFO, + 0x99: logging.INFO, + 0x71: logging.INFO, + # 0x76: + 0x77: self.get_modbus_log_lvl, + # 0x78: + 0x87: 
self.get_modbus_log_lvl, + 0x04: logging.INFO, + } + + ''' + Our puplic methods + ''' + def close(self) -> None: + logging.debug('Talent.close()') + # we have references to methods of this class in self.switch + # so we have to erase self.switch, otherwise this instance can't be + # deallocated by the garbage collector ==> we get a memory leak + self.switch.clear() + self.log_lvl.clear() + super().close() + + def __set_serial_no(self, serial_no: str): + + if self.unique_id == serial_no: + logger.debug(f'SerialNo: {serial_no}') + else: + inverters = Config.get('inverters') + # logger.debug(f'Inverters: {inverters}') + + if serial_no in inverters: + inv = inverters[serial_no] + self.node_id = inv['node_id'] + self.sug_area = inv['suggested_area'] + self.modbus_polling = inv['modbus_polling'] + logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501 + self.db.set_pv_module_details(inv) + if self.mb: + self.mb.set_node_id(self.node_id) + else: + self.node_id = '' + self.sug_area = '' + if 'allow_all' not in inverters or not inverters['allow_all']: + self.inc_counter('Unknown_SNR') + self.unique_id = None + logger.warning(f'ignore message from unknow inverter! 
(SerialNo: {serial_no})') # noqa: E501 + return + logger.debug(f'SerialNo {serial_no} not known but accepted!') + + self.unique_id = serial_no + self.db.set_db_def_value(Register.COLLECTOR_SNR, serial_no) + + def read(self) -> float: + '''process all received messages in the _recv_buffer''' + self._read() + while True: + if not self.header_valid: + self.__parse_header(self.ifc.rx_peek(), self.ifc.rx_len()) + + if self.header_valid and \ + self.ifc.rx_len() >= (self.header_len + self.data_len): + if self.state == State.init: + self.state = State.received # received 1st package + + log_lvl = self.log_lvl.get(self.msg_id, logging.WARNING) + if callable(log_lvl): + log_lvl = log_lvl() + + self.ifc.rx_log(log_lvl, f'Received from {self.addr}:' + f' BufLen: {self.ifc.rx_len()}' + f' HdrLen: {self.header_len}' + f' DtaLen: {self.data_len}') + + self.__set_serial_no(self.id_str.decode("utf-8")) + self.__dispatch_msg() + self.__flush_recv_msg() + else: + return 0 # don not wait before sending a response + + def forward(self) -> None: + '''add the actual receive msg to the forwarding queue''' + tsun = Config.get('tsun') + if tsun['enabled']: + buflen = self.header_len+self.data_len + buffer = self.ifc.rx_peek(buflen) + self.ifc.fwd_add(buffer) + self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') + + fnc = self.switch.get(self.msg_id, self.msg_unknown) + logger.info(self.__flow_str(self.server_side, 'forwrd') + + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') + + def send_modbus_cb(self, modbus_pdu: bytearray, log_lvl: int, state: str): + if self.state != State.up: + logger.warning(f'[{self.node_id}] ignore MODBUS cmd,' + ' cause the state is not UP anymore') + return + + self.__build_header(0x70, 0x77) + self.ifc.tx_add(b'\x00\x01\xa3\x28') # magic ? 
+ self.ifc.tx_add(struct.pack('!B', len(modbus_pdu))) + self.ifc.tx_add(modbus_pdu) + self.__finish_send_msg() + + self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:') + self.ifc.tx_flush() + + def mb_timout_cb(self, exp_cnt): + self.mb_timer.start(self.mb_timeout) + + if 2 == (exp_cnt % 30): + # logging.info("Regular Modbus Status request") + self._send_modbus_cmd(Modbus.READ_REGS, 0x2000, 96, logging.DEBUG) + else: + self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG) + + def _init_new_client_conn(self) -> bool: + contact_name = self.contact_name + contact_mail = self.contact_mail + logger.info(f'name: {contact_name} mail: {contact_mail}') + self.msg_id = 0 + self.await_conn_resp_cnt += 1 + self.__build_header(0x91) + self.ifc.tx_add(struct.pack(f'!{len(contact_name)+1}p' + f'{len(contact_mail)+1}p', + contact_name, contact_mail)) + + self.__finish_send_msg() + return True + + ''' + Our private methods + ''' + def __flow_str(self, server_side: bool, type: str): # noqa: F821 + switch = { + 'rx': ' <', + 'tx': ' >', + 'forwrd': '<< ', + 'drop': ' xx', + 'rxS': '> ', + 'txS': '< ', + 'forwrdS': ' >>', + 'dropS': 'xx ', + } + if server_side: + type += 'S' + return switch.get(type, '???') + + def _timestamp(self): # pragma: no cover + '''returns timestamp fo the inverter as localtime + since 1.1.1970 in msec''' + # convert localtime in epoche + ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds() + return round(ts*1000) + + def _utcfromts(self, ts: float): + '''converts inverter timestamp into unix time (epoche)''' + dt = datetime.fromtimestamp(ts/1000, tz=ZoneInfo("UTC")). 
\ + replace(tzinfo=get_localzone()) + return dt.timestamp() + + def _utc(self): # pragma: no cover + '''returns unix time (epoche)''' + return datetime.now().timestamp() + + def _update_header(self, _forward_buffer): + '''update header for message before forwarding, + add time offset to timestamp''' + _len = len(_forward_buffer) + ofs = 0 + while ofs < _len: + result = struct.unpack_from('!lB', _forward_buffer, 0) + msg_len = 4 + result[0] + id_len = result[1] # len of variable id string + if _len < 2*id_len + 21: + return + + result = struct.unpack_from('!B', _forward_buffer, id_len+6) + msg_code = result[0] + if msg_code == 0x71 or msg_code == 0x04: + result = struct.unpack_from('!q', _forward_buffer, 13+2*id_len) + ts = result[0] + self.ts_offset + logger.debug(f'offset: {self.ts_offset:08x}' + f' proxy-time: {ts:08x}') + struct.pack_into('!q', _forward_buffer, 13+2*id_len, ts) + ofs += msg_len + + # check if there is a complete header in the buffer, parse it + # and set + # self.header_len + # self.data_len + # self.id_str + # self.ctrl + # self.msg_id + # + # if the header is incomplete, than self.header_len is still 0 + # + def __parse_header(self, buf: bytes, buf_len: int) -> None: + + if (buf_len < 5): # enough bytes to read len and id_len? + return + result = struct.unpack_from('!lB', buf, 0) + msg_len = result[0] # len of complete message + id_len = result[1] # len of variable id string + if id_len > 17: + logger.warning(f'len of ID string must == 16 but is {id_len}') + self.inc_counter('Invalid_Msg_Format') + + # erase broken recv buffer + self.ifc.rx_clear() + return + + hdr_len = 5+id_len+2 + + if (buf_len < hdr_len): # enough bytes for complete header? 
+ return + + result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4) + + # store parsed header values in the class + self.id_str = result[0] + self.ctrl = Control(result[1]) + self.msg_id = result[2] + self.data_len = msg_len-id_len-3 + self.header_len = hdr_len + self.header_valid = True + + def __build_header(self, ctrl, msg_id=None) -> None: + if not msg_id: + msg_id = self.msg_id + self.send_msg_ofs = self.ifc.tx_len() + self.ifc.tx_add(struct.pack(f'!l{len(self.id_str)+1}pBB', + 0, self.id_str, ctrl, msg_id)) + fnc = self.switch.get(msg_id, self.msg_unknown) + logger.info(self.__flow_str(self.server_side, 'tx') + + f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}') + + def __finish_send_msg(self) -> None: + _len = self.ifc.tx_len() - self.send_msg_ofs + struct.pack_into('!l', self.ifc.tx_peek(), self.send_msg_ofs, + _len-4) + + def __dispatch_msg(self) -> None: + fnc = self.switch.get(self.msg_id, self.msg_unknown) + if self.unique_id: + logger.info(self.__flow_str(self.server_side, 'rx') + + f' Ctl: {int(self.ctrl):#02x} ({self.state}) ' + f'Msg: {fnc.__name__!r}') + fnc() + else: + logger.info(self.__flow_str(self.server_side, 'drop') + + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') + + def __flush_recv_msg(self) -> None: + self.ifc.rx_get(self.header_len+self.data_len) + self.header_valid = False + + ''' + Message handler methods + ''' + def msg_contact_info(self): + if self.ctrl.is_ind(): + if self.server_side and self.__process_contact_info(): + self.__build_header(0x91) + self.ifc.tx_add(b'\x01') + self.__finish_send_msg() + # don't forward this contact info here, we will build one + # when the remote connection is established + elif self.await_conn_resp_cnt > 0: + self.await_conn_resp_cnt -= 1 + else: + self.forward() + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + self.forward() + + def __process_contact_info(self) -> bool: + buf = self.ifc.rx_peek() + result = struct.unpack_from('!B', buf, self.header_len) + 
name_len = result[0] + if self.data_len == 1: # this is a response withone status byte + return False + if self.data_len >= name_len+2: + result = struct.unpack_from(f'!{name_len+1}pB', buf, + self.header_len) + self.contact_name = result[0] + mail_len = result[1] + logger.info(f'name: {self.contact_name}') + + result = struct.unpack_from(f'!{mail_len+1}p', buf, + self.header_len+name_len+1) + self.contact_mail = result[0] + logger.info(f'mail: {self.contact_mail}') + return True + + def msg_get_time(self): + if self.ctrl.is_ind(): + if self.data_len == 0: + if self.state == State.up: + self.state = State.pend # block MODBUS cmds + + ts = self._timestamp() + logger.debug(f'time: {ts:08x}') + self.__build_header(0x91) + self.ifc.tx_add(struct.pack('!q', ts)) + self.__finish_send_msg() + + elif self.data_len >= 8: + ts = self._timestamp() + result = struct.unpack_from('!q', self.ifc.rx_peek(), + self.header_len) + self.ts_offset = result[0]-ts + if self.ifc.remote.stream: + self.ifc.remote.stream.ts_offset = self.ts_offset + logger.debug(f'tsun-time: {int(result[0]):08x}' + f' proxy-time: {ts:08x}' + f' offset: {self.ts_offset}') + return # ignore received response + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + + self.forward() + + def msg_heartbeat(self): + if self.ctrl.is_ind(): + if self.data_len == 9: + self.state = State.up # allow MODBUS cmds + if (self.modbus_polling): + self.mb_timer.start(self.mb_first_timeout) + self.db.set_db_def_value(Register.POLLING_INTERVAL, + self.mb_timeout) + self.__build_header(0x99) + self.ifc.tx_add(b'\x02') + self.__finish_send_msg() + + result = struct.unpack_from('!Bq', self.ifc.rx_peek(), + self.header_len) + resp_code = result[0] + ts = result[1]+self.ts_offset + logger.debug(f'inv-time: {int(result[1]):08x}' + f' tsun-time: {ts:08x}' + f' offset: {self.ts_offset}') + struct.pack_into('!Bq', self.ifc.rx_peek(), + self.header_len, resp_code, ts) + elif self.ctrl.is_resp(): + result = 
struct.unpack_from('!B', self.ifc.rx_peek(), + self.header_len) + resp_code = result[0] + logging.debug(f'Heartbeat-RespCode: {resp_code}') + return + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + + self.forward() + + def parse_msg_header(self): + result = struct.unpack_from('!lB', self.ifc.rx_peek(), + self.header_len) + + data_id = result[0] # len of complete message + id_len = result[1] # len of variable id string + logger.debug(f'Data_ID: 0x{data_id:08x} id_len: {id_len}') + + msg_hdr_len = 5+id_len+9 + + result = struct.unpack_from(f'!{id_len+1}pBq', self.ifc.rx_peek(), + self.header_len + 4) + + timestamp = result[2] + logger.debug(f'ID: {result[0]} B: {result[1]}') + logger.debug(f'time: {timestamp:08x}') + # logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime( + # "%Y-%m-%d %H:%M:%S")}') + return msg_hdr_len, timestamp + + def msg_collector_data(self): + if self.ctrl.is_ind(): + self.__build_header(0x99) + self.ifc.tx_add(b'\x01') + self.__finish_send_msg() + self.__process_data() + + elif self.ctrl.is_resp(): + return # ignore received response + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + + self.forward() + + def msg_inverter_data(self): + if self.ctrl.is_ind(): + self.__build_header(0x99) + self.ifc.tx_add(b'\x01') + self.__finish_send_msg() + self.__process_data() + self.state = State.up # allow MODBUS cmds + if (self.modbus_polling): + self.mb_timer.start(self.mb_first_timeout) + self.db.set_db_def_value(Register.POLLING_INTERVAL, + self.mb_timeout) + + elif self.ctrl.is_resp(): + return # ignore received response + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + + self.forward() + + def __process_data(self): + msg_hdr_len, ts = self.parse_msg_header() + + for key, update in self.db.parse(self.ifc.rx_peek(), self.header_len + + msg_hdr_len, self.node_id): + if update: + self._set_mqtt_timestamp(key, self._utcfromts(ts)) + 
self.new_data[key] = True + + def msg_ota_update(self): + if self.ctrl.is_req(): + self.inc_counter('OTA_Start_Msg') + elif self.ctrl.is_ind(): + pass # Ok, nothing to do + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + self.forward() + + def parse_modbus_header(self): + + msg_hdr_len = 5 + + result = struct.unpack_from('!lBB', self.ifc.rx_peek(), + self.header_len) + modbus_len = result[1] + return msg_hdr_len, modbus_len + + def parse_modbus_header2(self): + + msg_hdr_len = 6 + + result = struct.unpack_from('!lBBB', self.ifc.rx_peek(), + self.header_len) + modbus_len = result[2] + return msg_hdr_len, modbus_len + + def get_modbus_log_lvl(self) -> int: + if self.ctrl.is_req(): + return logging.INFO + elif self.ctrl.is_ind() and self.server_side: + return self.mb.last_log_lvl + return logging.WARNING + + def msg_modbus(self): + hdr_len, _ = self.parse_modbus_header() + self.__msg_modbus(hdr_len) + + def msg_modbus2(self): + hdr_len, _ = self.parse_modbus_header2() + self.__msg_modbus(hdr_len) + + def __msg_modbus(self, hdr_len): + data = self.ifc.rx_peek()[self.header_len: + self.header_len+self.data_len] + + if self.ctrl.is_req(): + rstream = self.ifc.remote.stream + if rstream.mb.recv_req(data[hdr_len:], rstream.msg_forward): + self.inc_counter('Modbus_Command') + else: + self.inc_counter('Invalid_Msg_Format') + elif self.ctrl.is_ind(): + self.modbus_elms = 0 + # logger.debug(f'Modbus Ind MsgLen: {modbus_len}') + if not self.server_side: + logger.warning('Unknown Message') + self.inc_counter('Unknown_Msg') + return + + for key, update, _ in self.mb.recv_resp(self.db, data[ + hdr_len:]): + if update: + self._set_mqtt_timestamp(key, self._utc()) + self.new_data[key] = True + self.modbus_elms += 1 # count for unit tests + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + self.forward() + + def msg_forward(self): + self.forward() + + def msg_unknown(self): + logger.warning(f"Unknow Msg: 
ID:{self.msg_id}") + self.inc_counter('Unknown_Msg') + self.forward() diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py new file mode 100644 index 0000000..417487a --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py @@ -0,0 +1,204 @@ + +from typing import Generator + +from infos import Infos, Register, ProxyMode, Fmt + + +class RegisterMap: + # make the class read/only by using __slots__ + __slots__ = () + + FMT_2_16BIT_VAL = '!HH' + FMT_3_16BIT_VAL = '!HHH' + FMT_4_16BIT_VAL = '!HHHH' + + map = { + # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': ' bool: + if 'dep' in row: + mode = row['dep'] + if self.client_mode: + return mode != ProxyMode.CLIENT + else: + return mode != ProxyMode.SERVER + return False + + def ha_confs(self, ha_prfx: str, node_id: str, snr: str, + sug_area: str = '') \ + -> Generator[tuple[dict, str], None, None]: + '''Generator function yields a json register struct for home-assistant + auto configuration and a unique entity string + + arguments: + prfx:str ==> MQTT prefix for the home assistant 'stat_t string + snr:str ==> serial number of the inverter, used to build unique + entity strings + sug_area:str ==> suggested area string from the config file''' + # iterate over RegisterMap.map and get the register values + for row in RegisterMap.map.values(): + info_id = row['reg'] + if self.__hide_topic(row): + res = self.ha_remove(info_id, node_id, snr) # noqa: E501 + else: + res = self.ha_conf(info_id, ha_prfx, node_id, snr, False, sug_area) # noqa: E501 + if res: + yield res + + def parse(self, buf, msg_type: int, rcv_ftype: int, node_id: str = '') \ + -> Generator[tuple[str, bool], None, None]: + '''parse a data sequence received from the inverter and + stores the values in Infos.db + + buf: buffer of the sequence to parse''' + for idx, row in RegisterMap.map.items(): + addr = idx & 0xffff + ftype = (idx >> 16) & 0xff + mtype = (idx >> 
24) & 0xff + if ftype != rcv_ftype or mtype != msg_type: + continue + if not isinstance(row, dict): + continue + info_id = row['reg'] + result = Fmt.get_value(buf, addr, row) + + keys, level, unit, must_incr = self._key_obj(info_id) + + if keys: + name, update = self.update_db(keys, must_incr, result) + yield keys[0], update + else: + name = str(f'info-id.0x{addr:x}') + update = False + + if update: + self.tracer.log(level, f'[{node_id}] GEN3PLUS: {name}' + f' : {result}{unit}') + + def build(self, len, msg_type: int, rcv_ftype: int): + buf = bytearray(len) + for idx, row in RegisterMap.map.items(): + addr = idx & 0xffff + ftype = (idx >> 16) & 0xff + mtype = (idx >> 24) & 0xff + if ftype != rcv_ftype or mtype != msg_type: + continue + if not isinstance(row, dict): + continue + if 'const' in row: + val = row['const'] + else: + info_id = row['reg'] + val = self.get_db_value(info_id) + if not val: + continue + Fmt.set_value(buf, addr, row, val) + return buf diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py new file mode 100644 index 0000000..f3680c9 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py @@ -0,0 +1,15 @@ +from asyncio import StreamReader, StreamWriter + +from inverter_base import InverterBase +from gen3plus.solarman_v5 import SolarmanV5 +from gen3plus.solarman_emu import SolarmanEmu + + +class InverterG3P(InverterBase): + def __init__(self, reader: StreamReader, writer: StreamWriter, + client_mode: bool = False): + remote_prot = None + if client_mode: + remote_prot = SolarmanEmu + super().__init__(reader, writer, 'solarman', + SolarmanV5, client_mode, remote_prot) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py new file mode 100644 index 0000000..66035bb --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py @@ -0,0 
+1,138 @@ +import logging +import struct + +from async_ifc import AsyncIfc +from gen3plus.solarman_v5 import SolarmanBase +from my_timer import Timer +from infos import Register + +logger = logging.getLogger('msg') + + +class SolarmanEmu(SolarmanBase): + def __init__(self, addr, ifc: "AsyncIfc", + server_side: bool, client_mode: bool): + super().__init__(addr, ifc, server_side=False, + _send_modbus_cb=None, + mb_timeout=8) + logging.debug('SolarmanEmu.init()') + self.db = ifc.remote.stream.db + self.snr = ifc.remote.stream.snr + self.hb_timeout = 60 + '''actual heatbeat timeout from the last response message''' + self.data_up_inv = self.db.get_db_value(Register.DATA_UP_INTERVAL) + '''time interval for getting new MQTT data messages''' + self.hb_timer = Timer(self.send_heartbeat_cb, self.node_id) + self.data_timer = Timer(self.send_data_cb, self.node_id) + self.last_sync = self._emu_timestamp() + '''timestamp when we send the last sync message (4110)''' + self.pkt_cnt = 0 + '''last sent packet number''' + + self.switch = { + + 0x4210: 'msg_data_ind', # real time data + 0x1210: self.msg_response, # at least every 5 minutes + + 0x4710: 'msg_hbeat_ind', # heatbeat + 0x1710: self.msg_response, # every 2 minutes + + 0x4110: 'msg_dev_ind', # device data, sync start + 0x1110: self.msg_response, # every 3 hours + + } + + self.log_lvl = { + + 0x4110: logging.INFO, # device data, sync start + 0x1110: logging.INFO, # every 3 hours + + 0x4210: logging.INFO, # real time data + 0x1210: logging.INFO, # at least every 5 minutes + + 0x4710: logging.DEBUG, # heatbeat + 0x1710: logging.DEBUG, # every 2 minutes + + } + + ''' + Our puplic methods + ''' + def close(self) -> None: + logging.info('SolarmanEmu.close()') + # we have references to methods of this class in self.switch + # so we have to erase self.switch, otherwise this instance can't be + # deallocated by the garbage collector ==> we get a memory leak + self.switch.clear() + self.log_lvl.clear() + self.hb_timer.close() + 
self.data_timer.close() + self.db = None + super().close() + + def _set_serial_no(self, snr: int): + logging.debug(f'SolarmanEmu._set_serial_no, snr: {snr}') + self.unique_id = str(snr) + + def _init_new_client_conn(self) -> bool: + logging.debug('SolarmanEmu.init_new()') + self.data_timer.start(self.data_up_inv) + return False + + def next_pkt_cnt(self): + '''get the next packet number''' + self.pkt_cnt = (self.pkt_cnt + 1) & 0xffffffff + return self.pkt_cnt + + def seconds_since_last_sync(self): + '''get seconds since last 0x4110 message was sent''' + return self._emu_timestamp() - self.last_sync + + def send_heartbeat_cb(self, exp_cnt): + '''send a heartbeat to the TSUN cloud''' + self._build_header(0x4710) + self.ifc.tx_add(struct.pack('> 8 + self.snd_idx = val & 0xff + else: + self.rcv_idx = val & 0xff + self.snd_idx = val >> 8 + + def get_send(self): + self.snd_idx += 1 + self.snd_idx &= 0xff + if self.server_side: + return (self.rcv_idx << 8) | self.snd_idx + else: + return (self.snd_idx << 8) | self.rcv_idx + + def __str__(self): + return f'{self.rcv_idx:02x}:{self.snd_idx:02x}' + + +class SolarmanBase(Message): + def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, + _send_modbus_cb, mb_timeout: int): + super().__init__('G3P', ifc, server_side, _send_modbus_cb, + mb_timeout) + ifc.rx_set_cb(self.read) + ifc.prot_set_timeout_cb(self._timeout) + ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn) + ifc.prot_set_update_header_cb(self.__update_header) + self.addr = addr + self.conn_no = ifc.get_conn_no() + self.header_len = 11 # overwrite construcor in class Message + self.control = 0 + self.seq = Sequence(server_side) + self.snr = 0 + self.time_ofs = 0 + + def read(self) -> float: + '''process all received messages in the _recv_buffer''' + self._read() + while True: + if not self.header_valid: + self.__parse_header(self.ifc.rx_peek(), + self.ifc.rx_len()) + + if self.header_valid and self.ifc.rx_len() >= \ + (self.header_len + 
self.data_len+2): + self.__process_complete_received_msg() + self.__flush_recv_msg() + else: + return 0 # wait 0s before sending a response + ''' + Our public methods + ''' + def _flow_str(self, server_side: bool, type: str): # noqa: F821 + switch = { + 'rx': ' <', + 'tx': ' >', + 'forwrd': '<< ', + 'drop': ' xx', + 'rxS': '> ', + 'txS': '< ', + 'forwrdS': ' >>', + 'dropS': 'xx ', + } + if server_side: + type += 'S' + return switch.get(type, '???') + + def get_fnc_handler(self, ctrl): + fnc = self.switch.get(ctrl, self.msg_unknown) + if callable(fnc): + return fnc, repr(fnc.__name__) + else: + return self.msg_unknown, repr(fnc) + + def _build_header(self, ctrl) -> None: + '''build header for new transmit message''' + self.send_msg_ofs = self.ifc.tx_len() + + self.ifc.tx_add(struct.pack( + ' None: + '''finish the transmit message, set lenght and checksum''' + _len = self.ifc.tx_len() - self.send_msg_ofs + struct.pack_into(' None: + + if (buf_len < self.header_len): # enough bytes for complete header? 
+ return + + result = struct.unpack_from(' bool: + crc = buf[self.data_len+11] + stop = buf[self.data_len+12] + if stop != 0x15: + hex_dump_memory(logging.ERROR, + 'Drop packet w invalid stop byte from ' + f'{self.addr}:', buf, buf_len) + self.inc_counter('Invalid_Msg_Format') + if self.ifc.rx_len() > (self.data_len+13): + next_start = buf[self.data_len+13] + if next_start != 0xa5: + # erase broken recv buffer + self.ifc.rx_clear() + + return False + + check = sum(buf[1:buf_len-2]) & 0xff + if check != crc: + self.inc_counter('Invalid_Msg_Format') + logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}' + f' Stop:{int(stop):#02x}') + # start & stop byte are valid, discard only this message + return False + + return True + + def __flush_recv_msg(self) -> None: + self.ifc.rx_get(self.header_len + self.data_len+2) + self.header_valid = False + + def __dispatch_msg(self) -> None: + _fnc, _str = self.get_fnc_handler(self.control) + if self.unique_id: + logger.info(self._flow_str(self.server_side, 'rx') + + f' Ctl: {int(self.control):#04x}' + + f' Msg: {_str}') + _fnc() + else: + logger.info(self._flow_str(self.server_side, 'drop') + + f' Ctl: {int(self.control):#04x}' + + f' Msg: {_str}') + + ''' + Message handler methods + ''' + def msg_response(self): + data = self.ifc.rx_peek()[self.header_len:] + result = struct.unpack_from(' None: + logging.debug('Solarman.close()') + # we have references to methods of this class in self.switch + # so we have to erase self.switch, otherwise this instance can't be + # deallocated by the garbage collector ==> we get a memory leak + self.switch.clear() + self.log_lvl.clear() + super().close() + + async def send_start_cmd(self, snr: int, host: str, + forward: bool, + start_timeout=MB_CLIENT_DATA_UP): + self.no_forwarding = True + self.establish_inv_emu = forward + self.snr = snr + self._set_serial_no(snr) + self.mb_timeout = start_timeout + self.db.set_db_def_value(Register.IP_ADDRESS, host) + 
self.db.set_db_def_value(Register.POLLING_INTERVAL, + self.mb_timeout) + self.db.set_db_def_value(Register.DATA_UP_INTERVAL, + 300) + self.db.set_db_def_value(Register.COLLECT_INTERVAL, + 1) + self.db.set_db_def_value(Register.HEARTBEAT_INTERVAL, + 120) + self.db.set_db_def_value(Register.SENSOR_LIST, + Fmt.hex4((self.sensor_list, ))) + self.new_data['controller'] = True + + self.state = State.up + self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG) + self.mb_timer.start(self.mb_timeout) + + def new_state_up(self): + if self.state is not State.up: + self.state = State.up + if (self.modbus_polling): + self.mb_timer.start(self.mb_first_timeout) + self.db.set_db_def_value(Register.POLLING_INTERVAL, + self.mb_timeout) + + def establish_emu(self): + _len = 223 + build_msg = self.db.build(_len, 0x41, 2) + struct.pack_into( + ' {inv}') + if (type(inv) is dict and 'monitor_sn' in inv + and inv['monitor_sn'] == snr): + self.__set_config_parms(inv) + self.db.set_pv_module_details(inv) + logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501 + + self.db.set_db_def_value(Register.COLLECTOR_SNR, snr) + self.db.set_db_def_value(Register.SERIAL_NUMBER, key) + break + else: + self.node_id = '' + self.sug_area = '' + if 'allow_all' not in inverters or not inverters['allow_all']: + self.inc_counter('Unknown_SNR') + self.unique_id = None + logger.warning(f'ignore message from unknow inverter! 
(SerialNo: {serial_no})') # noqa: E501 + return + logger.warning(f'SerialNo {serial_no} not known but accepted!') + + self.unique_id = serial_no + + def forward(self, buffer, buflen) -> None: + '''add the actual receive msg to the forwarding queue''' + if self.no_forwarding: + return + tsun = Config.get('solarman') + if tsun['enabled']: + self.ifc.fwd_add(buffer[:buflen]) + self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') + + _, _str = self.get_fnc_handler(self.control) + logger.info(self._flow_str(self.server_side, 'forwrd') + + f' Ctl: {int(self.control):#04x}' + f' Msg: {_str}') + + def _init_new_client_conn(self) -> bool: + return False + + def _heartbeat(self) -> int: + return 60 # pragma: no cover + + def __send_ack_rsp(self, msgtype, ftype, ack=1): + self._build_header(msgtype) + self.ifc.tx_add(struct.pack(' bool: + return not cmd.startswith(tuple(self.at_acl[connection]['allow'])) or \ + cmd.startswith(tuple(self.at_acl[connection]['block'])) + + async def send_at_cmd(self, at_cmd: str) -> None: + if self.state != State.up: + logger.warning(f'[{self.node_id}] ignore AT+ cmd,' + ' as the state is not UP') + return + at_cmd = at_cmd.strip() + + if self.at_cmd_forbidden(cmd=at_cmd, connection='mqtt'): + data_json = f'\'{at_cmd}\' is forbidden' + node_id = self.node_id + key = 'at_resp' + logger.info(f'{key}: {data_json}') + await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 + return + + self.forward_at_cmd_resp = False + self._build_header(0x4510) + self.ifc.tx_add(struct.pack(f'> 8 + for key, update in self.db.parse(self.ifc.rx_peek(), msg_type, ftype, + self.node_id): + if update: + if key == 'inverter': + inv_update = True + self._set_mqtt_timestamp(key, ts) + self.new_data[key] = True + + if inv_update: + self.__build_model_name() + ''' + Message handler methods + ''' + def msg_unknown(self): + logger.warning(f"Unknow Msg: ID:{int(self.control):#04x}") + self.inc_counter('Unknown_Msg') + self.__forward_msg() + + 
def msg_dev_ind(self): + data = self.ifc.rx_peek()[self.header_len:] + result = struct.unpack_from(self.HDR_FMT, data, 0) + ftype = result[0] # always 2 + total = result[1] + tim = result[2] + res = result[3] # always zero + logger.info(f'frame type:{ftype:02x}' + f' timer:{tim:08x}s null:{res}') + if self.time_ofs: + # dt = datetime.fromtimestamp(total + self.time_ofs) + # logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}') + ts = total + self.time_ofs + else: + ts = None + self.__process_data(ftype, ts) + self.sensor_list = int(self.db.get_db_value(Register.SENSOR_LIST, 0), + 16) + self.__forward_msg() + self.__send_ack_rsp(0x1110, ftype) + + def msg_data_ind(self): + data = self.ifc.rx_peek() + result = struct.unpack_from(' int: + ftype = self.ifc.rx_peek()[self.header_len] + if ftype == self.AT_CMD: + if self.forward_at_cmd_resp: + return logging.INFO + return logging.DEBUG + elif ftype == self.MB_RTU_CMD \ + and self.server_side: + return self.mb.last_log_lvl + + return logging.WARNING + + def msg_command_rsp(self): + data = self.ifc.rx_peek()[self.header_len: + self.header_len+self.data_len] + ftype = data[0] + if ftype == self.AT_CMD: + if not self.forward_at_cmd_resp: + data_json = data[14:].decode("utf-8") + node_id = self.node_id + key = 'at_resp' + logger.info(f'{key}: {data_json}') + self.publish_mqtt(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 + return + elif ftype == self.MB_RTU_CMD: + self.__modbus_command_rsp(data) + return + self.__forward_msg() + + def __parse_modbus_rsp(self, data): + inv_update = False + self.modbus_elms = 0 + for key, update, _ in self.mb.recv_resp(self.db, data[14:]): + self.modbus_elms += 1 + if update: + if key == 'inverter': + inv_update = True + self._set_mqtt_timestamp(key, self._timestamp()) + self.new_data[key] = True + return inv_update + + def __modbus_command_rsp(self, data): + '''precess MODBUS RTU response''' + valid = data[1] + modbus_msg_len = self.data_len - 14 + # 
logger.debug(f'modbus_len:{modbus_msg_len} accepted:{valid}') + if valid == 1 and modbus_msg_len > 4: + # logger.info(f'first byte modbus:{data[14]}') + inv_update = self.__parse_modbus_rsp(data) + if inv_update: + self.__build_model_name() + + if self.establish_inv_emu and not self.ifc.remote.stream: + self.establish_emu() + + def msg_hbeat_ind(self): + data = self.ifc.rx_peek()[self.header_len:] + result = struct.unpack_from(' str | int: + if not reverse: + return f'{val[0]:04x}' + else: + return int(val, 16) + + @staticmethod + def mac(val: tuple | str, reverse=False) -> str | tuple: + if not reverse: + return "%02x:%02x:%02x:%02x:%02x:%02x" % val + else: + return ( + int(val[0:2], 16), int(val[3:5], 16), + int(val[6:8], 16), int(val[9:11], 16), + int(val[12:14], 16), int(val[15:], 16)) + + @staticmethod + def version(val: tuple | str, reverse=False) -> str | int: + if not reverse: + x = val[0] + return f'V{(x >> 12)}.{(x >> 8) & 0xf}' \ + f'.{(x >> 4) & 0xf}{x & 0xf:1X}' + else: + arr = val[1:].split('.') + return int(arr[0], 10) << 12 | \ + int(arr[1], 10) << 8 | \ + int(arr[2][:-1], 10) << 4 | \ + int(arr[2][-1:], 16) + + @staticmethod + def set_value(buf: bytearray, idx: int, row: dict, val): + '''Get a value from buf and interpret as in row defined''' + fmt = row['fmt'] + if 'offset' in row: + val = val - row['offset'] + if 'quotient' in row: + val = round(val * row['quotient']) + if 'ratio' in row: + val = round(val / row['ratio']) + if 'func' in row: + val = row['func'](val, reverse=True) + if isinstance(val, str): + val = bytes(val, 'UTF8') + + if isinstance(val, tuple): + struct.pack_into(fmt, buf, idx, *val) + else: + struct.pack_into(fmt, buf, idx, val) + + +class ClrAtMidnight: + __clr_at_midnight = [Register.PV1_DAILY_GENERATION, Register.PV2_DAILY_GENERATION, Register.PV3_DAILY_GENERATION, Register.PV4_DAILY_GENERATION, Register.PV5_DAILY_GENERATION, Register.PV6_DAILY_GENERATION, Register.DAILY_GENERATION] # noqa: E501 + db = {} + + @classmethod + 
def add(cls, keys: list, prfx: str, reg: Register) -> None: + if reg not in cls.__clr_at_midnight: + return + + prfx += f'{keys[0]}' + db_dict = cls.db + if prfx not in db_dict: + db_dict[prfx] = {} + db_dict = db_dict[prfx] + + for key in keys[1:-1]: + if key not in db_dict: + db_dict[key] = {} + db_dict = db_dict[key] + db_dict[keys[-1]] = 0 + + @classmethod + def elm(cls) -> Generator[tuple[str, dict], None, None]: + for reg, name in cls.db.items(): + yield reg, name + cls.db = {} + + +class Infos: + __slots__ = ('db', 'tracer', ) + + LIGHTNING = 'mdi:lightning-bolt' + COUNTER = 'mdi:counter' + GAUGE = 'mdi:gauge' + SOLAR_POWER_VAR = 'mdi:solar-power-variant' + SOLAR_POWER = 'mdi:solar-power' + WIFI = 'mdi:wifi' + UPDATE = 'mdi:update' + DAILY_GEN = 'Daily Generation' + TOTAL_GEN = 'Total Generation' + FMT_INT = '| int' + FMT_FLOAT = '| float' + FMT_STRING_SEC = '| string + " s"' + stat = {} + app_name = os.getenv('SERVICE_NAME', 'proxy') + version = os.getenv('VERSION', 'unknown') + new_stat_data = {} + + @classmethod + def static_init(cls): + logging.debug('Initialize proxy statistics') + # init proxy counter in the class.stat dictionary + cls.stat['proxy'] = {} + for key in cls.__info_defs: + name = cls.__info_defs[key]['name'] + if name[0] == 'proxy': + cls.stat['proxy'][name[1]] = 0 + + # add values from the environment to the device definition table + prxy = cls.__info_devs['proxy'] + prxy['sw'] = cls.version + prxy['mdl'] = cls.app_name + + def __init__(self): + self.db = {} + self.tracer = logging.getLogger('data') + + __info_devs = { + 'proxy': {'singleton': True, 'name': 'Proxy', 'mf': 'Stefan Allius'}, # noqa: E501 + 'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': Register.CHIP_MODEL, 'mf': Register.CHIP_TYPE, 'sw': Register.COLLECTOR_FW_VERSION, 'mac': Register.MAC_ADDR, 'sn': Register.COLLECTOR_SNR}, # noqa: E501 + 'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': Register.EQUIPMENT_MODEL, 'mf': Register.MANUFACTURER, 
'sw': Register.VERSION, 'sn': Register.SERIAL_NUMBER}, # noqa: E501 + 'input_pv1': {'via': 'inverter', 'name': 'Module PV1', 'mdl': Register.PV1_MODEL, 'mf': Register.PV1_MANUFACTURER}, # noqa: E501 + 'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'mdl': Register.PV2_MODEL, 'mf': Register.PV2_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 2}}, # noqa: E501 + 'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'mdl': Register.PV3_MODEL, 'mf': Register.PV3_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 3}}, # noqa: E501 + 'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'mdl': Register.PV4_MODEL, 'mf': Register.PV4_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 4}}, # noqa: E501 + 'input_pv5': {'via': 'inverter', 'name': 'Module PV5', 'mdl': Register.PV5_MODEL, 'mf': Register.PV5_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 5}}, # noqa: E501 + 'input_pv6': {'via': 'inverter', 'name': 'Module PV6', 'mdl': Register.PV6_MODEL, 'mf': Register.PV6_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 6}}, # noqa: E501 + } + + __comm_type_val_tpl = "{%set com_types = ['n/a','Wi-Fi', 'G4', 'G5', 'GPRS'] %}{{com_types[value_json['Communication_Type']|int(0)]|default(value_json['Communication_Type'])}}" # noqa: E501 + __work_mode_val_tpl = "{%set mode = ['Normal-Mode', 'Aging-Mode', 'ATE-Mode', 'Shielding GFDI', 'DTU-Mode'] %}{{mode[value_json['Work_Mode']|int(0)]|default(value_json['Work_Mode'])}}" # noqa: E501 + __status_type_val_tpl = "{%set inv_status = ['Off-line', 'On-grid', 'Off-grid'] %}{{inv_status[value_json['Inverter_Status']|int(0)]|default(value_json['Inverter_Status'])}}" # noqa: E501 + __rated_power_val_tpl = "{% if 'Rated_Power' in value_json and value_json['Rated_Power'] != None %}{{value_json['Rated_Power']|string() +' W'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 + __designed_power_val_tpl = ''' +{% if 'Max_Designed_Power' in value_json and + value_json['Max_Designed_Power'] != None %} + {% 
if value_json['Max_Designed_Power'] | int(0xffff) < 0x8000 %} + {{value_json['Max_Designed_Power']|string() +' W'}} + {% else %} + n/a + {% endif %} +{% else %} + {{ this.state }} +{% endif %} +''' + __inv_alarm_val_tpl = ''' +{% if 'Inverter_Alarm' in value_json and + value_json['Inverter_Alarm'] != None %} + {% set val_int = value_json['Inverter_Alarm'] | int %} + {% if val_int == 0 %} + {% set result = 'noAlarm'%} + {%else%} + {% set result = '' %} + {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} + {% endif %} + {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} + {% endif %} + {% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%} + {% endif %} + {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} + {% endif %} + {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} + {% endif %} + {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} + {% endif %} + {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} + {% endif %} + {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} + {% endif %} + {% if val_int | bitwise_and(9)%}{% set result = result + 'noUtility, '%} + {% endif %} + {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} + {% endif %} + {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} + {% endif %} + {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} + {% endif %} + {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} + {% endif %} + {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} + {% endif %} + {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} + {% endif %} + {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} + {% endif %} + {% endif %} + {{ result }} +{% else %} + {{ this.state }} +{% endif %} +''' + __inv_fault_val_tpl = ''' +{% if 'Inverter_Fault' in value_json and + value_json['Inverter_Fault'] 
!= None %} + {% set val_int = value_json['Inverter_Fault'] | int %} + {% if val_int == 0 %} + {% set result = 'noFault'%} + {%else%} + {% set result = '' %} + {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} + {% endif %} + {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} + {% endif %} + {% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%} + {% endif %} + {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} + {% endif %} + {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} + {% endif %} + {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} + {% endif %} + {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} + {% endif %} + {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} + {% endif %} + {% if val_int | bitwise_and(9)%}{% set result = result + 'Bit9, '%} + {% endif %} + {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} + {% endif %} + {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} + {% endif %} + {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} + {% endif %} + {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} + {% endif %} + {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} + {% endif %} + {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} + {% endif %} + {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} + {% endif %} + {% endif %} + {{ result }} +{% else %} + {{ this.state }} +{% endif %} +''' + + __input_coef_val_tpl = "{% if 'Output_Coefficient' in value_json and value_json['Input_Coefficient'] != None %}{{value_json['Input_Coefficient']|string() +' %'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 + __output_coef_val_tpl = "{% if 'Output_Coefficient' in value_json and value_json['Output_Coefficient'] != None %}{{value_json['Output_Coefficient']|string() +' %'}}{% else %}{{ this.state 
}}{% endif %}" # noqa: E501 + + __info_defs = { + # collector values used for device registration: + Register.COLLECTOR_FW_VERSION: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.CHIP_TYPE: {'name': ['collector', 'Chip_Type'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.CHIP_MODEL: {'name': ['collector', 'Chip_Model'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.TRACE_URL: {'name': ['collector', 'Trace_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.LOGGER_URL: {'name': ['collector', 'Logger_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.MAC_ADDR: {'name': ['collector', 'MAC-Addr'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.COLLECTOR_SNR: {'name': ['collector', 'Serial_Number'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501 + + + # inverter values used for device registration: + Register.PRODUCT_NAME: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.MANUFACTURER: {'name': ['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.VERSION: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.SERIAL_NUMBER: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.EQUIPMENT_MODEL: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.NO_INPUTS: {'name': ['inverter', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.MAX_DESIGNED_POWER: {'name': ['inverter', 'Max_Designed_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'designed_power_', 'val_tpl': __designed_power_val_tpl, 'name': 'Max Designed 
Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.RATED_POWER: {'name': ['inverter', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'val_tpl': __rated_power_val_tpl, 'name': 'Rated Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.WORK_MODE: {'name': ['inverter', 'Work_Mode'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'work_mode_', 'name': 'Work Mode', 'val_tpl': __work_mode_val_tpl, 'icon': 'mdi:power', 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.INPUT_COEFFICIENT: {'name': ['inverter', 'Input_Coefficient'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'input_coef_', 'val_tpl': __input_coef_val_tpl, 'name': 'Input Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.OUTPUT_COEFFICIENT: {'name': ['inverter', 'Output_Coefficient'], 'level': logging.INFO, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'output_coef_', 'val_tpl': __output_coef_val_tpl, 'name': 'Output Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV1_MANUFACTURER: {'name': ['inverter', 'PV1_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV1_MODEL: {'name': ['inverter', 'PV1_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV2_MANUFACTURER: {'name': ['inverter', 'PV2_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV2_MODEL: {'name': ['inverter', 'PV2_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV3_MANUFACTURER: {'name': ['inverter', 'PV3_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV3_MODEL: {'name': ['inverter', 'PV3_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + 
Register.PV4_MANUFACTURER: {'name': ['inverter', 'PV4_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV4_MODEL: {'name': ['inverter', 'PV4_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV5_MANUFACTURER: {'name': ['inverter', 'PV5_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV5_MODEL: {'name': ['inverter', 'PV5_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV6_MANUFACTURER: {'name': ['inverter', 'PV6_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PV6_MODEL: {'name': ['inverter', 'PV6_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.BOOT_STATUS: {'name': ['inverter', 'BOOT_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.DSP_STATUS: {'name': ['inverter', 'DSP_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + # proxy: + Register.INVERTER_CNT: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': FMT_INT, 'name': 'Active Inverter Connections', 'icon': COUNTER}}, # noqa: E501 + Register.CLOUD_CONN_CNT: {'name': ['proxy', 'Cloud_Conn_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'cloud_conn_count_', 'fmt': FMT_INT, 'name': 'Active Cloud Connections', 'icon': COUNTER}}, # noqa: E501 + Register.UNKNOWN_SNR: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': FMT_INT, 'name': 'Unknown Serial No', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.UNKNOWN_MSG: {'name': ['proxy', 'Unknown_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_msg_', 'fmt': FMT_INT, 'name': 'Unknown Msg Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # 
noqa: E501 + Register.INVALID_DATA_TYPE: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': FMT_INT, 'name': 'Invalid Data Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.INTERNAL_ERROR: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': FMT_INT, 'name': 'Internal Error', 'icon': COUNTER, 'ent_cat': 'diagnostic', 'en': False}}, # noqa: E501 + Register.UNKNOWN_CTRL: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': FMT_INT, 'name': 'Unknown Control Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.OTA_START_MSG: {'name': ['proxy', 'OTA_Start_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'ota_start_cmd_', 'fmt': FMT_INT, 'name': 'OTA Start Cmd', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.SW_EXCEPTION: {'name': ['proxy', 'SW_Exception'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'sw_exception_', 'fmt': FMT_INT, 'name': 'Internal SW Exception', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.INVALID_MSG_FMT: {'name': ['proxy', 'Invalid_Msg_Format'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_msg_fmt_', 'fmt': FMT_INT, 'name': 'Invalid Message Format', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.AT_COMMAND: {'name': ['proxy', 'AT_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_', 'fmt': FMT_INT, 'name': 'AT Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: 
E501 + Register.AT_COMMAND_BLOCKED: {'name': ['proxy', 'AT_Command_Blocked'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_blocked_', 'fmt': FMT_INT, 'name': 'AT Command Blocked', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.MODBUS_COMMAND: {'name': ['proxy', 'Modbus_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'modbus_cmd_', 'fmt': FMT_INT, 'name': 'Modbus Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 + # 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':FMT_FLOAT,'name': 'Grid Voltage'}}, # noqa: E501 + + # events + Register.EVENT_ALARM: {'name': ['events', 'Inverter_Alarm'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_alarm_', 'name': 'Inverter Alarm', 'val_tpl': __inv_alarm_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 + Register.EVENT_FAULT: {'name': ['events', 'Inverter_Fault'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_fault_', 'name': 'Inverter Fault', 'val_tpl': __inv_fault_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 + Register.EVENT_BF1: {'name': ['events', 'Inverter_Bitfield_1'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.EVENT_BF2: {'name': ['events', 'Inverter_bitfield_2'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + # Register.EVENT_409: {'name': ['events', '409_No_Utility'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + # Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + + # grid measures: + Register.TS_GRID: {'name': ['grid', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: 
E501 + Register.GRID_VOLTAGE: {'name': ['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'out_volt_', 'fmt': FMT_FLOAT, 'name': 'Grid Voltage', 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.GRID_CURRENT: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': FMT_FLOAT, 'name': 'Grid Current', 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.GRID_FREQUENCY: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': FMT_FLOAT, 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.OUTPUT_POWER: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': FMT_FLOAT, 'name': 'Power'}}, # noqa: E501 + Register.INVERTER_TEMP: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': FMT_INT, 'name': 'Temperature'}}, # noqa: E501 + Register.INVERTER_STATUS: {'name': ['env', 'Inverter_Status'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_status_', 'name': 'Inverter Status', 'val_tpl': __status_type_val_tpl, 'icon': 'mdi:power'}}, # noqa: E501 + Register.DETECT_STATUS_1: {'name': ['env', 'Detect_Status_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.DETECT_STATUS_2: {'name': ['env', 'Detect_Status_2'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + + # input measures: + Register.TS_INPUT: {'name': ['input', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.PV1_VOLTAGE: {'name': ['input', 
'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv1_', 'val_tpl': "{{ (value_json['pv1']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV1_CURRENT: {'name': ['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv1_', 'val_tpl': "{{ (value_json['pv1']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV1_POWER: {'name': ['input', 'pv1', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv1_', 'val_tpl': "{{ (value_json['pv1']['Power'] | float)}}"}}, # noqa: E501 + Register.PV2_VOLTAGE: {'name': ['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv2_', 'val_tpl': "{{ (value_json['pv2']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV2_CURRENT: {'name': ['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv2_', 'val_tpl': "{{ (value_json['pv2']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV2_POWER: {'name': ['input', 'pv2', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv2_', 'val_tpl': "{{ (value_json['pv2']['Power'] | float)}}"}}, # noqa: E501 + Register.PV3_VOLTAGE: {'name': ['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv3_', 'val_tpl': "{{ (value_json['pv3']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # 
noqa: E501 + Register.PV3_CURRENT: {'name': ['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv3_', 'val_tpl': "{{ (value_json['pv3']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV3_POWER: {'name': ['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv3_', 'val_tpl': "{{ (value_json['pv3']['Power'] | float)}}"}}, # noqa: E501 + Register.PV4_VOLTAGE: {'name': ['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv4_', 'val_tpl': "{{ (value_json['pv4']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV4_CURRENT: {'name': ['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv4_', 'val_tpl': "{{ (value_json['pv4']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV4_POWER: {'name': ['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv4_', 'val_tpl': "{{ (value_json['pv4']['Power'] | float)}}"}}, # noqa: E501 + Register.PV5_VOLTAGE: {'name': ['input', 'pv5', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv5', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv5_', 'val_tpl': "{{ (value_json['pv5']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV5_CURRENT: {'name': ['input', 'pv5', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv5', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv5_', 'val_tpl': "{{ (value_json['pv5']['Current'] | 
float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV5_POWER: {'name': ['input', 'pv5', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv5', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv5_', 'val_tpl': "{{ (value_json['pv5']['Power'] | float)}}"}}, # noqa: E501 + Register.PV6_VOLTAGE: {'name': ['input', 'pv6', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv6', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv6_', 'val_tpl': "{{ (value_json['pv6']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV6_CURRENT: {'name': ['input', 'pv6', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv6', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv6_', 'val_tpl': "{{ (value_json['pv6']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.PV6_POWER: {'name': ['input', 'pv6', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv6', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv6_', 'val_tpl': "{{ (value_json['pv6']['Power'] | float)}}"}}, # noqa: E501 + Register.PV1_DAILY_GENERATION: {'name': ['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv1_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 + Register.PV1_TOTAL_GENERATION: {'name': ['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv1_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 + Register.PV2_DAILY_GENERATION: {'name': ['input', 'pv2', 'Daily_Generation'], 
'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv2_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 + Register.PV2_TOTAL_GENERATION: {'name': ['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv2_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 + Register.PV3_DAILY_GENERATION: {'name': ['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv3_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 + Register.PV3_TOTAL_GENERATION: {'name': ['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv3_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 + Register.PV4_DAILY_GENERATION: {'name': ['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv4_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 + Register.PV4_TOTAL_GENERATION: {'name': ['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv4_', 'name': TOTAL_GEN, 'val_tpl': "{{ 
(value_json['pv4']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 + Register.PV5_DAILY_GENERATION: {'name': ['input', 'pv5', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv5', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv5_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv5']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 + Register.PV5_TOTAL_GENERATION: {'name': ['input', 'pv5', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv5', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv5_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv5']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 + Register.PV6_DAILY_GENERATION: {'name': ['input', 'pv6', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv6', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv6_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv6']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 + Register.PV6_TOTAL_GENERATION: {'name': ['input', 'pv6', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv6', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv6_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv6']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 + # total: + Register.TS_TOTAL: {'name': ['total', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.DAILY_GENERATION: {'name': ['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_', 'fmt': FMT_FLOAT, 'name': DAILY_GEN, 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 + Register.TOTAL_GENERATION: 
{'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': FMT_FLOAT, 'name': TOTAL_GEN, 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 + + # controller: + Register.SIGNAL_STRENGTH: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': FMT_INT, 'name': 'Signal Strength', 'icon': WIFI}}, # noqa: E501 + Register.POWER_ON_TIME: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'fmt': FMT_INT, 'name': 'Power on Time', 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.COLLECT_INTERVAL: {'name': ['controller', 'Collect_Interval'], 'level': logging.DEBUG, 'unit': 'min', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_collect_intval_', 'fmt': '| string + " min"', 'name': 'Data Collect Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.CONNECT_COUNT: {'name': ['controller', 'Connect_Count'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'connect_count_', 'fmt': FMT_INT, 'name': 'Connect Count', 'icon': COUNTER, 'comp': 'sensor', 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.COMMUNICATION_TYPE: {'name': ['controller', 'Communication_Type'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'comm_type_', 'name': 'Communication Type', 'val_tpl': __comm_type_val_tpl, 'comp': 'sensor', 'icon': WIFI}}, # noqa: E501 + Register.DATA_UP_INTERVAL: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_up_intval_', 'fmt': FMT_STRING_SEC, 'name': 
'Data Up Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.HEARTBEAT_INTERVAL: {'name': ['controller', 'Heartbeat_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'heartbeat_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Heartbeat Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.IP_ADDRESS: {'name': ['controller', 'IP_Address'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'ip_address_', 'fmt': '| string', 'name': 'IP Address', 'icon': WIFI, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.POLLING_INTERVAL: {'name': ['controller', 'Polling_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'polling_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Polling Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.SENSOR_LIST: {'name': ['controller', 'Sensor_List'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.SSID: {'name': ['controller', 'WiFi_SSID'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + + Register.OUTPUT_SHUTDOWN: {'name': ['other', 'Output_Shutdown'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.RATED_LEVEL: {'name': ['other', 'Rated_Level'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.GRID_VOLT_CAL_COEF: {'name': ['other', 'Grid_Volt_Cal_Coef'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PROD_COMPL_TYPE: {'name': ['other', 'Prod_Compliance_Type'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.INV_UNKNOWN_1: {'name': ['inv_unknown', 'Unknown_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + + } + + @property + def info_devs(self) -> dict: + return self.__info_devs + + @property + def info_defs(self) -> dict: + return self.__info_defs + + def dev_value(self, idx: str | int) -> str | int | float | dict | 
None: + '''returns the stored device value from our database + + idx:int ==> lookup the value in the database and return it as str, + int or float. If the value is not available return 'None' + idx:str ==> returns the string as a fixed value without a + database lookup + ''' + if type(idx) is str: + return idx # return idx as a fixed value + elif idx in self.info_defs: + row = self.info_defs[idx] + if 'singleton' in row and row['singleton']: + db_dict = self.stat + else: + db_dict = self.db + + keys = row['name'] + + for key in keys: + if key not in db_dict: + return None # value not found in the database + db_dict = db_dict[key] + return db_dict # value of the reqeusted entry + + return None # unknwon idx, not in info_defs + + @classmethod + def inc_counter(cls, counter: str) -> None: + '''inc proxy statistic counter''' + db_dict = cls.stat['proxy'] + db_dict[counter] += 1 + cls.new_stat_data['proxy'] = True + + @classmethod + def dec_counter(cls, counter: str) -> None: + '''dec proxy statistic counter''' + db_dict = cls.stat['proxy'] + db_dict[counter] -= 1 + cls.new_stat_data['proxy'] = True + + def ha_proxy_confs(self, ha_prfx: str, node_id: str, snr: str) \ + -> Generator[tuple[str, str, str, str], None, None]: + '''Generator function yields json register struct for home-assistant + auto configuration and the unique entity string, for all proxy + registers + + arguments: + ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t string + node_id:str ==> node id of the inverter, used to build unique entity + snr:str ==> serial number of the inverter, used to build unique + entity strings + ''' + # iterate over RegisterMap.map and get the register values for entries + # with Singleton=True, which means that this is a proxy register + for reg in self.info_defs.keys(): + res = self.ha_conf(reg, ha_prfx, node_id, snr, True) # noqa: E501 + if res: + yield res + + def ha_conf(self, key, ha_prfx, node_id, snr, singleton: bool, + sug_area: str = '') -> tuple[str, 
str, str, str] | None: + '''Method to build json register struct for home-assistant + auto configuration and the unique entity string, for all proxy + registers + + arguments: + key ==> index of info_defs dict which reference the topic + ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t string + node_id:str ==> node id of the inverter, used to build unique entity + snr:str ==> serial number of the inverter, used to build unique + entity strings + singleton ==> bool to allow/disaalow proxy topics which are common + for all invters + sug_area ==> area name for home assistant + ''' + if key not in self.info_defs: + return None + row = self.info_defs[key] + + if 'singleton' in row: + if singleton != row['singleton']: + return None + elif singleton: + return None + + # check if we have details for home assistant + if 'ha' in row: + return self.__ha_conf(row, key, ha_prfx, node_id, snr, sug_area) + return None + + def __ha_conf(self, row, key, ha_prfx, node_id, snr, + sug_area: str) -> tuple[str, str, str, str] | None: + ha = row['ha'] + if 'comp' in ha: + component = ha['comp'] + else: + component = 'sensor' + attr = self.__build_attr(row, key, ha_prfx, node_id, snr) + if 'dev' in ha: + device = self.info_devs[ha['dev']] + if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501 + return None + attr['dev'] = self.__build_dev(device, key, ha, snr, + sug_area) + attr['o'] = self.__build_origin() + + else: + self.inc_counter('Internal_Error') + logging.error(f"Infos.info_defs: the row for {key} " + "missing 'dev' value for ha register") + return json.dumps(attr), component, node_id, attr['uniq_id'] + + def __build_attr(self, row, key, ha_prfx, node_id, snr): + attr = {} + ha = row['ha'] + if 'name' in ha: + attr['name'] = ha['name'] + else: + attr['name'] = row['name'][-1] + prfx = ha_prfx + node_id + attr['stat_t'] = prfx + row['name'][0] + attr['dev_cla'] = ha['dev_cla'] + attr['stat_cla'] = ha['stat_cla'] + attr['uniq_id'] = ha['id']+snr + if 
'val_tpl' in ha: + attr['val_tpl'] = ha['val_tpl'] + elif 'fmt' in ha: + attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }} # noqa: E501 + else: + self.inc_counter('Internal_Error') + logging.error(f"Infos.info_defs: the row for {key} do" + " not have a 'val_tpl' nor a 'fmt' value") + # add unit_of_meas only, if status_class isn't none. If + # status_cla is None we want a number format and not line + # graph in home assistant. A unit will change the number + # format to a line graph + if 'unit' in row and attr['stat_cla'] is not None: + attr['unit_of_meas'] = row['unit'] # 'unit_of_meas' + if 'icon' in ha: + attr['ic'] = ha['icon'] # icon for the entity + if 'nat_prc' in ha: # pragma: no cover + attr['sug_dsp_prc'] = ha['nat_prc'] # precison of floats + if 'ent_cat' in ha: + attr['ent_cat'] = ha['ent_cat'] # diagnostic, config + # enabled_by_default is deactivated, since it avoid the via + # setup of the devices. It seems, that there is a bug in home + # assistant. 
tested with 'Home Assistant 2023.10.4' + # if 'en' in ha: # enabled_by_default + # attr['en'] = ha['en'] + return attr + + def __build_dev(self, device, key, ha, snr, sug_area): + dev = {} + singleton = 'singleton' in device and device['singleton'] + # the same name for 'name' and 'suggested area', so we get + # dedicated devices in home assistant with short value + # name and headline + if (sug_area == '' or singleton): + dev['name'] = device['name'] + dev['sa'] = device['name'] + else: + dev['name'] = device['name']+' - '+sug_area + dev['sa'] = device['name']+' - '+sug_area + self.__add_via_dev(dev, device, key, snr) + for key in ('mdl', 'mf', 'sw', 'hw', 'sn'): # add optional + # values fpr 'modell', 'manufacturer', 'sw version' and + # 'hw version' + if key in device: + data = self.dev_value(device[key]) + if data is not None: + dev[key] = data + if singleton: + dev['ids'] = [f"{ha['dev']}"] + else: + dev['ids'] = [f"{ha['dev']}_{snr}"] + self.__add_connection(dev, device) + return dev + + def __add_connection(self, dev, device): + if 'mac' in device: + mac_str = self.dev_value(device['mac']) + if mac_str is not None: + if 12 == len(mac_str): + mac_str = ':'.join(mac_str[i:i+2] for i in range(0, 12, 2)) + dev['cns'] = [["mac", f"{mac_str}"]] + + def __add_via_dev(self, dev, device, key, snr): + if 'via' in device: # add the link to the parent device + via = device['via'] + if via in self.info_devs: + via_dev = self.info_devs[via] + if 'singleton' in via_dev and via_dev['singleton']: + dev['via_device'] = via + else: + dev['via_device'] = f"{via}_{snr}" + else: + self.inc_counter('Internal_Error') + logging.error(f"Infos.info_defs: the row for " + f"{key} has an invalid via value: " + f"{via}") + + def __build_origin(self): + origin = {} + origin['name'] = self.app_name + origin['sw'] = self.version + return origin + + def ha_remove(self, key, node_id, snr) -> tuple[str, str, str, str] | None: + '''Method to build json unregister struct for home-assistant + to 
remove topics per auto configuration. Only for inverer topics. + + arguments: + key ==> index of info_defs dict which reference the topic + node_id:str ==> node id of the inverter, used to build unique entity + snr:str ==> serial number of the inverter, used to build unique + entity strings + + hint: + the returned tuple must have the same format as self.ha_conf() + ''' + if key not in self.info_defs: + return None + row = self.info_defs[key] + + if 'singleton' in row and row['singleton']: + return None + + # check if we have details for home assistant + if 'ha' in row: + ha = row['ha'] + if 'comp' in ha: + component = ha['comp'] + else: + component = 'sensor' + attr = {} + uniq_id = ha['id']+snr + + return json.dumps(attr), component, node_id, uniq_id + return None + + def _key_obj(self, id: Register) -> tuple: + d = self.info_defs.get(id, {'name': None, 'level': logging.DEBUG, + 'unit': ''}) + if 'ha' in d and 'must_incr' in d['ha']: + must_incr = d['ha']['must_incr'] + else: + must_incr = False + + return d['name'], d['level'], d['unit'], must_incr + + def update_db(self, keys: list, must_incr: bool, result): + name = '' + db_dict = self.db + for key in keys[:-1]: + if key not in db_dict: + db_dict[key] = {} + db_dict = db_dict[key] + name += key + '.' + if keys[-1] not in db_dict: + update = (not must_incr or result > 0) + else: + if must_incr: + update = db_dict[keys[-1]] < result + else: + update = db_dict[keys[-1]] != result + if update: + db_dict[keys[-1]] = result + name += keys[-1] + return name, update + + def set_db_def_value(self, id: Register, value) -> None: + '''set default value''' + row = self.info_defs[id] + if isinstance(row, dict): + keys = row['name'] + self.update_db(keys, False, value) + + def reg_clr_at_midnight(self, prfx: str, + check_dependencies: bool = True) -> None: + '''register all registers for the 'ClrAtMidnight' class and + check if device of every register is available otherwise ignore + the register. 
+ + prfx:str ==> prefix for the home assistant 'stat_t string'' + ''' + for id, row in self.info_defs.items(): + if check_dependencies and 'ha' in row: + ha = row['ha'] + if 'dev' in ha: + device = self.info_devs[ha['dev']] + if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501 + continue + + keys = row['name'] + ClrAtMidnight.add(keys, prfx, id) + + def get_db_value(self, id: Register, not_found_result: any = None): + '''get database value''' + if id not in self.info_defs: + return not_found_result + row = self.info_defs[id] + if isinstance(row, dict): + keys = row['name'] + elm = self.db + for key in keys: + if key not in elm: + return not_found_result + elm = elm[key] + return elm + return not_found_result + + def ignore_this_device(self, dep: dict) -> bool: + '''Checks the equation in the dep(endency) dict + + returns 'False' only if the equation is valid; + 'True' in any other case''' + if 'reg' in dep: + value = self.dev_value(dep['reg']) + if not value: + return True + + if 'gte' in dep: + return value < dep['gte'] + elif 'less_eq' in dep: + return value > dep['less_eq'] + return True + + def set_pv_module_details(self, inv: dict) -> None: + pvs = {'pv1': {'manufacturer': Register.PV1_MANUFACTURER, 'model': Register.PV1_MODEL}, # noqa: E501 + 'pv2': {'manufacturer': Register.PV2_MANUFACTURER, 'model': Register.PV2_MODEL}, # noqa: E501 + 'pv3': {'manufacturer': Register.PV3_MANUFACTURER, 'model': Register.PV3_MODEL}, # noqa: E501 + 'pv4': {'manufacturer': Register.PV4_MANUFACTURER, 'model': Register.PV4_MODEL}, # noqa: E501 + 'pv5': {'manufacturer': Register.PV5_MANUFACTURER, 'model': Register.PV5_MODEL}, # noqa: E501 + 'pv6': {'manufacturer': Register.PV6_MANUFACTURER, 'model': Register.PV6_MODEL} # noqa: E501 + } + + for key, reg in pvs.items(): + if key in inv: + if 'manufacturer' in inv[key]: + self.set_db_def_value(reg['manufacturer'], + inv[key]['manufacturer']) + if 'type' in inv[key]: + self.set_db_def_value(reg['model'], 
inv[key]['type']) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py b/ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py new file mode 100644 index 0000000..757b883 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py @@ -0,0 +1,178 @@ +import weakref +import asyncio +import logging +import traceback +import json +import gc +from aiomqtt import MqttCodeError +from asyncio import StreamReader, StreamWriter + +from inverter_ifc import InverterIfc +from proxy import Proxy +from async_stream import StreamPtr +from async_stream import AsyncStreamClient +from async_stream import AsyncStreamServer +from config import Config +from infos import Infos + +logger_mqtt = logging.getLogger('mqtt') + + +class InverterBase(InverterIfc, Proxy): + + def __init__(self, reader: StreamReader, writer: StreamWriter, + config_id: str, prot_class, + client_mode: bool = False, + remote_prot_class=None): + Proxy.__init__(self) + self._registry.append(weakref.ref(self)) + self.addr = writer.get_extra_info('peername') + self.config_id = config_id + if remote_prot_class: + self.prot_class = remote_prot_class + else: + self.prot_class = prot_class + self.__ha_restarts = -1 + self.remote = StreamPtr(None) + ifc = AsyncStreamServer(reader, writer, + self.async_publ_mqtt, + self.create_remote, + self.remote) + + self.local = StreamPtr( + prot_class(self.addr, ifc, True, client_mode), ifc + ) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb) -> None: + logging.debug(f'InverterBase.__exit__() {self.addr}') + self.__del_remote() + + self.local.stream.close() + self.local.stream = None + self.local.ifc.close() + self.local.ifc = None + + # now explicitly call garbage collector to release unreachable objects + unreachable_obj = gc.collect() + logging.debug( + f'InverterBase.__exit: freed unreachable obj: {unreachable_obj}') + + def __del_remote(self): + if self.remote.stream: + self.remote.stream.close() + self.remote.stream = None 
+ + if self.remote.ifc: + self.remote.ifc.close() + self.remote.ifc = None + + async def disc(self, shutdown_started=False) -> None: + if self.remote.stream: + self.remote.stream.shutdown_started = shutdown_started + if self.remote.ifc: + await self.remote.ifc.disc() + if self.local.stream: + self.local.stream.shutdown_started = shutdown_started + if self.local.ifc: + await self.local.ifc.disc() + + def healthy(self) -> bool: + logging.debug('InverterBase healthy()') + + if self.local.ifc and not self.local.ifc.healthy(): + return False + if self.remote.ifc and not self.remote.ifc.healthy(): + return False + return True + + async def create_remote(self) -> None: + '''Establish a client connection to the TSUN cloud''' + + tsun = Config.get(self.config_id) + host = tsun['host'] + port = tsun['port'] + addr = (host, port) + stream = self.local.stream + + try: + logging.info(f'[{stream.node_id}] Connect to {addr}') + connect = asyncio.open_connection(host, port) + reader, writer = await connect + ifc = AsyncStreamClient( + reader, writer, self.local, self.__del_remote) + + self.remote.ifc = ifc + if hasattr(stream, 'id_str'): + self.remote.stream = self.prot_class( + addr, ifc, server_side=False, + client_mode=False, id_str=stream.id_str) + else: + self.remote.stream = self.prot_class( + addr, ifc, server_side=False, + client_mode=False) + + logging.info(f'[{self.remote.stream.node_id}:' + f'{self.remote.stream.conn_no}] ' + f'Connected to {addr}') + asyncio.create_task(self.remote.ifc.client_loop(addr)) + + except (ConnectionRefusedError, TimeoutError) as error: + logging.info(f'{error}') + except Exception: + Infos.inc_counter('SW_Exception') + logging.error( + f"Inverter: Exception for {addr}:\n" + f"{traceback.format_exc()}") + + async def async_publ_mqtt(self) -> None: + '''publish data to MQTT broker''' + stream = self.local.stream + if not stream or not stream.unique_id: + return + # check if new inverter or collector infos are available or when the + # home 
assistant has changed the status back to online + try: + if (('inverter' in stream.new_data and stream.new_data['inverter']) + or ('collector' in stream.new_data and + stream.new_data['collector']) + or self.mqtt.ha_restarts != self.__ha_restarts): + await self._register_proxy_stat_home_assistant() + await self.__register_home_assistant(stream) + self.__ha_restarts = self.mqtt.ha_restarts + + for key in stream.new_data: + await self.__async_publ_mqtt_packet(stream, key) + for key in Infos.new_stat_data: + await Proxy._async_publ_mqtt_proxy_stat(key) + + except MqttCodeError as error: + logging.error(f'Mqtt except: {error}') + except Exception: + Infos.inc_counter('SW_Exception') + logging.error( + f"Inverter: Exception:\n" + f"{traceback.format_exc()}") + + async def __async_publ_mqtt_packet(self, stream, key): + db = stream.db.db + if key in db and stream.new_data[key]: + data_json = json.dumps(db[key]) + node_id = stream.node_id + logger_mqtt.debug(f'{key}: {data_json}') + await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 + stream.new_data[key] = False + + async def __register_home_assistant(self, stream) -> None: + '''register all our topics at home assistant''' + for data_json, component, node_id, id in stream.db.ha_confs( + self.entity_prfx, stream.node_id, stream.unique_id, + stream.sug_area): + logger_mqtt.debug(f"MQTT Register: cmp:'{component}'" + f" node_id:'{node_id}' {data_json}") + await self.mqtt.publish(f"{self.discovery_prfx}{component}" + f"/{node_id}{id}/config", data_json) + + stream.db.reg_clr_at_midnight(f'{self.entity_prfx}{stream.node_id}') diff --git a/ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py b/ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py new file mode 100644 index 0000000..11bd5e8 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py @@ -0,0 +1,37 @@ +from abc import abstractmethod +import logging +from asyncio import StreamReader, StreamWriter + +from iter_registry 
import AbstractIterMeta + +logger_mqtt = logging.getLogger('mqtt') + + +class InverterIfc(metaclass=AbstractIterMeta): + _registry = [] + + @abstractmethod + def __init__(self, reader: StreamReader, writer: StreamWriter, + config_id: str, prot_class, + client_mode: bool): + pass # pragma: no cover + + @abstractmethod + def __enter__(self): + pass # pragma: no cover + + @abstractmethod + def __exit__(self, exc_type, exc, tb): + pass # pragma: no cover + + @abstractmethod + def healthy(self) -> bool: + pass # pragma: no cover + + @abstractmethod + async def disc(self, shutdown_started=False) -> None: + pass # pragma: no cover + + @abstractmethod + async def create_remote(self) -> None: + pass # pragma: no cover diff --git a/ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py b/ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py new file mode 100644 index 0000000..ea0cd73 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py @@ -0,0 +1,9 @@ +from abc import ABCMeta + + +class AbstractIterMeta(ABCMeta): + def __iter__(cls): + for ref in cls._registry: + obj = ref() + if obj is not None: + yield obj diff --git a/ha_addons/ha_addon/rootfs/home/proxy/logging.ini b/ha_addons/ha_addon/rootfs/home/proxy/logging.ini new file mode 100644 index 0000000..34db695 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/logging.ini @@ -0,0 +1,76 @@ +[loggers] +keys=root,tracer,mesg,conn,data,mqtt,asyncio + +[handlers] +keys=console_handler,file_handler_name1,file_handler_name2 + +[formatters] +keys=console_formatter,file_formatter + +[logger_root] +level=DEBUG +handlers=console_handler,file_handler_name1 + + +[logger_conn] +level=DEBUG +handlers=console_handler,file_handler_name1 +propagate=0 +qualname=conn + +[logger_mqtt] +level=INFO +handlers=console_handler,file_handler_name1 +propagate=0 +qualname=mqtt + +[logger_asyncio] +level=INFO +handlers=console_handler,file_handler_name1 +propagate=0 +qualname=asyncio + +[logger_data] +level=DEBUG 
+handlers=file_handler_name1 +propagate=0 +qualname=data + + +[logger_mesg] +level=DEBUG +handlers=file_handler_name2 +propagate=0 +qualname=msg + +[logger_tracer] +level=INFO +handlers=file_handler_name2 +propagate=0 +qualname=tracer + +[handler_console_handler] +class=StreamHandler +level=DEBUG +formatter=console_formatter + +[handler_file_handler_name1] +class=handlers.TimedRotatingFileHandler +level=INFO +formatter=file_formatter +args=('log/proxy.log', when:='midnight') + +[handler_file_handler_name2] +class=handlers.TimedRotatingFileHandler +level=NOTSET +formatter=file_formatter +args=('log/trace.log', when:='midnight') + +[formatter_console_formatter] +format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s' +datefmt='%Y-%m-%d %H:%M:%S + +[formatter_file_formatter] +format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s' +datefmt='%Y-%m-%d %H:%M:%S + diff --git a/ha_addons/ha_addon/rootfs/home/proxy/messages.py b/ha_addons/ha_addon/rootfs/home/proxy/messages.py new file mode 100644 index 0000000..eecfc80 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/messages.py @@ -0,0 +1,203 @@ +import logging +import weakref +from typing import Callable +from enum import Enum + +from async_ifc import AsyncIfc +from protocol_ifc import ProtocolIfc +from infos import Infos, Register +from modbus import Modbus +from my_timer import Timer + +logger = logging.getLogger('msg') + + +def __hex_val(n, data, data_len): + line = '' + for j in range(n-16, n): + if j >= data_len: + break + line += '%02x ' % abs(data[j]) + return line + + +def __asc_val(n, data, data_len): + line = '' + for j in range(n-16, n): + if j >= data_len: + break + c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.' 
+ line += '%c' % c + return line + + +def hex_dump(data, data_len) -> list: + n = 0 + lines = [] + + for i in range(0, data_len, 16): + line = ' ' + line += '%04x | ' % (i) + n += 16 + line += __hex_val(n, data, data_len) + line += ' ' * (3 * 16 + 9 - len(line)) + ' | ' + line += __asc_val(n, data, data_len) + lines.append(line) + + return lines + + +def hex_dump_str(data, data_len): + lines = hex_dump(data, data_len) + return '\n'.join(lines) + + +def hex_dump_memory(level, info, data, data_len): + lines = [] + lines.append(info) + tracer = logging.getLogger('tracer') + if not tracer.isEnabledFor(level): + return + + lines += hex_dump(data, data_len) + + tracer.log(level, '\n'.join(lines)) + + +class State(Enum): + '''state of the logical connection''' + init = 0 + '''just created''' + received = 1 + '''at least one packet received''' + up = 2 + '''at least one cmd-rsp transaction''' + pend = 3 + '''inverter transaction pending, don't send MODBUS cmds''' + closed = 4 + '''connection closed''' + + +class Message(ProtocolIfc): + MAX_START_TIME = 400 + '''maximum time without a received msg in sec''' + MAX_INV_IDLE_TIME = 120 + '''maximum time without a received msg from the inverter in sec''' + MAX_DEF_IDLE_TIME = 360 + '''maximum default time without a received msg in sec''' + MB_START_TIMEOUT = 40 + '''start delay for Modbus polling in server mode''' + MB_REGULAR_TIMEOUT = 60 + '''regular Modbus polling time in server mode''' + + def __init__(self, node_id, ifc: "AsyncIfc", server_side: bool, + send_modbus_cb: Callable[[bytes, int, str], None], + mb_timeout: int): + self._registry.append(weakref.ref(self)) + + self.server_side = server_side + self.ifc = ifc + self.node_id = node_id + if server_side: + self.mb = Modbus(send_modbus_cb, mb_timeout) + self.mb_timer = Timer(self.mb_timout_cb, self.node_id) + else: + self.mb = None + self.mb_timer = None + self.header_valid = False + self.header_len = 0 + self.data_len = 0 + self.unique_id = 0 + self.sug_area = '' + 
self.new_data = {} + self.state = State.init + self.shutdown_started = False + self.modbus_elms = 0 # for unit tests + self.mb_timeout = self.MB_REGULAR_TIMEOUT + self.mb_first_timeout = self.MB_START_TIMEOUT + '''timer value for next Modbus polling request''' + self.modbus_polling = False + + @property + def node_id(self): + return self._node_id + + @node_id.setter + def node_id(self, value): + self._node_id = value + self.ifc.set_node_id(value) + + ''' + Empty methods, that have to be implemented in any child class which + don't use asyncio + ''' + def _read(self) -> None: # read data bytes from socket and copy them + # to our _recv_buffer + return # pragma: no cover + + def _set_mqtt_timestamp(self, key, ts: float | None): + if key not in self.new_data or \ + not self.new_data[key]: + if key == 'grid': + info_id = Register.TS_GRID + elif key == 'input': + info_id = Register.TS_INPUT + elif key == 'total': + info_id = Register.TS_TOTAL + else: + return + # tstr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(ts)) + # logger.info(f'update: key: {key} ts:{tstr}' + self.db.set_db_def_value(info_id, round(ts)) + + def _timeout(self) -> int: + if self.state == State.init or self.state == State.received: + to = self.MAX_START_TIME + elif self.state == State.up and \ + self.server_side and self.modbus_polling: + to = self.MAX_INV_IDLE_TIME + else: + to = self.MAX_DEF_IDLE_TIME + return to + + def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: + if self.state != State.up: + logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' + ' as the state is not UP') + return + self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) + + async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: + self._send_modbus_cmd(func, addr, val, log_lvl) + + ''' + Our puplic methods + ''' + def close(self) -> None: + if self.server_side: + # set inverter state to offline, if output power is very low + logging.debug('close power: ' + 
f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') + if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: + self.db.set_db_def_value(Register.INVERTER_STATUS, 0) + self.new_data['env'] = True + self.mb_timer.close() + self.state = State.closed + self.ifc.rx_set_cb(None) + self.ifc.prot_set_timeout_cb(None) + self.ifc.prot_set_init_new_client_conn_cb(None) + self.ifc.prot_set_update_header_cb(None) + self.ifc = None + + if self.mb: + self.mb.close() + self.mb = None + # pragma: no cover + + def inc_counter(self, counter: str) -> None: + self.db.inc_counter(counter) + Infos.new_stat_data['proxy'] = True + + def dec_counter(self, counter: str) -> None: + self.db.dec_counter(counter) + Infos.new_stat_data['proxy'] = True diff --git a/ha_addons/ha_addon/rootfs/home/proxy/modbus.py b/ha_addons/ha_addon/rootfs/home/proxy/modbus.py new file mode 100644 index 0000000..5c64086 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/modbus.py @@ -0,0 +1,345 @@ +'''MODBUS module for TSUN inverter support + +TSUN uses the MODBUS in the RTU transmission mode over serial line. +see: https://modbus.org/docs/Modbus_Application_Protocol_V1_1b3.pdf +see: https://modbus.org/docs/Modbus_over_serial_line_V1_02.pdf + +A Modbus PDU consists of: 'Function-Code' + 'Data' +A Modbus RTU message consists of: 'Addr' + 'Modbus-PDU' + 'CRC-16' +The inverter is a MODBUS server and the proxy the MODBUS client. 
+ +The 16-bit CRC is known as CRC-16-ANSI(reverse) +see: https://en.wikipedia.org/wiki/Computation_of_cyclic_redundancy_checks +''' +import struct +import logging +import asyncio +from typing import Generator, Callable + +from infos import Register, Fmt + +logger = logging.getLogger('data') + +CRC_POLY = 0xA001 # (LSBF/reverse) +CRC_INIT = 0xFFFF + + +class Modbus(): + '''Simple MODBUS implementation with TX queue and retransmit timer''' + INV_ADDR = 1 + '''MODBUS server address of the TSUN inverter''' + READ_REGS = 3 + '''MODBUS function code: Read Holding Register''' + READ_INPUTS = 4 + '''MODBUS function code: Read Input Register''' + WRITE_SINGLE_REG = 6 + '''Modbus function code: Write Single Register''' + + __crc_tab = [] + mb_reg_mapping = { + 0x2000: {'reg': Register.BOOT_STATUS, 'fmt': '!H'}, # noqa: E501 + 0x2001: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, # noqa: E501 + 0x2003: {'reg': Register.WORK_MODE, 'fmt': '!H'}, + 0x2006: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'}, + 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 + 0x2008: {'reg': Register.RATED_LEVEL, 'fmt': '!H'}, + 0x2009: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 + 0x200a: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'}, + 0x2010: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'}, + 0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 + + 0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501 + 0x3001: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501 + 0x3002: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501 + 0x3003: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501 + 0x3004: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501 + 0x3005: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501 + 0x3006: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501 + + 0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # 
noqa: E501 + 0x3009: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x300a: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x300b: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501 + # 0x300d + 0x300e: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 + 0x300f: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x3010: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x3011: {'reg': Register.PV1_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x3012: {'reg': Register.PV1_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x3013: {'reg': Register.PV2_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x3014: {'reg': Register.PV2_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x3015: {'reg': Register.PV2_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x3016: {'reg': Register.PV3_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x3017: {'reg': Register.PV3_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x3018: {'reg': Register.PV3_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x3019: {'reg': Register.PV4_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x301a: {'reg': Register.PV4_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x301b: {'reg': Register.PV4_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 + 0x301c: {'reg': Register.DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x301d: {'reg': Register.TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 + 0x301f: {'reg': Register.PV1_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x3020: {'reg': Register.PV1_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 + 0x3022: {'reg': Register.PV2_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x3023: {'reg': Register.PV2_TOTAL_GENERATION, 'fmt': '!L', 
'ratio': 0.01}, # noqa: E501 + 0x3025: {'reg': Register.PV3_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x3026: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 + 0x3028: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 + 0x3029: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 + # 0x302a + } + + def __init__(self, snd_handler: Callable[[bytes, int, str], None], + timeout: int = 1): + if not len(self.__crc_tab): + self.__build_crc_tab(CRC_POLY) + self.que = asyncio.Queue(100) + self.snd_handler = snd_handler + '''Send handler to transmit a MODBUS RTU request''' + self.rsp_handler = None + '''Response handler to forward the response''' + self.timeout = timeout + '''MODBUS response timeout in seconds''' + self.max_retries = 1 + '''Max retransmit for MODBUS requests''' + self.retry_cnt = 0 + self.last_req = b'' + self.counter = {} + '''Dictenary with statistic counter''' + self.counter['timeouts'] = 0 + self.counter['retries'] = {} + for i in range(0, self.max_retries+1): + self.counter['retries'][f'{i}'] = 0 + self.last_log_lvl = logging.DEBUG + self.last_addr = 0 + self.last_fcode = 0 + self.last_len = 0 + self.last_reg = 0 + self.err = 0 + self.loop = asyncio.get_event_loop() + self.req_pend = False + self.tim = None + self.node_id = '' + + def close(self): + """free the queue and erase the callback handlers""" + logging.debug('Modbus close:') + self.__stop_timer() + self.rsp_handler = None + self.snd_handler = None + while not self.que.empty(): + self.que.get_nowait() + + def set_node_id(self, node_id: str): + self.node_id = node_id + + def build_msg(self, addr: int, func: int, reg: int, val: int, + log_lvl=logging.DEBUG) -> None: + """Build MODBUS RTU request frame and add it to the tx queue + + Keyword arguments: + addr: RTU server address (inverter) + func: MODBUS function code + reg: 16-bit register number + val: 16 bit value + """ + msg = 
struct.pack('>BBHH', addr, func, reg, val) + msg += struct.pack(' bool: + """Add the received Modbus RTU request to the tx queue + + Keyword arguments: + buf: Modbus RTU pdu incl ADDR byte and trailing CRC + rsp_handler: Callback, if the received pdu is valid + + Returns: + True: PDU was added to the queue + False: PDU was ignored, due to an error + """ + # logging.info(f'recv_req: first byte modbus:{buf[0]} len:{len(buf)}') + if not self.__check_crc(buf): + self.err = 1 + logger.error('Modbus recv: CRC error') + return False + self.que.put_nowait({'req': buf, + 'rsp_hdl': rsp_handler, + 'log_lvl': logging.INFO}) + if self.que.qsize() == 1: + self.__send_next_from_que() + + return True + + def recv_resp(self, info_db, buf: bytes) -> \ + Generator[tuple[str, bool, int | float | str], None, None]: + """Generator which check and parse a received MODBUS response. + + Keyword arguments: + info_db: database for info lockups + buf: received Modbus RTU response frame + + Returns on error and set Self.err to: + 1: CRC error + 2: Wrong server address + 3: Unexpected function code + 4: Unexpected data length + 5: No MODBUS request pending + """ + # logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}') + + fcode = buf[1] + data_available = self.last_addr == self.INV_ADDR and \ + (fcode == 3 or fcode == 4) + + if self.__resp_error_check(buf, data_available): + return + + if data_available: + elmlen = buf[2] >> 1 + first_reg = self.last_reg # save last_reg before sending next pdu + self.__stop_timer() # stop timer and send next pdu + yield from self.__process_data(info_db, buf, first_reg, elmlen) + else: + self.__stop_timer() + + self.counter['retries'][f'{self.retry_cnt}'] += 1 + if self.rsp_handler: + self.rsp_handler() + self.__send_next_from_que() + + def __resp_error_check(self, buf: bytes, data_available: bool) -> bool: + '''Check the MODBUS response for errors, returns True if one accure''' + if not self.req_pend: + self.err = 5 + return True + if not 
self.__check_crc(buf): + logger.error(f'[{self.node_id}] Modbus resp: CRC error') + self.err = 1 + return True + if buf[0] != self.last_addr: + logger.info(f'[{self.node_id}] Modbus resp: Wrong addr {buf[0]}') + self.err = 2 + return True + fcode = buf[1] + if fcode != self.last_fcode: + logger.info(f'[{self.node_id}] Modbus: Wrong fcode {fcode}' + f' != {self.last_fcode}') + self.err = 3 + return True + if data_available: + elmlen = buf[2] >> 1 + if elmlen != self.last_len: + logger.info(f'[{self.node_id}] Modbus: len error {elmlen}' + f' != {self.last_len}') + self.err = 4 + return True + + return False + + def __process_data(self, info_db, buf: bytes, first_reg, elmlen): + '''Generator over received registers, updates the db''' + for i in range(0, elmlen): + addr = first_reg+i + if addr in self.mb_reg_mapping: + row = self.mb_reg_mapping[addr] + info_id = row['reg'] + keys, level, unit, must_incr = info_db._key_obj(info_id) + if keys: + result = Fmt.get_value(buf, 3+2*i, row) + name, update = info_db.update_db(keys, must_incr, + result) + yield keys[0], update, result + if update: + info_db.tracer.log(level, + f'[{self.node_id}] MODBUS: {name}' + f' : {result}{unit}') + + ''' + MODBUS response timer + ''' + def __start_timer(self) -> None: + '''Start response timer and set `req_pend` to True''' + self.req_pend = True + self.tim = self.loop.call_later(self.timeout, self.__timeout_cb) + # logging.debug(f'Modbus start timer {self}') + + def __stop_timer(self) -> None: + '''Stop response timer and set `req_pend` to False''' + self.req_pend = False + # logging.debug(f'Modbus stop timer {self}') + if self.tim: + self.tim.cancel() + self.tim = None + + def __timeout_cb(self) -> None: + '''Rsponse timeout handler retransmit pdu or send next pdu''' + self.req_pend = False + + if self.retry_cnt < self.max_retries: + logger.debug(f'Modbus retrans {self}') + self.retry_cnt += 1 + self.__start_timer() + self.snd_handler(self.last_req, self.last_log_lvl, state='Retrans') + 
else: + logger.info(f'[{self.node_id}] Modbus timeout ' + f'(FCode: {self.last_fcode} ' + f'Reg: 0x{self.last_reg:04x}, ' + f'{self.last_len})') + self.counter['timeouts'] += 1 + self.__send_next_from_que() + + def __send_next_from_que(self) -> None: + '''Get next MODBUS pdu from queue and transmit it''' + if self.req_pend: + return + try: + item = self.que.get_nowait() + req = item['req'] + self.last_req = req + self.rsp_handler = item['rsp_hdl'] + self.last_log_lvl = item['log_lvl'] + self.last_addr = req[0] + self.last_fcode = req[1] + + res = struct.unpack_from('>HH', req, 2) + self.last_reg = res[0] + self.last_len = res[1] + self.retry_cnt = 0 + self.__start_timer() + self.snd_handler(self.last_req, self.last_log_lvl, state='Command') + except asyncio.QueueEmpty: + pass + + ''' + Helper function for CRC-16 handling + ''' + def __check_crc(self, msg: bytes) -> bool: + '''Check CRC-16 and returns True if valid''' + return 0 == self.__calc_crc(msg) + + def __calc_crc(self, buffer: bytes) -> int: + '''Build CRC-16 for buffer and returns it''' + crc = CRC_INIT + + for cur in buffer: + crc = (crc >> 8) ^ self.__crc_tab[(crc ^ cur) & 0xFF] + return crc + + def __build_crc_tab(self, poly: int) -> None: + '''Build CRC-16 helper table, must be called exactly one time''' + for index in range(256): + data = index << 1 + crc = 0 + for _ in range(8, 0, -1): + data >>= 1 + if (data ^ crc) & 1: + crc = (crc >> 1) ^ poly + else: + crc >>= 1 + self.__crc_tab.append(crc) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py b/ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py new file mode 100644 index 0000000..f3788d4 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py @@ -0,0 +1,88 @@ +import logging +import traceback +import asyncio + +from config import Config +from gen3plus.inverter_g3p import InverterG3P +from infos import Infos + +logger = logging.getLogger('conn') + + +class ModbusConn(): + def __init__(self, host, port): + self.host = host 
+ self.port = port + self.addr = (host, port) + self.inverter = None + + async def __aenter__(self) -> 'InverterG3P': + '''Establish a client connection to the TSUN cloud''' + connection = asyncio.open_connection(self.host, self.port) + reader, writer = await connection + self.inverter = InverterG3P(reader, writer, + client_mode=True) + self.inverter.__enter__() + stream = self.inverter.local.stream + logging.info(f'[{stream.node_id}:{stream.conn_no}] ' + f'Connected to {self.addr}') + Infos.inc_counter('Inverter_Cnt') + await self.inverter.local.ifc.publish_outstanding_mqtt() + return self.inverter + + async def __aexit__(self, exc_type, exc, tb): + Infos.dec_counter('Inverter_Cnt') + await self.inverter.local.ifc.publish_outstanding_mqtt() + self.inverter.__exit__(exc_type, exc, tb) + + +class ModbusTcp(): + + def __init__(self, loop, tim_restart=10) -> None: + self.tim_restart = tim_restart + + inverters = Config.get('inverters') + # logging.info(f'Inverters: {inverters}') + + for inv in inverters.values(): + if (type(inv) is dict + and 'monitor_sn' in inv + and 'client_mode' in inv): + client = inv['client_mode'] + # logging.info(f"SerialNo:{inv['monitor_sn']} host:{client['host']} port:{client['port']}") # noqa: E501 + loop.create_task(self.modbus_loop(client['host'], + client['port'], + inv['monitor_sn'], + client['forward'])) + + async def modbus_loop(self, host, port, + snr: int, forward: bool) -> None: + '''Loop for receiving messages from the TSUN cloud (client-side)''' + while True: + try: + async with ModbusConn(host, port) as inverter: + stream = inverter.local.stream + await stream.send_start_cmd(snr, host, forward) + await stream.ifc.loop() + logger.info(f'[{stream.node_id}:{stream.conn_no}] ' + f'Connection closed - Shutdown: ' + f'{stream.shutdown_started}') + if stream.shutdown_started: + return + del inverter # decrease ref counter after the with block + + except (ConnectionRefusedError, TimeoutError) as error: + 
logging.debug(f'Inv-conn:{error}') + + except OSError as error: + if error.errno == 113: # pragma: no cover + logging.debug(f'os-error:{error}') + else: + logging.info(f'os-error: {error}') + + except Exception: + logging.error( + f"ModbusTcpCreate: Exception for {(host, port)}:\n" + f"{traceback.format_exc()}") + + await asyncio.sleep(self.tim_restart) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/mqtt.py b/ha_addons/ha_addon/rootfs/home/proxy/mqtt.py new file mode 100644 index 0000000..f52b797 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/mqtt.py @@ -0,0 +1,182 @@ +import asyncio +import logging +import aiomqtt +import traceback + +from modbus import Modbus +from messages import Message +from config import Config +from singleton import Singleton + +logger_mqtt = logging.getLogger('mqtt') + + +class Mqtt(metaclass=Singleton): + __client = None + __cb_mqtt_is_up = None + + def __init__(self, cb_mqtt_is_up): + logger_mqtt.debug('MQTT: __init__') + if cb_mqtt_is_up: + self.__cb_mqtt_is_up = cb_mqtt_is_up + loop = asyncio.get_event_loop() + self.task = loop.create_task(self.__loop()) + self.ha_restarts = 0 + + ha = Config.get('ha') + self.ha_status_topic = f"{ha['auto_conf_prefix']}/status" + self.mb_rated_topic = f"{ha['entity_prefix']}/+/rated_load" + self.mb_out_coeff_topic = f"{ha['entity_prefix']}/+/out_coeff" + self.mb_reads_topic = f"{ha['entity_prefix']}/+/modbus_read_regs" + self.mb_inputs_topic = f"{ha['entity_prefix']}/+/modbus_read_inputs" + self.mb_at_cmd_topic = f"{ha['entity_prefix']}/+/at_cmd" + + @property + def ha_restarts(self): + return self._ha_restarts + + @ha_restarts.setter + def ha_restarts(self, value): + self._ha_restarts = value + + async def close(self) -> None: + logger_mqtt.debug('MQTT: close') + self.task.cancel() + try: + await self.task + + except (asyncio.CancelledError, Exception) as e: + logging.debug(f"Mqtt.close: exception: {e} ...") + + async def publish(self, topic: str, payload: str | bytes | bytearray + | int | 
float | None = None) -> None: + if self.__client: + await self.__client.publish(topic, payload) + + async def __loop(self) -> None: + mqtt = Config.get('mqtt') + logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:' + f'{mqtt["port"]} ' + f'user:{mqtt["user"]}') + self.__client = aiomqtt.Client(hostname=mqtt['host'], + port=mqtt['port'], + username=mqtt['user'], + password=mqtt['passwd']) + + interval = 5 # Seconds + + while True: + try: + async with self.__client: + logger_mqtt.info('MQTT broker connection established') + + if self.__cb_mqtt_is_up: + await self.__cb_mqtt_is_up() + + await self.__client.subscribe(self.ha_status_topic) + await self.__client.subscribe(self.mb_rated_topic) + await self.__client.subscribe(self.mb_out_coeff_topic) + await self.__client.subscribe(self.mb_reads_topic) + await self.__client.subscribe(self.mb_inputs_topic) + await self.__client.subscribe(self.mb_at_cmd_topic) + + async for message in self.__client.messages: + await self.dispatch_msg(message) + + except aiomqtt.MqttError: + if Config.is_default('mqtt'): + logger_mqtt.info( + "MQTT is unconfigured; Check your config.toml!") + interval = 30 + else: + interval = 5 # Seconds + logger_mqtt.info( + f"Connection lost; Reconnecting in {interval}" + " seconds ...") + + await asyncio.sleep(interval) + except asyncio.CancelledError: + logger_mqtt.debug("MQTT task cancelled") + self.__client = None + return + except Exception: + # self.inc_counter('SW_Exception') # fixme + logger_mqtt.error( + f"Exception:\n" + f"{traceback.format_exc()}") + + async def dispatch_msg(self, message): + if message.topic.matches(self.ha_status_topic): + status = message.payload.decode("UTF-8") + logger_mqtt.info('Home-Assistant Status:' + f' {status}') + if status == 'online': + self.ha_restarts += 1 + await self.__cb_mqtt_is_up() + + if message.topic.matches(self.mb_rated_topic): + await self.modbus_cmd(message, + Modbus.WRITE_SINGLE_REG, + 1, 0x2008) + + if 
message.topic.matches(self.mb_out_coeff_topic): + payload = message.payload.decode("UTF-8") + try: + val = round(float(payload) * 1024/100) + if val < 0 or val > 1024: + logger_mqtt.error('out_coeff: value must be in' + 'the range 0..100,' + f' got: {payload}') + else: + await self.modbus_cmd(message, + Modbus.WRITE_SINGLE_REG, + 0, 0x202c, val) + except Exception: + pass + + if message.topic.matches(self.mb_reads_topic): + await self.modbus_cmd(message, + Modbus.READ_REGS, 2) + + if message.topic.matches(self.mb_inputs_topic): + await self.modbus_cmd(message, + Modbus.READ_INPUTS, 2) + + if message.topic.matches(self.mb_at_cmd_topic): + await self.at_cmd(message) + + def each_inverter(self, message, func_name: str): + topic = str(message.topic) + node_id = topic.split('/')[1] + '/' + for m in Message: + if m.server_side and (m.node_id == node_id): + logger_mqtt.debug(f'Found: {node_id}') + fnc = getattr(m, func_name, None) + if callable(fnc): + yield fnc + else: + logger_mqtt.warning(f'Cmd not supported by: {node_id}') + break + + else: + logger_mqtt.warning(f'Node_id: {node_id} not found') + + async def modbus_cmd(self, message, func, params=0, addr=0, val=0): + payload = message.payload.decode("UTF-8") + for fnc in self.each_inverter(message, "send_modbus_cmd"): + res = payload.split(',') + if params > 0 and params != len(res): + logger_mqtt.error(f'Parameter expected: {params}, ' + f'got: {len(res)}') + return + if params == 1: + val = int(payload) + elif params == 2: + addr = int(res[0], base=16) + val = int(res[1]) # lenght + await fnc(func, addr, val, logging.INFO) + + async def at_cmd(self, message): + payload = message.payload.decode("UTF-8") + for fnc in self.each_inverter(message, "send_at_cmd"): + await fnc(payload) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/my_timer.py b/ha_addons/ha_addon/rootfs/home/proxy/my_timer.py new file mode 100644 index 0000000..46435bd --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/my_timer.py @@ -0,0 +1,35 
@@ +import asyncio +import logging +from itertools import count + + +class Timer: + def __init__(self, cb, id_str: str = ''): + self.__timeout_cb = cb + self.loop = asyncio.get_event_loop() + self.tim = None + self.id_str = id_str + self.exp_count = count(0) + + def start(self, timeout: float) -> None: + '''Start timer with timeout seconds''' + if self.tim: + self.tim.cancel() + self.tim = self.loop.call_later(timeout, self.__timeout) + logging.debug(f'[{self.id_str}]Start timer') + + def stop(self) -> None: + '''Stop timer''' + logging.debug(f'[{self.id_str}]Stop timer') + if self.tim: + self.tim.cancel() + self.tim = None + + def __timeout(self) -> None: + '''timer expired handler''' + logging.debug(f'[{self.id_str}]Timer expired') + self.__timeout_cb(next(self.exp_count)) + + def close(self) -> None: + self.stop() + self.__timeout_cb = None diff --git a/ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py b/ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py new file mode 100644 index 0000000..3b6c886 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py @@ -0,0 +1,17 @@ +from abc import abstractmethod + +from async_ifc import AsyncIfc +from iter_registry import AbstractIterMeta + + +class ProtocolIfc(metaclass=AbstractIterMeta): + _registry = [] + + @abstractmethod + def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, + client_mode: bool = False, id_str=b''): + pass # pragma: no cover + + @abstractmethod + def close(self): + pass # pragma: no cover diff --git a/ha_addons/ha_addon/rootfs/home/proxy/proxy.py b/ha_addons/ha_addon/rootfs/home/proxy/proxy.py new file mode 100644 index 0000000..eadc3ac --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/proxy.py @@ -0,0 +1,101 @@ +import asyncio +import logging +import json + +from config import Config +from mqtt import Mqtt +from infos import Infos + +logger_mqtt = logging.getLogger('mqtt') + + +class Proxy(): + '''class Proxy is a baseclass + + The class has some class method for 
managing common resources like a + connection to the MQTT broker or proxy error counter which are common + for all inverter connection + + Instances of the class are connections to an inverter and can have an + optional link to an remote connection to the TSUN cloud. A remote + connection dies with the inverter connection. + + class methods: + class_init(): initialize the common resources of the proxy (MQTT + broker, Proxy DB, etc). Must be called before the + first inverter instance can be created + class_close(): release the common resources of the proxy. Should not + be called before any instances of the class are + destroyed + + methods: + create_remote(): Establish a client connection to the TSUN cloud + async_publ_mqtt(): Publish data to MQTT broker + ''' + @classmethod + def class_init(cls) -> None: + logging.debug('Proxy.class_init') + # initialize the proxy statistics + Infos.static_init() + cls.db_stat = Infos() + + ha = Config.get('ha') + cls.entity_prfx = ha['entity_prefix'] + '/' + cls.discovery_prfx = ha['discovery_prefix'] + '/' + cls.proxy_node_id = ha['proxy_node_id'] + '/' + cls.proxy_unique_id = ha['proxy_unique_id'] + + # call Mqtt singleton to establisch the connection to the mqtt broker + cls.mqtt = Mqtt(cls._cb_mqtt_is_up) + + # register all counters which should be reset at midnight. + # This is needed if the proxy is restated before midnight + # and the inverters are offline, cause the normal refgistering + # needs an update on the counters. + # Without this registration here the counters would not be + # reset at midnight when you restart the proxy just before + # midnight! 
+ inverters = Config.get('inverters') + # logger.debug(f'Proxys: {inverters}') + for inv in inverters.values(): + if (type(inv) is dict): + node_id = inv['node_id'] + cls.db_stat.reg_clr_at_midnight(f'{cls.entity_prfx}{node_id}', + check_dependencies=False) + + @classmethod + async def _cb_mqtt_is_up(cls) -> None: + logging.info('Initialize proxy device on home assistant') + # register proxy status counters at home assistant + await cls._register_proxy_stat_home_assistant() + + # send values of the proxy status counters + await asyncio.sleep(0.5) # wait a bit, before sending data + Infos.new_stat_data['proxy'] = True # force sending data to sync ha + await cls._async_publ_mqtt_proxy_stat('proxy') + + @classmethod + async def _register_proxy_stat_home_assistant(cls) -> None: + '''register all our topics at home assistant''' + for data_json, component, node_id, id in cls.db_stat.ha_proxy_confs( + cls.entity_prfx, cls.proxy_node_id, cls.proxy_unique_id): + logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") # noqa: E501 + await cls.mqtt.publish(f'{cls.discovery_prfx}{component}/{node_id}{id}/config', data_json) # noqa: E501 + + @classmethod + async def _async_publ_mqtt_proxy_stat(cls, key) -> None: + stat = Infos.stat + if key in stat and Infos.new_stat_data[key]: + data_json = json.dumps(stat[key]) + node_id = cls.proxy_node_id + logger_mqtt.debug(f'{key}: {data_json}') + await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}", + data_json) + Infos.new_stat_data[key] = False + + @classmethod + def class_close(cls, loop) -> None: # pragma: no cover + logging.debug('Proxy.class_close') + logging.info('Close MQTT Task') + loop.run_until_complete(cls.mqtt.close()) + cls.mqtt = None diff --git a/ha_addons/ha_addon/rootfs/home/proxy/scheduler.py b/ha_addons/ha_addon/rootfs/home/proxy/scheduler.py new file mode 100644 index 0000000..3c1d25a --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/scheduler.py @@ -0,0 +1,30 @@ +import 
logging +import json +from mqtt import Mqtt +from aiocron import crontab +from infos import ClrAtMidnight + +logger_mqtt = logging.getLogger('mqtt') + + +class Schedule: + mqtt = None + count = 0 + + @classmethod + def start(cls) -> None: + '''Start the scheduler and schedule the tasks (cron jobs)''' + logging.debug("Scheduler init") + cls.mqtt = Mqtt(None) + + crontab('0 0 * * *', func=cls.atmidnight, start=True) + + @classmethod + async def atmidnight(cls) -> None: + '''Clear daily counters at midnight''' + logging.info("Clear daily counters at midnight") + + for key, data in ClrAtMidnight.elm(): + logger_mqtt.debug(f'{key}: {data}') + data_json = json.dumps(data) + await cls.mqtt.publish(f"{key}", data_json) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/server.py b/ha_addons/ha_addon/rootfs/home/proxy/server.py new file mode 100644 index 0000000..cda8501 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/server.py @@ -0,0 +1,191 @@ +import logging +import asyncio +import signal +import os +from asyncio import StreamReader, StreamWriter +from aiohttp import web +from logging import config # noqa F401 +from proxy import Proxy +from inverter_ifc import InverterIfc +from gen3.inverter_g3 import InverterG3 +from gen3plus.inverter_g3p import InverterG3P +from scheduler import Schedule +from config import Config +from modbus_tcp import ModbusTcp + +routes = web.RouteTableDef() +proxy_is_up = False + + +@routes.get('/') +async def hello(request): + return web.Response(text="Hello, world") + + +@routes.get('/-/ready') +async def ready(request): + if proxy_is_up: + status = 200 + text = 'Is ready' + else: + status = 503 + text = 'Not ready' + return web.Response(status=status, text=text) + + +@routes.get('/-/healthy') +async def healthy(request): + + if proxy_is_up: + # logging.info('web reqeust healthy()') + for inverter in InverterIfc: + try: + res = inverter.healthy() + if not res: + return web.Response(status=503, text="I have a problem") + except Exception 
as err: + logging.info(f'Exception:{err}') + + return web.Response(status=200, text="I'm fine") + + +async def webserver(addr, port): + '''coro running our webserver''' + app = web.Application() + app.add_routes(routes) + runner = web.AppRunner(app) + + await runner.setup() + site = web.TCPSite(runner, addr, port) + await site.start() + logging.info(f'HTTP server listen on port: {port}') + + try: + # Normal interaction with aiohttp + while True: + await asyncio.sleep(3600) # sleep forever + except asyncio.CancelledError: + logging.info('HTTP server cancelled') + await runner.cleanup() + logging.debug('HTTP cleanup done') + + +async def handle_client(reader: StreamReader, writer: StreamWriter, inv_class): + '''Handles a new incoming connection and starts an async loop''' + + with inv_class(reader, writer) as inv: + await inv.local.ifc.server_loop() + + +async def handle_shutdown(web_task): + '''Close all TCP connections and stop the event loop''' + + logging.info('Shutdown due to SIGTERM') + global proxy_is_up + proxy_is_up = False + + # + # first, disc all open TCP connections gracefully + # + for inverter in InverterIfc: + await inverter.disc(True) + + logging.info('Proxy disconnecting done') + + # + # second, cancel the web server + # + web_task.cancel() + await web_task + + # + # now cancel all remaining (pending) tasks + # + pending = asyncio.all_tasks() + for task in pending: + task.cancel() + + # + # at last, start a coro for stopping the loop + # + logging.debug("Stop event loop") + loop.stop() + + +def get_log_level() -> int: + '''checks if LOG_LVL is set in the environment and returns the + corresponding logging.LOG_LEVEL''' + log_level = os.getenv('LOG_LVL', 'INFO') + if log_level == 'DEBUG': + log_level = logging.DEBUG + elif log_level == 'WARN': + log_level = logging.WARNING + else: + log_level = logging.INFO + return log_level + + +if __name__ == "__main__": + # + # Setup our daily, rotating logger + # + serv_name = os.getenv('SERVICE_NAME', 'proxy') + 
version = os.getenv('VERSION', 'unknown') + + logging.config.fileConfig('logging.ini') + logging.info(f'Server "{serv_name} - {version}" will be started') + + # set lowest-severity for 'root', 'msg', 'conn' and 'data' logger + log_level = get_log_level() + logging.getLogger().setLevel(log_level) + logging.getLogger('msg').setLevel(log_level) + logging.getLogger('conn').setLevel(log_level) + logging.getLogger('data').setLevel(log_level) + logging.getLogger('tracer').setLevel(log_level) + logging.getLogger('asyncio').setLevel(log_level) + # logging.getLogger('mqtt').setLevel(log_level) + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + # read config file + ConfigErr = Config.class_init() + if ConfigErr is not None: + logging.info(f'ConfigErr: {ConfigErr}') + Proxy.class_init() + Schedule.start() + ModbusTcp(loop) + + # + # Create tasks for our listening servers. These must be tasks! If we call + # start_server directly out of our main task, the eventloop will be blocked + # and we can't receive and handle the UNIX signals! 
+ # + for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]: + loop.create_task(asyncio.start_server(lambda r, w, i=inv_class: + handle_client(r, w, i), + '0.0.0.0', port)) + web_task = loop.create_task(webserver('0.0.0.0', 8127)) + + # + # Register some UNIX Signal handler for a gracefully server shutdown + # on Docker restart and stop + # + for signame in ('SIGINT', 'SIGTERM'): + loop.add_signal_handler(getattr(signal, signame), + lambda loop=loop: asyncio.create_task( + handle_shutdown(web_task))) + + loop.set_debug(log_level == logging.DEBUG) + try: + if ConfigErr is None: + proxy_is_up = True + loop.run_forever() + except KeyboardInterrupt: + pass + finally: + logging.info("Event loop is stopped") + Proxy.class_close(loop) + logging.debug('Close event loop') + loop.close() + logging.info(f'Finally, exit Server "{serv_name}"') diff --git a/ha_addons/ha_addon/rootfs/home/proxy/singleton.py b/ha_addons/ha_addon/rootfs/home/proxy/singleton.py new file mode 100644 index 0000000..8222146 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/proxy/singleton.py @@ -0,0 +1,14 @@ +from weakref import WeakValueDictionary + + +class Singleton(type): + _instances = WeakValueDictionary() + + def __call__(cls, *args, **kwargs): + # logger_mqtt.debug('singleton: __call__') + if cls not in cls._instances: + instance = super(Singleton, + cls).__call__(*args, **kwargs) + cls._instances[cls] = instance + + return cls._instances[cls] diff --git a/ha_addons/ha_addon/rootfs/requirements.txt b/ha_addons/ha_addon/rootfs/requirements.txt new file mode 100644 index 0000000..1fb1c53 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/requirements.txt @@ -0,0 +1,4 @@ + aiomqtt==2.3.0 + schema==0.7.7 + aiocron==1.8 + aiohttp==3.10.11 \ No newline at end of file From 07c989a305adc7d2266b16e063c9643bac9f2203 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Mon, 2 Dec 2024 23:11:30 +0100 Subject: [PATCH 25/32] increase mqtt timeout to 10s --- app/tests/test_mqtt.py | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index b85d746..91acd02 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -82,7 +82,7 @@ def test_native_client(test_hostname, test_port): on_connect = threading.Event() c.on_connect = Mock(side_effect=lambda *_: on_connect.set()) c.connect_async(test_hostname, test_port) - assert on_connect.wait(5) + assert on_connect.wait(10) finally: c.loop_stop() From 668c631018be07286223335e24c1988e81b29bde Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Mon, 2 Dec 2024 23:41:58 +0100 Subject: [PATCH 26/32] S allius/issue222 (#223) * github action: use ubuntu 24.04 and sonar-scanner-action 4 --- .github/workflows/python-app.yml | 4 ++-- CHANGELOG.md | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 97f390f..8434843 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -31,7 +31,7 @@ env: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 @@ -58,7 +58,7 @@ jobs: coverage report - name: Analyze with SonarCloud if: ${{ env.SONAR_TOKEN != 0 }} - uses: SonarSource/sonarcloud-github-action@v3.1.0 + uses: SonarSource/sonarqube-scan-action@v4 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/CHANGELOG.md b/CHANGELOG.md index 5bb20c3..14dc30f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- github action: use ubuntu 24.04 and sonar-scanner-action 4 [#222](https://github.com/s-allius/tsun-gen3-proxy/issues/222) - migrate paho.mqtt CallbackAPIVersion to VERSION2 [#224](https://github.com/s-allius/tsun-gen3-proxy/issues/224) - add PROD_COMPL_TYPE to trace - add SolarmanV5 messages builder From a5b2b4b7c26d34f69087d4be484bebc3c33e2ea1 Mon 
Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Tue, 3 Dec 2024 22:02:23 +0100 Subject: [PATCH 27/32] S allius/issue217 (#229) * move config.py into a sub directory cnf * adapt unit test * split config class - use depency injection to get config * increase test coverage --- app/src/{ => cnf}/config.py | 48 +++++++---------------- app/src/cnf/config_ifc_proxy.py | 34 ++++++++++++++++ app/src/gen3/talent.py | 2 +- app/src/gen3plus/solarman_v5.py | 2 +- app/src/inverter_base.py | 2 +- app/src/modbus_tcp.py | 2 +- app/src/mqtt.py | 2 +- app/src/proxy.py | 2 +- app/src/server.py | 5 ++- app/tests/test_config.py | 62 +++++++++++++----------------- app/tests/test_config_ifc_proxy.py | 53 +++++++++++++++++++++++++ app/tests/test_inverter_base.py | 2 +- app/tests/test_inverter_g3.py | 2 +- app/tests/test_inverter_g3p.py | 2 +- app/tests/test_modbus_tcp.py | 2 +- app/tests/test_mqtt.py | 2 +- app/tests/test_proxy.py | 2 +- app/tests/test_solarman.py | 2 +- app/tests/test_talent.py | 2 +- 19 files changed, 145 insertions(+), 85 deletions(-) rename app/src/{ => cnf}/config.py (81%) create mode 100644 app/src/cnf/config_ifc_proxy.py create mode 100644 app/tests/test_config_ifc_proxy.py diff --git a/app/src/config.py b/app/src/cnf/config.py similarity index 81% rename from app/src/config.py rename to app/src/cnf/config.py index 3424bd9..ecbad0a 100644 --- a/app/src/config.py +++ b/app/src/cnf/config.py @@ -1,19 +1,23 @@ '''Config module handles the proxy configuration in the config.toml file''' -import shutil import tomllib import logging +from abc import ABC, abstractmethod from schema import Schema, And, Or, Use, Optional +class ConfigIfc(ABC): + @abstractmethod + def get_config(cls) -> dict: # pragma: no cover + pass + + class Config(): '''Static class Config is reads and sanitize the config. Read config.toml file and sanitize it with read(). 
Get named parts of the config with get()''' - act_config = {} - def_config = {} conf_schema = Schema({ 'tsun': { 'enabled': Use(bool), @@ -93,38 +97,14 @@ class Config(): ) @classmethod - def class_init(cls) -> None | str: # pragma: no cover - try: - # make the default config transparaent by copying it - # in the config.example file - logging.debug('Copy Default Config to config.example.toml') - - shutil.copy2("default_config.toml", - "config/config.example.toml") - except Exception: - pass - err_str = cls.read() - del cls.conf_schema - return err_str + def init(cls, ifc: ConfigIfc, path='') -> None | str: + cls.ifc = ifc + cls.act_config = {} + cls.def_config = {} + return cls.read(path) @classmethod - def _read_config_file(cls) -> dict: # pragma: no cover - usr_config = {} - - try: - with open("config/config.toml", "rb") as f: - usr_config = tomllib.load(f) - except Exception as error: - err = f'Config.read: {error}' - logging.error(err) - logging.info( - '\n To create the missing config.toml file, ' - 'you can rename the template config.example.toml\n' - ' and customize it for your scenario.\n') - return usr_config - - @classmethod - def read(cls, path='') -> None | str: + def read(cls, path) -> None | str: '''Read config file, merge it with the default config and sanitize the result''' err = None @@ -140,7 +120,7 @@ class Config(): # overwrite the default values, with values from # the config.toml file - usr_config = cls._read_config_file() + usr_config = cls.ifc.get_config() # merge the default and the user config config = def_config.copy() diff --git a/app/src/cnf/config_ifc_proxy.py b/app/src/cnf/config_ifc_proxy.py new file mode 100644 index 0000000..cf2f022 --- /dev/null +++ b/app/src/cnf/config_ifc_proxy.py @@ -0,0 +1,34 @@ +'''Config module handles the proxy configuration in the config.toml file''' + +import shutil +import tomllib +import logging +from cnf.config import ConfigIfc + + +class ConfigIfcProxy(ConfigIfc): + def __init__(self): # pragma: no 
cover + try: + # make the default config transparaent by copying it + # in the config.example file + logging.info('Copy Default Config to config.example.toml') + + shutil.copy2("default_config.toml", + "config/config.example.toml") + except Exception: + pass + + def get_config(self, cnf_file="config/config.toml") -> dict: + usr_config = {} + + try: + with open(cnf_file, "rb") as f: + usr_config = tomllib.load(f) + except Exception as error: + err = f'Config.read: {error}' + logging.error(err) + logging.info( + '\n To create the missing config.toml file, ' + 'you can rename the template config.example.toml\n' + ' and customize it for your scenario.\n') + return usr_config diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index da3ebc8..efb080a 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -7,7 +7,7 @@ from tzlocal import get_localzone from async_ifc import AsyncIfc from messages import Message, State from modbus import Modbus -from config import Config +from cnf.config import Config from gen3.infos_g3 import InfosG3 from infos import Register diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index f95894e..463c043 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -6,7 +6,7 @@ from datetime import datetime from async_ifc import AsyncIfc from messages import hex_dump_memory, Message, State -from config import Config +from cnf.config import Config from modbus import Modbus from gen3plus.infos_g3p import InfosG3P from infos import Register, Fmt diff --git a/app/src/inverter_base.py b/app/src/inverter_base.py index 757b883..580490c 100644 --- a/app/src/inverter_base.py +++ b/app/src/inverter_base.py @@ -12,7 +12,7 @@ from proxy import Proxy from async_stream import StreamPtr from async_stream import AsyncStreamClient from async_stream import AsyncStreamServer -from config import Config +from cnf.config import Config from infos import Infos logger_mqtt = 
logging.getLogger('mqtt') diff --git a/app/src/modbus_tcp.py b/app/src/modbus_tcp.py index f3788d4..7d371c9 100644 --- a/app/src/modbus_tcp.py +++ b/app/src/modbus_tcp.py @@ -2,7 +2,7 @@ import logging import traceback import asyncio -from config import Config +from cnf.config import Config from gen3plus.inverter_g3p import InverterG3P from infos import Infos diff --git a/app/src/mqtt.py b/app/src/mqtt.py index f52b797..0d33cac 100644 --- a/app/src/mqtt.py +++ b/app/src/mqtt.py @@ -5,7 +5,7 @@ import traceback from modbus import Modbus from messages import Message -from config import Config +from cnf.config import Config from singleton import Singleton logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/proxy.py b/app/src/proxy.py index eadc3ac..3f4f263 100644 --- a/app/src/proxy.py +++ b/app/src/proxy.py @@ -2,7 +2,7 @@ import asyncio import logging import json -from config import Config +from cnf.config import Config from mqtt import Mqtt from infos import Infos diff --git a/app/src/server.py b/app/src/server.py index cda8501..3f997bd 100644 --- a/app/src/server.py +++ b/app/src/server.py @@ -10,7 +10,8 @@ from inverter_ifc import InverterIfc from gen3.inverter_g3 import InverterG3 from gen3plus.inverter_g3p import InverterG3P from scheduler import Schedule -from config import Config +from cnf.config import Config +from cnf.config_ifc_proxy import ConfigIfcProxy from modbus_tcp import ModbusTcp routes = web.RouteTableDef() @@ -149,7 +150,7 @@ if __name__ == "__main__": asyncio.set_event_loop(loop) # read config file - ConfigErr = Config.class_init() + ConfigErr = Config.init(ConfigIfcProxy()) if ConfigErr is not None: logging.info(f'ConfigErr: {ConfigErr}') Proxy.class_init() diff --git a/app/tests/test_config.py b/app/tests/test_config.py index aaac45c..3eb2169 100644 --- a/app/tests/test_config.py +++ b/app/tests/test_config.py @@ -1,16 +1,16 @@ # test_with_pytest.py import tomllib from schema import SchemaMissingKeyError -from config import Config 
+from cnf.config import Config, ConfigIfc -class TstConfig(Config): +class TstConfig(ConfigIfc): @classmethod - def set(cls, cnf): + def __init__(cls, cnf): cls.act_config = cnf @classmethod - def _read_config_file(cls) -> dict: + def get_config(cls) -> dict: return cls.act_config @@ -93,10 +93,9 @@ def test_mininum_config(): def test_read_empty(): cnf = {} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + err = Config.init(TstConfig(cnf), 'app/config/') assert err == None - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -129,26 +128,24 @@ def test_read_empty(): } } - defcnf = TstConfig.def_config.get('solarman') + defcnf = Config.def_config.get('solarman') assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000} - assert True == TstConfig.is_default('solarman') + assert True == Config.is_default('solarman') def test_no_file(): cnf = {} - TstConfig.set(cnf) - err = TstConfig.read('') + err = Config.init(TstConfig(cnf), '') assert err == "Config.read: [Errno 2] No such file or directory: 'default_config.toml'" - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {} - defcnf = TstConfig.def_config.get('solarman') + defcnf = Config.def_config.get('solarman') assert defcnf == None def test_read_cnf1(): cnf = {'solarman' : {'enabled': False}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + err = Config.init(TstConfig(cnf), 
'app/config/') assert err == None - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -180,18 +177,17 @@ def test_read_cnf1(): } } } - cnf = TstConfig.get('solarman') + cnf = Config.get('solarman') assert cnf == {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000} - defcnf = TstConfig.def_config.get('solarman') + defcnf = Config.def_config.get('solarman') assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000} - assert False == TstConfig.is_default('solarman') + assert False == Config.is_default('solarman') def test_read_cnf2(): cnf = {'solarman' : {'enabled': 'FALSE'}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + err = Config.init(TstConfig(cnf), 'app/config/') assert err == None - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -223,22 +219,20 
@@ def test_read_cnf2(): } } } - assert True == TstConfig.is_default('solarman') + assert True == Config.is_default('solarman') def test_read_cnf3(): cnf = {'solarman' : {'port': 'FALSE'}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + err = Config.init(TstConfig(cnf), 'app/config/') assert err == 'Config.read: Key \'solarman\' error:\nKey \'port\' error:\nint(\'FALSE\') raised ValueError("invalid literal for int() with base 10: \'FALSE\'")' - cnf = TstConfig.get() - assert cnf == {'solarman': {'port': 'FALSE'}} + cnf = Config.get() + assert cnf == {} def test_read_cnf4(): cnf = {'solarman' : {'port': 5000}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + err = Config.init(TstConfig(cnf), 'app/config/') assert err == None - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -270,16 +264,14 @@ def test_read_cnf4(): } } } - assert False == TstConfig.is_default('solarman') + assert False == Config.is_default('solarman') def test_read_cnf5(): cnf = {'solarman' : {'port': 1023}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + err = Config.init(TstConfig(cnf), 'app/config/') assert err != None def test_read_cnf6(): cnf = {'solarman' : {'port': 65536}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + err = Config.init(TstConfig(cnf), 'app/config/') assert err != None diff --git a/app/tests/test_config_ifc_proxy.py 
b/app/tests/test_config_ifc_proxy.py new file mode 100644 index 0000000..02b0ec7 --- /dev/null +++ b/app/tests/test_config_ifc_proxy.py @@ -0,0 +1,53 @@ +# test_with_pytest.py +import tomllib +from schema import SchemaMissingKeyError +from cnf.config_ifc_proxy import ConfigIfcProxy + +class CnfIfc(ConfigIfcProxy): + def __init__(self): + pass + +def test_no_config(): + cnf_ifc = CnfIfc() + + cnf = cnf_ifc.get_config("") + assert cnf == {} + +def test_get_config(): + cnf_ifc = CnfIfc() + + cnf = cnf_ifc.get_config("app/config/default_config.toml") + assert cnf == { + 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, + 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, + 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, + 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, + 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': { + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'modbus_polling': False, + 'suggested_area': '' + }, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'suggested_area': '' + } + } + } + diff --git a/app/tests/test_inverter_base.py b/app/tests/test_inverter_base.py index 5962a49..2e05777 100644 --- a/app/tests/test_inverter_base.py +++ b/app/tests/test_inverter_base.py @@ -6,7 +6,7 @@ import gc from mock import patch from 
enum import Enum from infos import Infos -from config import Config +from cnf.config import Config from gen3.talent import Talent from inverter_base import InverterBase from singleton import Singleton diff --git a/app/tests/test_inverter_g3.py b/app/tests/test_inverter_g3.py index a841dbc..620173c 100644 --- a/app/tests/test_inverter_g3.py +++ b/app/tests/test_inverter_g3.py @@ -6,7 +6,7 @@ import sys,gc from mock import patch from enum import Enum from infos import Infos -from config import Config +from cnf.config import Config from proxy import Proxy from inverter_base import InverterBase from singleton import Singleton diff --git a/app/tests/test_inverter_g3p.py b/app/tests/test_inverter_g3p.py index 307018b..6bb98ed 100644 --- a/app/tests/test_inverter_g3p.py +++ b/app/tests/test_inverter_g3p.py @@ -5,7 +5,7 @@ import asyncio from mock import patch from enum import Enum from infos import Infos -from config import Config +from cnf.config import Config from proxy import Proxy from inverter_base import InverterBase from singleton import Singleton diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index e901e96..1c69b60 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -6,7 +6,7 @@ from aiomqtt import MqttCodeError from mock import patch from enum import Enum from singleton import Singleton -from config import Config +from cnf.config import Config from infos import Infos from mqtt import Mqtt from inverter_base import InverterBase diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 91acd02..9c2923c 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -10,7 +10,7 @@ from singleton import Singleton from mqtt import Mqtt from modbus import Modbus from gen3plus.solarman_v5 import SolarmanV5 -from config import Config +from cnf.config import Config pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_proxy.py b/app/tests/test_proxy.py index aa6c739..ace707e 100644 --- 
a/app/tests/test_proxy.py +++ b/app/tests/test_proxy.py @@ -9,7 +9,7 @@ from singleton import Singleton from proxy import Proxy from mqtt import Mqtt from gen3plus.solarman_v5 import SolarmanV5 -from config import Config +from cnf.config import Config pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index 6d19688..6f11bec 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -7,7 +7,7 @@ import random from math import isclose from async_stream import AsyncIfcImpl, StreamPtr from gen3plus.solarman_v5 import SolarmanV5, SolarmanBase -from config import Config +from cnf.config import Config from infos import Infos, Register from modbus import Modbus from messages import State, Message diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 32fd6fe..2b1ef6c 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -3,7 +3,7 @@ import pytest, logging, asyncio from math import isclose from async_stream import AsyncIfcImpl, StreamPtr from gen3.talent import Talent, Control -from config import Config +from cnf.config import Config from infos import Infos, Register from modbus import Modbus from messages import State From be3b4d6df034b8f04f23aa1fb6f00e6c06455856 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Tue, 3 Dec 2024 22:22:50 +0100 Subject: [PATCH 28/32] S allius/issue206 (#213) * update changelog * add addon-dev target * initial version * use prebuild docker image * initial version for multi arch images * fix missing label latest * create log and config folder first. 
* clean up and translate to english * set labels with docker bake * add addon-debug and addon-dev targets * pass version number to proxy at runtime * add two more callbacks * get addon version from app * deploy rc addon container to ghcr * move ha_addon test into subdir * fix crash on container restart - mkdir -p returns no error even if the director exists * prepation for unit testing - move script into a method * added further config to schema * typo fixed * added monitor_sn + PV-strings 3-6 to create toml * added options.json for testing * prepare pytest and coverage for addons * fix missing values in resulting config.toml - define mqtt default values - convert filter configuration * first running unittest for addons * add ha_addons * increase test coverage * test empty options.json file for HA AddOn * fix pytest call in terminal * improve test coverage * remove uneeded options.json * move config.py into subdir cnf --------- Co-authored-by: Michael Metz --- .github/workflows/python-app.yml | 2 +- .gitignore | 4 +- .vscode/settings.json | 15 +- CHANGELOG.md | 1 + Makefile | 7 +- app/.version | 1 + app/tests/test_config.py | 90 +- ha_addon/Makefile | 45 - ha_addon/rootfs/home/create_config_toml.py | 65 -- ha_addon/rootfs/home/options.json | 19 - ha_addon/tests/test_create_config_toml.py | 6 - {ha_addon => ha_addons/ha_addon}/Dockerfile | 22 +- ha_addons/ha_addon/Makefile | 74 ++ {ha_addon => ha_addons/ha_addon}/config.yaml | 43 +- ha_addons/ha_addon/docker-bake.hcl | 99 ++ {ha_addon => ha_addons/ha_addon}/icon.png | Bin {ha_addon => ha_addons/ha_addon}/logo.png | Bin .../rootfs/home/create_config_toml.py | 115 +++ .../ha_addon/rootfs/home/proxy/async_ifc.py | 104 --- .../rootfs/home/proxy/async_stream.py | 397 -------- .../ha_addon/rootfs/home/proxy/byte_fifo.py | 52 -- .../ha_addon/rootfs/home/proxy/config.py | 181 ---- .../rootfs/home/proxy/default_config.toml | 177 ---- .../rootfs/home/proxy/gen3/infos_g3.py | 194 ---- .../rootfs/home/proxy/gen3/inverter_g3.py 
| 9 - .../ha_addon/rootfs/home/proxy/gen3/talent.py | 569 ------------ .../rootfs/home/proxy/gen3plus/infos_g3p.py | 204 ---- .../home/proxy/gen3plus/inverter_g3p.py | 15 - .../home/proxy/gen3plus/solarman_emu.py | 138 --- .../rootfs/home/proxy/gen3plus/solarman_v5.py | 706 -------------- ha_addons/ha_addon/rootfs/home/proxy/infos.py | 871 ------------------ .../rootfs/home/proxy/inverter_base.py | 178 ---- .../rootfs/home/proxy/inverter_ifc.py | 37 - .../rootfs/home/proxy/iter_registry.py | 9 - .../ha_addon/rootfs/home/proxy/logging.ini | 76 -- .../ha_addon/rootfs/home/proxy/messages.py | 203 ---- .../ha_addon/rootfs/home/proxy/modbus.py | 345 ------- .../ha_addon/rootfs/home/proxy/modbus_tcp.py | 88 -- ha_addons/ha_addon/rootfs/home/proxy/mqtt.py | 182 ---- .../ha_addon/rootfs/home/proxy/my_timer.py | 35 - .../rootfs/home/proxy/protocol_ifc.py | 17 - ha_addons/ha_addon/rootfs/home/proxy/proxy.py | 101 -- .../ha_addon/rootfs/home/proxy/scheduler.py | 30 - .../ha_addon/rootfs/home/proxy/server.py | 191 ---- .../ha_addon/rootfs/home/proxy/singleton.py | 14 - ha_addons/ha_addon/rootfs/requirements.txt | 4 - .../ha_addon}/rootfs/run.sh | 10 +- .../ha_addon/tests/test_create_config_toml.py | 190 ++++ .../ha_addon}/translations/en.yaml | 37 +- ha_addons/repository.yaml | 3 + pytest.ini | 4 +- sonar-project.properties | 4 +- 52 files changed, 660 insertions(+), 5323 deletions(-) create mode 100644 app/.version delete mode 100644 ha_addon/Makefile delete mode 100644 ha_addon/rootfs/home/create_config_toml.py delete mode 100644 ha_addon/rootfs/home/options.json delete mode 100644 ha_addon/tests/test_create_config_toml.py rename {ha_addon => ha_addons/ha_addon}/Dockerfile (77%) create mode 100644 ha_addons/ha_addon/Makefile rename {ha_addon => ha_addons/ha_addon}/config.yaml (63%) create mode 100644 ha_addons/ha_addon/docker-bake.hcl rename {ha_addon => ha_addons/ha_addon}/icon.png (100%) rename {ha_addon => ha_addons/ha_addon}/logo.png (100%) create mode 100644 
ha_addons/ha_addon/rootfs/home/create_config_toml.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/async_stream.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/config.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/default_config.toml delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_v5.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/infos.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/logging.ini delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/messages.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/modbus.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/mqtt.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/my_timer.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/proxy.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/scheduler.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/server.py delete mode 100644 ha_addons/ha_addon/rootfs/home/proxy/singleton.py delete mode 100644 
ha_addons/ha_addon/rootfs/requirements.txt rename {ha_addon => ha_addons/ha_addon}/rootfs/run.sh (77%) create mode 100644 ha_addons/ha_addon/tests/test_create_config_toml.py rename {ha_addon => ha_addons/ha_addon}/translations/en.yaml (64%) create mode 100644 ha_addons/repository.yaml diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 8434843..9dc387c 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -54,7 +54,7 @@ jobs: flake8 --exit-zero --ignore=C901,E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505 --format=pylint --output-file=output_flake.txt --exclude=*.pyc app/src/ - name: Test with pytest run: | - python -m pytest app ha_addon --cov=app/src --cov=ha_addon/rootfs/home --cov-report=xml + python -m pytest app ha_addons --cov=app/src --cov=ha_addons/ha_addon/rootfs/home --cov-report=xml coverage report - name: Analyze with SonarCloud if: ${{ env.SONAR_TOKEN != 0 }} diff --git a/.gitignore b/.gitignore index 6312b62..3e39de3 100644 --- a/.gitignore +++ b/.gitignore @@ -4,8 +4,8 @@ __pycache__ bin/** mosquitto/** homeassistant/** -ha_addon/rootfs/home/proxy/* -ha_addon/rootfs/requirements.txt +ha_addons/ha_addon/rootfs/home/proxy/* +ha_addons/ha_addon/rootfs/requirements.txt tsun_proxy/** Doku/** .DS_Store diff --git a/.vscode/settings.json b/.vscode/settings.json index 04d690a..a17eb69 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,20 +1,22 @@ { "python.analysis.extraPaths": [ "app/src", - ".venv/lib" ], + "app/tests", + ".venv/lib", + "ha_addons/ha_addon/rootfs" ], "python.testing.pytestArgs": [ - "-v", + "-vvv", "--cov=app/src", - "--cov=ha_addon/rootfs/home", + "--cov=ha_addons/ha_addon/rootfs", "--cov-report=xml", "app", "system_tests", - "ha_addon" + "ha_addons" ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "flake8.args": [ - "--extend-exclude=app/tests/*.py system_tests/*.py" + "--extend-exclude=app/tests/*.py 
system_tests/*.py ha_addons/ha_addon/tests/*.py" ], "sonarlint.connectedMode.project": { "connectionId": "s-allius", @@ -27,5 +29,6 @@ "python.autoComplete.extraPaths": [ ".venv/lib" ], - "coverage-gutters.coverageBaseDir": "tsun" + "coverage-gutters.coverageBaseDir": "tsun", + "makefile.configureOnOpen": false } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 14dc30f..532e545 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- add initial support for home assistant add-ons from @mime24 - github action: use ubuntu 24.04 and sonar-scanner-action 4 [#222](https://github.com/s-allius/tsun-gen3-proxy/issues/222) - migrate paho.mqtt CallbackAPIVersion to VERSION2 [#224](https://github.com/s-allius/tsun-gen3-proxy/issues/224) - add PROD_COMPL_TYPE to trace diff --git a/Makefile b/Makefile index a09ae26..493875d 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,10 @@ -.PHONY: build clean +.PHONY: build clean addon-dev addon-debug sddon-rc # debug dev: # $(MAKE) -C app $@ clean build: - $(MAKE) -C ha_addon $@ \ No newline at end of file + $(MAKE) -C ha_addons/ha_addon $@ + +addon-dev addon-debug addon-rc: + $(MAKE) -C ha_addons/ha_addon $(patsubst addon-%,%,$@) \ No newline at end of file diff --git a/app/.version b/app/.version new file mode 100644 index 0000000..d33c3a2 --- /dev/null +++ b/app/.version @@ -0,0 +1 @@ +0.12.0 \ No newline at end of file diff --git a/app/tests/test_config.py b/app/tests/test_config.py index 3eb2169..a0eacb6 100644 --- a/app/tests/test_config.py +++ b/app/tests/test_config.py @@ -1,4 +1,5 @@ # test_with_pytest.py +import pytest import tomllib from schema import SchemaMissingKeyError from cnf.config import Config, ConfigIfc @@ -22,6 +23,77 @@ def test_empty_config(): except SchemaMissingKeyError: pass +@pytest.fixture +def ConfigComplete(): + return { + 'gen3plus': { + 'at_acl': { + 'mqtt': {'allow': ['AT+'], 
'block': ['AT+SUPDATE']}, + 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], + 'block': ['AT+SUPDATE']} + } + }, + 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', + 'port': 5005}, + 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', + 'port': 10000}, + 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, + 'ha': {'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'proxy', + 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': {'node_id': 'PV-Garage/', + 'modbus_polling': False, + 'monitor_sn': 0, + 'pv1': {'manufacturer': 'man1', + 'type': 'type1'}, + 'pv2': {'manufacturer': 'man2', + 'type': 'type2'}, + 'suggested_area': 'Garage', + 'sensor_list': 688}, + 'Y170000000000001': {'modbus_polling': True, + 'monitor_sn': 2000000000, + 'node_id': 'PV-Garage2/', + 'pv1': {'manufacturer': 'man1', + 'type': 'type1'}, + 'pv2': {'manufacturer': 'man2', + 'type': 'type2'}, + 'pv3': {'manufacturer': 'man3', + 'type': 'type3'}, + 'pv4': {'manufacturer': 'man4', + 'type': 'type4'}, + 'suggested_area': 'Garage2', + 'sensor_list': 688} + } + } + +@pytest.fixture +def ConfigMinimum(): + return { + 'gen3plus': { + 'at_acl': { + 'mqtt': {'allow': ['AT+'], 'block': []}, + 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], + 'block': []} + } + }, + 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', + 'port': 5005}, + 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, + 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, + 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': True, + 'R170000000000001': {'node_id': '', + 'modbus_polling': True, + 'monitor_sn': 0, + 'suggested_area': '', + 
'sensor_list': 688}}} + + def test_default_config(): with open("app/config/default_config.toml", "rb") as f: cnf = tomllib.load(f) @@ -58,23 +130,23 @@ def test_default_config(): 'suggested_area': '', 'sensor_list': 688}}} -def test_full_config(): +def test_full_config(ConfigComplete): cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, - 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, - 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, + 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']}, + 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': ['AT+SUPDATE']}}}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': {'allow_all': True, - 'R170000000000001': {'modbus_polling': True, 'node_id': '', 'sensor_list': 0, 'suggested_area': '', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}}, - 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'sensor_list': 0x1511, 'suggested_area': ''}}} + 'inverters': {'allow_all': False, + 'R170000000000001': {'modbus_polling': False, 'node_id': 'PV-Garage/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}}, + 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': 'PV-Garage2/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage2', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}, 'pv4': {'type': 
'type4', 'manufacturer': 'man4'}}}} try: validated = Config.conf_schema.validate(cnf) except Exception: assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'pv1': {'manufacturer': 'man1','type': 'type1'},'pv2': {'manufacturer': 'man2','type': 'type2'},'pv3': {'manufacturer': 'man3','type': 'type3'}, 'suggested_area': '', 'sensor_list': 0}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': '', 'sensor_list': 5393}}} + assert validated == ConfigComplete -def test_mininum_config(): +def test_mininum_config(ConfigMinimum): cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+']}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE']}}}, @@ -89,7 +161,7 @@ def test_mininum_config(): validated = Config.conf_schema.validate(cnf) except Exception: assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 
'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': '', 'sensor_list': 688}}} + assert validated == ConfigMinimum def test_read_empty(): cnf = {} diff --git a/ha_addon/Makefile b/ha_addon/Makefile deleted file mode 100644 index b83b0f5..0000000 --- a/ha_addon/Makefile +++ /dev/null @@ -1,45 +0,0 @@ -SHELL = /bin/sh - -# Folders -SRC=../app -SRC_PROXY=$(SRC)/src -CNF_PROXY=$(SRC)/config - -DST=rootfs -DST_PROXY=$(DST)/home/proxy - -# collect source files -SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\ - $(wildcard $(SRC_PROXY)/*.ini)\ - $(wildcard $(SRC_PROXY)/gen3/*.py)\ - $(wildcard $(SRC_PROXY)/gen3plus/*.py) -CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml) - -# determine destination files -TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%) -CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%) - -build: rootfs - -clean: - rm -r -f $(DST_PROXY) - rm -f $(DST)/requirements.txt - -rootfs: $(TARGET_FILES) $(CONFIG_FILES) $(DST)/requirements.txt - -.PHONY: build clean rootfs - - -$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/% - @echo Copy $< to $@ - @mkdir -p $(@D) - @cp $< $@ - -$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/% - @echo Copy $< to $@ - @mkdir -p $(@D) - @cp $< $@ - -$(DST)/requirements.txt : $(SRC)/requirements.txt - @echo Copy $< to $@ - @cp $< $@ diff --git a/ha_addon/rootfs/home/create_config_toml.py b/ha_addon/rootfs/home/create_config_toml.py deleted file mode 100644 index 91f8543..0000000 --- a/ha_addon/rootfs/home/create_config_toml.py +++ /dev/null @@ -1,65 +0,0 @@ -import json -import os - -# Dieses file übernimmt die Add-On Konfiguration und schreibt sie in die -# Konfigurationsdatei des tsun-proxy -# Die Addon Konfiguration wird in der Datei /data/options.json bereitgestellt -# Die Konfiguration wird in der Datei /home/proxy/config/config.toml -# 
gespeichert - -# Übernehme die Umgebungsvariablen -# alternativ kann auch auf die homeassistant supervisor API zugegriffen werden - -data = {} -data['mqtt.host'] = os.getenv('MQTT_HOST') -data['mqtt.port'] = os.getenv('MQTT_PORT') -data['mqtt.user'] = os.getenv('MQTT_USER') -data['mqtt.passwd'] = os.getenv('MQTT_PASSWORD') - - -# Lese die Add-On Konfiguration aus der Datei /data/options.json -with open('/data/options.json') as json_file: - # with open('options.json') as json_file: - options_data = json.load(json_file) - data.update(options_data) - - -# Schreibe die Add-On Konfiguration in die Datei /home/proxy/config/config.toml # noqa: E501 -with open('/home/proxy/config/config.toml', 'w+') as f: - # with open('./config/config.toml', 'w+') as f: - f.write(f""" -mqtt.host = '{data.get('mqtt.host')}' # URL or IP address of the mqtt broker -mqtt.port = {data.get('mqtt.port')} -mqtt.user = '{data.get('mqtt.user')}' -mqtt.passwd = '{data.get('mqtt.passwd')}' - - -ha.auto_conf_prefix = '{data.get('ha.auto_conf_prefix', 'homeassistant')}' # MQTT prefix for subscribing for homeassistant status updates # noqa: E501 -ha.discovery_prefix = '{data.get('ha.discovery_prefix', 'homeassistant')}' # MQTT prefix for discovery topic # noqa: E501 -ha.entity_prefix = '{data.get('ha.entity_prefix', 'tsun')}' # MQTT topic prefix for publishing inverter values # noqa: E501 -ha.proxy_node_id = '{data.get('ha.proxy_node_id', 'proxy')}' # MQTT node id, for the proxy_node_id -ha.proxy_unique_id = '{data.get('ha.proxy_unique_id', 'P170000000000001')}' # MQTT unique id, to identify a proxy instance - - -tsun.enabled = {str(data.get('tsun.enabled', True)).lower()} -tsun.host = '{data.get('tsun.host', 'logger.talent-monitoring.com')}' -tsun.port = {data.get('tsun.port', 5005)} - - -solarman.enabled = {str(data.get('solarman.enabled', True)).lower()} -solarman.host = '{data.get('solarman.host', 'iot.talent-monitoring.com')}' -solarman.port = {data.get('solarman.port', 10000)} - - 
-inverters.allow_all = {str(data.get('inverters.allow_all', False)).lower()} -""") - - for inverter in data['inverters']: - f.write(f""" -[inverters."{inverter['serial']}"] -node_id = '{inverter['node_id']}' -suggested_area = '{inverter['suggested_area']}' -modbus_polling = {str(inverter['modbus_polling']).lower()} -pv1 = {{type = '{inverter['pv1_type']}', manufacturer = '{inverter['pv1_manufacturer']}'}} # Optional, PV module descr # noqa: E501 -pv2 = {{type = '{inverter['pv2_type']}', manufacturer = '{inverter['pv2_manufacturer']}'}} # Optional, PV module descr # noqa: E501 -""") diff --git a/ha_addon/rootfs/home/options.json b/ha_addon/rootfs/home/options.json deleted file mode 100644 index 6603eb5..0000000 --- a/ha_addon/rootfs/home/options.json +++ /dev/null @@ -1,19 +0,0 @@ - - -{ - "inverters": [ - { - "serial": "R17E760702080400", - "node_id": "PV-Garage", - "suggested_area": "Garage", - "modbus_polling": false, - "pv1_manufacturer": "Shinefar", - "pv1_type": "SF-M18/144550", - "pv2_manufacturer": "Shinefar", - "pv2_type": "SF-M18/144550" - } - ], - "tsun.enabled": false, - "solarman.enabled": false, - "inverters.allow_all": false -} \ No newline at end of file diff --git a/ha_addon/tests/test_create_config_toml.py b/ha_addon/tests/test_create_config_toml.py deleted file mode 100644 index 077a615..0000000 --- a/ha_addon/tests/test_create_config_toml.py +++ /dev/null @@ -1,6 +0,0 @@ -# test_with_pytest.py -# import ha_addon.rootfs.home.create_config_toml - - -def test_config(): - pass diff --git a/ha_addon/Dockerfile b/ha_addons/ha_addon/Dockerfile similarity index 77% rename from ha_addon/Dockerfile rename to ha_addons/ha_addon/Dockerfile index 6568cc7..aa267cd 100755 --- a/ha_addon/Dockerfile +++ b/ha_addons/ha_addon/Dockerfile @@ -13,10 +13,7 @@ # 1 Build Image # ###################### -# opt for suitable build base. 
I opted for the recommended hassio-addon base - -#ARG BUILD_FROM="ghcr.io/hassio-addons/debian-base:latest" -ARG BUILD_FROM="ghcr.io/hassio-addons/base:latest" +ARG BUILD_FROM="ghcr.io/hassio-addons/base:stable" FROM $BUILD_FROM @@ -70,18 +67,16 @@ COPY rootfs/ / RUN chmod a+x /run.sh -# no idea whether needed or not -ENV SERVICE_NAME="tsun-proxy" -ENV UID=1000 -ENV GID=1000 -ENV VERSION="0.0" - - ####################### # 6 run app # ####################### +ARG SERVICE_NAME +ARG VERSION +ENV SERVICE_NAME=${SERVICE_NAME} + +RUN echo ${VERSION} > /proxy-version.txt # command to run on container start CMD [ "/run.sh" ] @@ -90,8 +85,3 @@ CMD [ "/run.sh" ] ####################### -# Labels -LABEL \ - io.hass.version="VERSION" \ - io.hass.type="addon" \ - io.hass.arch="armhf|aarch64|i386|amd64" diff --git a/ha_addons/ha_addon/Makefile b/ha_addons/ha_addon/Makefile new file mode 100644 index 0000000..43de018 --- /dev/null +++ b/ha_addons/ha_addon/Makefile @@ -0,0 +1,74 @@ +#!make +include ../../.env + +SHELL = /bin/sh +IMAGE = tsun-gen3-addon + + +# Folders +SRC=../../app +SRC_PROXY=$(SRC)/src +CNF_PROXY=$(SRC)/config + +DST=rootfs +DST_PROXY=$(DST)/home/proxy + +# collect source files +SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\ + $(wildcard $(SRC_PROXY)/*.ini)\ + $(wildcard $(SRC_PROXY)/cnf/*.py)\ + $(wildcard $(SRC_PROXY)/gen3/*.py)\ + $(wildcard $(SRC_PROXY)/gen3plus/*.py) +CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml) + +# determine destination files +TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%) +CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%) + +export BUILD_DATE := ${shell date -Iminutes} +VERSION := $(shell cat $(SRC)/.version) +export MAJOR := $(shell echo $(VERSION) | cut -f1 -d.) 
+ +PUBLIC_URL := $(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f1 -d/) +PUBLIC_USER :=$(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f2 -d/) + + +dev debug: build + @echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PRIVAT_CONTAINER_REGISTRY)$(IMAGE) + export VERSION=$(VERSION)-$@ && \ + export IMAGE=$(PRIVAT_CONTAINER_REGISTRY)$(IMAGE) && \ + docker buildx bake -f docker-bake.hcl $@ + +rc: build + @echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PUBLIC_CONTAINER_REGISTRY)$(IMAGE) + @echo login at $(PUBLIC_URL) as $(PUBLIC_USER) + @DO_LOGIN="$(shell echo $(PUBLIC_CR_KEY) | docker login $(PUBLIC_URL) -u $(PUBLIC_USER) --password-stdin)" + export VERSION=$(VERSION)-$@ && \ + export IMAGE=$(PUBLIC_CONTAINER_REGISTRY)$(IMAGE) && \ + docker buildx bake -f docker-bake.hcl $@ + + +build: rootfs + +clean: + rm -r -f $(DST_PROXY) + rm -f $(DST)/requirements.txt + +rootfs: $(TARGET_FILES) $(CONFIG_FILES) $(DST)/requirements.txt + +.PHONY: debug dev build clean rootfs + + +$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/% + @echo Copy $< to $@ + @mkdir -p $(@D) + @cp $< $@ + +$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/% + @echo Copy $< to $@ + @mkdir -p $(@D) + @cp $< $@ + +$(DST)/requirements.txt : $(SRC)/requirements.txt + @echo Copy $< to $@ + @cp $< $@ diff --git a/ha_addon/config.yaml b/ha_addons/ha_addon/config.yaml similarity index 63% rename from ha_addon/config.yaml rename to ha_addons/ha_addon/config.yaml index 4643693..213f0ee 100755 --- a/ha_addon/config.yaml +++ b/ha_addons/ha_addon/config.yaml @@ -1,6 +1,8 @@ name: "TSUN-Proxy" description: "MQTT Proxy for TSUN Photovoltaic Inverters" -version: "0.0.7" +version: "dev" +image: docker.io/sallius/tsun-gen3-addon +url: https://github.com/s-allius/tsun-gen3-proxy slug: "tsun-proxy" init: false arch: @@ -20,24 +22,35 @@ ports: # Definition of parameters in the configuration tab of the addon # parameters are available within the container as /data/options.json -# and should become picked 
up by the proxy - current workarround as a transfer script -# TODO: add further schema for remaining config parameters +# and should become picked up by the proxy - current workaround as a transfer script +# TODO: check again for multi hierarchie parameters # TODO: implement direct reading of the configuration file schema: inverters: - serial: str + monitor_sn: int? node_id: str suggested_area: str modbus_polling: bool - #strings: # leider funktioniert es nicht die folgenden 3 parameter im schema aufzulisten. möglicherweise wird die verschachtelung nicht unterstützt. + client_mode_host: str? + client_mode_port: int? + #strings: # leider funktioniert es nicht die folgenden 3 parameter im schema aufzulisten. möglicherweise wird die verschachtelung nicht unterstützt. # - string: str # type: str # manufacturer: str # daher diese variante - pv1_manufacturer: str - pv1_type: str - pv2_manufacturer: str - pv2_type: str + pv1_manufacturer: str? + pv1_type: str? + pv2_manufacturer: str? + pv2_type: str? + pv3_manufacturer: str? + pv3_type: str? + pv4_manufacturer: str? + pv4_type: str? + pv5_manufacturer: str? + pv5_type: str? + pv6_manufacturer: str? + pv6_type: str? tsun.enabled: bool solarman.enabled: bool inverters.allow_all: bool @@ -52,6 +65,16 @@ schema: ha.entity_prefix: str? #dito ha.proxy_node_id: str? #dito ha.proxy_unique_id: str? #dito + tsun.host: str? + solarman.host: str? + gen3plus.at_acl.tsun.allow: + - str + gen3plus.at_acl.tsun.block: + - str? + gen3plus.at_acl.mqtt.allow: + - str + gen3plus.at_acl.mqtt.block: + - str? # set default options for mandatory parameters # for optional parameters do not define any default value in the options dictionary. 
@@ -62,7 +85,7 @@ options: node_id: PV-Garage suggested_area: Garage modbus_polling: false - #strings: + # strings: # - string: PV1 # type: SF-M18/144550 # manufacturer: Shinefar @@ -76,3 +99,5 @@ options: tsun.enabled: true # set default solarman.enabled: true # set default inverters.allow_all: false # set default + gen3plus.at_acl.tsun.allow: ["AT+Z", "AT+UPURL", "AT+SUPDATE"] + gen3plus.at_acl.mqtt.allow: ["AT+"] \ No newline at end of file diff --git a/ha_addons/ha_addon/docker-bake.hcl b/ha_addons/ha_addon/docker-bake.hcl new file mode 100644 index 0000000..408a326 --- /dev/null +++ b/ha_addons/ha_addon/docker-bake.hcl @@ -0,0 +1,99 @@ +variable "IMAGE" { + default = "tsun-gen3-addon" +} +variable "VERSION" { + default = "0.0.0" +} +variable "MAJOR" { + default = "0" +} +variable "BUILD_DATE" { + default = "dev" +} +variable "BRANCH" { + default = "" +} +variable "DESCRIPTION" { + default = "This proxy enables a reliable connection between TSUN third generation inverters (eg. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations." +} + +target "_common" { + context = "." 
+ dockerfile = "Dockerfile" + args = { + VERSION = "${VERSION}" + environment = "production" + } + attest = [ + "type =provenance,mode=max", + "type =sbom,generator=docker/scout-sbom-indexer:latest" + ] + annotations = [ + "index:io.hass.version=${VERSION}", + "index:io.hass.type=addon", + "index:io.hass.arch=armhf|aarch64|i386|amd64", + "index:org.opencontainers.image.title=TSUN-Proxy", + "index:org.opencontainers.image.authors=Stefan Allius", + "index:org.opencontainers.image.created=${BUILD_DATE}", + "index:org.opencontainers.image.version=${VERSION}", + "index:org.opencontainers.image.revision=${BRANCH}", + "index:org.opencontainers.image.description=${DESCRIPTION}", + "index:org.opencontainers.image.licenses=BSD-3-Clause", + "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon" + ] + labels = { + "io.hass.version" = "${VERSION}" + "io.hass.type" = "addon" + "io.hass.arch" = "armhf|aarch64|i386|amd64" + "org.opencontainers.image.title" = "TSUN-Proxy" + "org.opencontainers.image.authors" = "Stefan Allius" + "org.opencontainers.image.created" = "${BUILD_DATE}" + "org.opencontainers.image.version" = "${VERSION}" + "org.opencontainers.image.revision" = "${BRANCH}" + "org.opencontainers.image.description" = "${DESCRIPTION}" + "org.opencontainers.image.licenses" = "BSD-3-Clause" + "org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy/ha_addonsha_addon" + } + output = [ + "type=image,push=true" + ] + + no-cache = false + platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"] +} + +target "_debug" { + args = { + LOG_LVL = "DEBUG" + environment = "dev" + } +} +target "_prod" { + args = { + } +} +target "debug" { + inherits = ["_common", "_debug"] + tags = ["${IMAGE}:debug"] +} + +target "dev" { + inherits = ["_common"] + tags = ["${IMAGE}:dev"] +} + +target "preview" { + inherits = ["_common", "_prod"] + tags = ["${IMAGE}:preview", "${IMAGE}:${VERSION}"] +} + +target "rc" { + inherits = 
["_common", "_prod"] + tags = ["${IMAGE}:rc", "${IMAGE}:${VERSION}"] +} + +target "rel" { + inherits = ["_common", "_prod"] + tags = ["${IMAGE}:latest", "${IMAGE}:${MAJOR}", "${IMAGE}:${VERSION}"] + no-cache = true +} diff --git a/ha_addon/icon.png b/ha_addons/ha_addon/icon.png similarity index 100% rename from ha_addon/icon.png rename to ha_addons/ha_addon/icon.png diff --git a/ha_addon/logo.png b/ha_addons/ha_addon/logo.png similarity index 100% rename from ha_addon/logo.png rename to ha_addons/ha_addon/logo.png diff --git a/ha_addons/ha_addon/rootfs/home/create_config_toml.py b/ha_addons/ha_addon/rootfs/home/create_config_toml.py new file mode 100644 index 0000000..f806ac1 --- /dev/null +++ b/ha_addons/ha_addon/rootfs/home/create_config_toml.py @@ -0,0 +1,115 @@ +import json +import os + +# Dieses file übernimmt die Add-On Konfiguration und schreibt sie in die +# Konfigurationsdatei des tsun-proxy +# Die Addon Konfiguration wird in der Datei /data/options.json bereitgestellt +# Die Konfiguration wird in der Datei /home/proxy/config/config.toml +# gespeichert + +# Übernehme die Umgebungsvariablen +# alternativ kann auch auf die homeassistant supervisor API zugegriffen werden + + +def create_config(): + data = {} + data['mqtt.host'] = os.getenv('MQTT_HOST', "mqtt") + data['mqtt.port'] = os.getenv('MQTT_PORT', 1883) + data['mqtt.user'] = os.getenv('MQTT_USER', "") + data['mqtt.passwd'] = os.getenv('MQTT_PASSWORD', "") + + # Lese die Add-On Konfiguration aus der Datei /data/options.json + # with open('data/options.json') as json_file: + with open('/data/options.json') as json_file: + try: + options_data = json.load(json_file) + data.update(options_data) + except json.JSONDecodeError: + pass + + # Schreibe die Add-On Konfiguration in die Datei /home/proxy/config/config.toml # noqa: E501 + # with open('./config/config.toml', 'w+') as f: + with open('/home/proxy/config/config.toml', 'w+') as f: + f.write(f""" +mqtt.host = '{data.get('mqtt.host')}' # URL or IP address 
of the mqtt broker +mqtt.port = {data.get('mqtt.port')} +mqtt.user = '{data.get('mqtt.user')}' +mqtt.passwd = '{data.get('mqtt.passwd')}' + + +ha.auto_conf_prefix = '{data.get('ha.auto_conf_prefix', 'homeassistant')}' # MQTT prefix for subscribing for homeassistant status updates +ha.discovery_prefix = '{data.get('ha.discovery_prefix', 'homeassistant')}' # MQTT prefix for discovery topic +ha.entity_prefix = '{data.get('ha.entity_prefix', 'tsun')}' # MQTT topic prefix for publishing inverter values +ha.proxy_node_id = '{data.get('ha.proxy_node_id', 'proxy')}' # MQTT node id, for the proxy_node_id +ha.proxy_unique_id = '{data.get('ha.proxy_unique_id', 'P170000000000001')}' # MQTT unique id, to identify a proxy instance + + +tsun.enabled = {str(data.get('tsun.enabled', True)).lower()} +tsun.host = '{data.get('tsun.host', 'logger.talent-monitoring.com')}' +tsun.port = {data.get('tsun.port', 5005)} + + +solarman.enabled = {str(data.get('solarman.enabled', True)).lower()} +solarman.host = '{data.get('solarman.host', 'iot.talent-monitoring.com')}' +solarman.port = {data.get('solarman.port', 10000)} + + +inverters.allow_all = {str(data.get('inverters.allow_all', False)).lower()} +""") # noqa: E501 + + if 'inverters' in data: + for inverter in data['inverters']: + f.write(f""" +[inverters."{inverter['serial']}"] +node_id = '{inverter['node_id']}' +suggested_area = '{inverter['suggested_area']}' +modbus_polling = {str(inverter['modbus_polling']).lower()} + +# check if inverter has monitor_sn key. if not, skip monitor_sn +{f"monitor_sn = '{inverter['monitor_sn']}'" if 'monitor_sn' in inverter else ''} + + + +# check if inverter has 'pv1_type' and 'pv1_manufacturer' keys. if not, skip pv1 +{f"pv1 = {{type = '{inverter['pv1_type']}', manufacturer = '{inverter['pv1_manufacturer']}'}}" if 'pv1_type' in inverter and 'pv1_manufacturer' in inverter else ''} +# check if inverter has 'pv2_type' and 'pv2_manufacturer' keys. 
if not, skip pv2 +{f"pv2 = {{type = '{inverter['pv2_type']}', manufacturer = '{inverter['pv2_manufacturer']}'}}" if 'pv2_type' in inverter and 'pv2_manufacturer' in inverter else ''} +# check if inverter has 'pv3_type' and 'pv3_manufacturer' keys. if not, skip pv3 +{f"pv3 = {{type = '{inverter['pv3_type']}', manufacturer = '{inverter['pv3_manufacturer']}'}}" if 'pv3_type' in inverter and 'pv3_manufacturer' in inverter else ''} +# check if inverter has 'pv4_type' and 'pv4_manufacturer' keys. if not, skip pv4 +{f"pv4 = {{type = '{inverter['pv4_type']}', manufacturer = '{inverter['pv4_manufacturer']}'}}" if 'pv4_type' in inverter and 'pv4_manufacturer' in inverter else ''} +# check if inverter has 'pv5_type' and 'pv5_manufacturer' keys. if not, skip pv5 +{f"pv5 = {{type = '{inverter['pv5_type']}', manufacturer = '{inverter['pv5_manufacturer']}'}}" if 'pv5_type' in inverter and 'pv5_manufacturer' in inverter else ''} +# check if inverter has 'pv6_type' and 'pv6_manufacturer' keys. if not, skip pv6 +{f"pv6 = {{type = '{inverter['pv6_type']}', manufacturer = '{inverter['pv6_manufacturer']}'}}" if 'pv6_type' in inverter and 'pv6_manufacturer' in inverter else ''} + + +""") # noqa: E501 + + # add filters + f.write(""" +[gen3plus.at_acl] +# filter for received commands from the internet +tsun.allow = [""") + if 'gen3plus.at_acl.tsun.allow' in data: + for rule in data['gen3plus.at_acl.tsun.allow']: + f.write(f"'{rule}',") + f.write("]\ntsun.block = [") + if 'gen3plus.at_acl.tsun.block' in data: + for rule in data['gen3plus.at_acl.tsun.block']: + f.write(f"'{rule}',") + f.write("""] +# filter for received commands from the MQTT broker +mqtt.allow = [""") + if 'gen3plus.at_acl.mqtt.allow' in data: + for rule in data['gen3plus.at_acl.mqtt.allow']: + f.write(f"'{rule}',") + f.write("]\nmqtt.block = [") + if 'gen3plus.at_acl.mqtt.block' in data: + for rule in data['gen3plus.at_acl.mqtt.block']: + f.write(f"'{rule}',") + f.write("]") + + +if __name__ == "__main__": # pragma: no 
cover + create_config() diff --git a/ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py b/ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py deleted file mode 100644 index 80af383..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/async_ifc.py +++ /dev/null @@ -1,104 +0,0 @@ -from abc import ABC, abstractmethod - - -class AsyncIfc(ABC): - @abstractmethod - def get_conn_no(self): - pass # pragma: no cover - - @abstractmethod - def set_node_id(self, value: str): - pass # pragma: no cover - - # - # TX - QUEUE - # - @abstractmethod - def tx_add(self, data: bytearray): - ''' add data to transmit queue''' - pass # pragma: no cover - - @abstractmethod - def tx_flush(self): - ''' send transmit queue and clears it''' - pass # pragma: no cover - - @abstractmethod - def tx_peek(self, size: int = None) -> bytearray: - '''returns size numbers of byte without removing them''' - pass # pragma: no cover - - @abstractmethod - def tx_log(self, level, info): - ''' log the transmit queue''' - pass # pragma: no cover - - @abstractmethod - def tx_clear(self): - ''' clear transmit queue''' - pass # pragma: no cover - - @abstractmethod - def tx_len(self): - ''' get numner of bytes in the transmit queue''' - pass # pragma: no cover - - # - # FORWARD - QUEUE - # - @abstractmethod - def fwd_add(self, data: bytearray): - ''' add data to forward queue''' - pass # pragma: no cover - - @abstractmethod - def fwd_log(self, level, info): - ''' log the forward queue''' - pass # pragma: no cover - - # - # RX - QUEUE - # - @abstractmethod - def rx_get(self, size: int = None) -> bytearray: - '''removes size numbers of bytes and return them''' - pass # pragma: no cover - - @abstractmethod - def rx_peek(self, size: int = None) -> bytearray: - '''returns size numbers of byte without removing them''' - pass # pragma: no cover - - @abstractmethod - def rx_log(self, level, info): - ''' logs the receive queue''' - pass # pragma: no cover - - @abstractmethod - def rx_clear(self): - ''' clear receive queue''' - 
pass # pragma: no cover - - @abstractmethod - def rx_len(self): - ''' get numner of bytes in the receive queue''' - pass # pragma: no cover - - @abstractmethod - def rx_set_cb(self, callback): - pass # pragma: no cover - - # - # Protocol Callbacks - # - @abstractmethod - def prot_set_timeout_cb(self, callback): - pass # pragma: no cover - - @abstractmethod - def prot_set_init_new_client_conn_cb(self, callback): - pass # pragma: no cover - - @abstractmethod - def prot_set_update_header_cb(self, callback): - pass # pragma: no cover diff --git a/ha_addons/ha_addon/rootfs/home/proxy/async_stream.py b/ha_addons/ha_addon/rootfs/home/proxy/async_stream.py deleted file mode 100644 index ec060b2..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/async_stream.py +++ /dev/null @@ -1,397 +0,0 @@ -import asyncio -import logging -import traceback -import time -from asyncio import StreamReader, StreamWriter -from typing import Self -from itertools import count - -from proxy import Proxy -from byte_fifo import ByteFifo -from async_ifc import AsyncIfc -from infos import Infos - - -import gc -logger = logging.getLogger('conn') - - -class AsyncIfcImpl(AsyncIfc): - _ids = count(0) - - def __init__(self) -> None: - logger.debug('AsyncIfcImpl.__init__') - self.fwd_fifo = ByteFifo() - self.tx_fifo = ByteFifo() - self.rx_fifo = ByteFifo() - self.conn_no = next(self._ids) - self.node_id = '' - self.timeout_cb = None - self.init_new_client_conn_cb = None - self.update_header_cb = None - - def close(self): - self.timeout_cb = None - self.fwd_fifo.reg_trigger(None) - self.tx_fifo.reg_trigger(None) - self.rx_fifo.reg_trigger(None) - - def set_node_id(self, value: str): - self.node_id = value - - def get_conn_no(self): - return self.conn_no - - def tx_add(self, data: bytearray): - ''' add data to transmit queue''' - self.tx_fifo += data - - def tx_flush(self): - ''' send transmit queue and clears it''' - self.tx_fifo() - - def tx_peek(self, size: int = None) -> bytearray: - '''returns size 
numbers of byte without removing them''' - return self.tx_fifo.peek(size) - - def tx_log(self, level, info): - ''' log the transmit queue''' - self.tx_fifo.logging(level, info) - - def tx_clear(self): - ''' clear transmit queue''' - self.tx_fifo.clear() - - def tx_len(self): - ''' get numner of bytes in the transmit queue''' - return len(self.tx_fifo) - - def fwd_add(self, data: bytearray): - ''' add data to forward queue''' - self.fwd_fifo += data - - def fwd_log(self, level, info): - ''' log the forward queue''' - self.fwd_fifo.logging(level, info) - - def rx_get(self, size: int = None) -> bytearray: - '''removes size numbers of bytes and return them''' - return self.rx_fifo.get(size) - - def rx_peek(self, size: int = None) -> bytearray: - '''returns size numbers of byte without removing them''' - return self.rx_fifo.peek(size) - - def rx_log(self, level, info): - ''' logs the receive queue''' - self.rx_fifo.logging(level, info) - - def rx_clear(self): - ''' clear receive queue''' - self.rx_fifo.clear() - - def rx_len(self): - ''' get numner of bytes in the receive queue''' - return len(self.rx_fifo) - - def rx_set_cb(self, callback): - self.rx_fifo.reg_trigger(callback) - - def prot_set_timeout_cb(self, callback): - self.timeout_cb = callback - - def prot_set_init_new_client_conn_cb(self, callback): - self.init_new_client_conn_cb = callback - - def prot_set_update_header_cb(self, callback): - self.update_header_cb = callback - - -class StreamPtr(): - '''Descr StreamPtr''' - def __init__(self, _stream, _ifc=None): - self.stream = _stream - self.ifc = _ifc - - @property - def ifc(self): - return self._ifc - - @ifc.setter - def ifc(self, value): - self._ifc = value - - @property - def stream(self): - return self._stream - - @stream.setter - def stream(self, value): - self._stream = value - - -class AsyncStream(AsyncIfcImpl): - MAX_PROC_TIME = 2 - '''maximum processing time for a received msg in sec''' - MAX_START_TIME = 400 - '''maximum time without a received msg 
in sec''' - MAX_INV_IDLE_TIME = 120 - '''maximum time without a received msg from the inverter in sec''' - MAX_DEF_IDLE_TIME = 360 - '''maximum default time without a received msg in sec''' - - def __init__(self, reader: StreamReader, writer: StreamWriter, - rstream: "StreamPtr") -> None: - AsyncIfcImpl.__init__(self) - - logger.debug('AsyncStream.__init__') - - self.remote = rstream - self.tx_fifo.reg_trigger(self.__write_cb) - self._reader = reader - self._writer = writer - self.r_addr = writer.get_extra_info('peername') - self.l_addr = writer.get_extra_info('sockname') - self.proc_start = None # start processing start timestamp - self.proc_max = 0 - self.async_publ_mqtt = None # will be set AsyncStreamServer only - - def __write_cb(self): - self._writer.write(self.tx_fifo.get()) - - def __timeout(self) -> int: - if self.timeout_cb: - return self.timeout_cb() - return 360 - - async def loop(self) -> Self: - """Async loop handler for precessing all received messages""" - self.proc_start = time.time() - while True: - try: - self.__calc_proc_time() - dead_conn_to = self.__timeout() - await asyncio.wait_for(self.__async_read(), - dead_conn_to) - - await self.__async_write() - await self.__async_forward() - if self.async_publ_mqtt: - await self.async_publ_mqtt() - - except asyncio.TimeoutError: - logger.warning(f'[{self.node_id}:{self.conn_no}] Dead ' - f'connection timeout ({dead_conn_to}s) ' - f'for {self.l_addr}') - await self.disc() - return self - - except OSError as error: - logger.error(f'[{self.node_id}:{self.conn_no}] ' - f'{error} for l{self.l_addr} | ' - f'r{self.r_addr}') - await self.disc() - return self - - except RuntimeError as error: - logger.info(f'[{self.node_id}:{self.conn_no}] ' - f'{error} for {self.l_addr}') - await self.disc() - return self - - except Exception: - Infos.inc_counter('SW_Exception') - logger.error( - f"Exception for {self.r_addr}:\n" - f"{traceback.format_exc()}") - await asyncio.sleep(0) # be cooperative to other task - - def 
__calc_proc_time(self): - if self.proc_start: - proc = time.time() - self.proc_start - if proc > self.proc_max: - self.proc_max = proc - self.proc_start = None - - async def disc(self) -> None: - """Async disc handler for graceful disconnect""" - if self._writer.is_closing(): - return - logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}') - self._writer.close() - await self._writer.wait_closed() - - def close(self) -> None: - logging.debug(f'AsyncStream.close() l{self.l_addr} | r{self.r_addr}') - """close handler for a no waiting disconnect - - hint: must be called before releasing the connection instance - """ - super().close() - self._reader.feed_eof() # abort awaited read - if self._writer.is_closing(): - return - self._writer.close() - - def healthy(self) -> bool: - elapsed = 0 - if self.proc_start is not None: - elapsed = time.time() - self.proc_start - if elapsed > self.MAX_PROC_TIME: - logging.debug(f'[{self.node_id}:{self.conn_no}:' - f'{type(self).__name__}]' - f' act:{round(1000*elapsed)}ms' - f' max:{round(1000*self.proc_max)}ms') - logging.debug(f'Healthy()) refs: {gc.get_referrers(self)}') - return elapsed < 5 - - ''' - Our private methods - ''' - async def __async_read(self) -> None: - """Async read handler to read received data from TCP stream""" - data = await self._reader.read(4096) - if data: - self.proc_start = time.time() - self.rx_fifo += data - wait = self.rx_fifo() # call read in parent class - if wait and wait > 0: - await asyncio.sleep(wait) - else: - raise RuntimeError("Peer closed.") - - async def __async_write(self, headline: str = 'Transmit to ') -> None: - """Async write handler to transmit the send_buffer""" - if len(self.tx_fifo) > 0: - self.tx_fifo.logging(logging.INFO, f'{headline}{self.r_addr}:') - self._writer.write(self.tx_fifo.get()) - await self._writer.drain() - - async def __async_forward(self) -> None: - """forward handler transmits data over the remote connection""" - if len(self.fwd_fifo) == 0: - return - 
try: - await self._async_forward() - - except OSError as error: - if self.remote.stream: - rmt = self.remote - logger.error(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] ' - f'Fwd: {error} for ' - f'l{rmt.ifc.l_addr} | r{rmt.ifc.r_addr}') - await rmt.ifc.disc() - if rmt.ifc.close_cb: - rmt.ifc.close_cb() - - except RuntimeError as error: - if self.remote.stream: - rmt = self.remote - logger.info(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] ' - f'Fwd: {error} for {rmt.ifc.l_addr}') - await rmt.ifc.disc() - if rmt.ifc.close_cb: - rmt.ifc.close_cb() - - except Exception: - Infos.inc_counter('SW_Exception') - logger.error( - f"Fwd Exception for {self.r_addr}:\n" - f"{traceback.format_exc()}") - - async def publish_outstanding_mqtt(self): - '''Publish all outstanding MQTT topics''' - try: - await self.async_publ_mqtt() - await Proxy._async_publ_mqtt_proxy_stat('proxy') - except Exception: - pass - - -class AsyncStreamServer(AsyncStream): - def __init__(self, reader: StreamReader, writer: StreamWriter, - async_publ_mqtt, create_remote, - rstream: "StreamPtr") -> None: - AsyncStream.__init__(self, reader, writer, rstream) - self.create_remote = create_remote - self.async_publ_mqtt = async_publ_mqtt - - def close(self) -> None: - logging.debug('AsyncStreamServer.close()') - self.create_remote = None - self.async_publ_mqtt = None - super().close() - - async def server_loop(self) -> None: - '''Loop for receiving messages from the inverter (server-side)''' - logger.info(f'[{self.node_id}:{self.conn_no}] ' - f'Accept connection from {self.r_addr}') - Infos.inc_counter('Inverter_Cnt') - await self.publish_outstanding_mqtt() - await self.loop() - Infos.dec_counter('Inverter_Cnt') - await self.publish_outstanding_mqtt() - logger.info(f'[{self.node_id}:{self.conn_no}] Server loop stopped for' - f' r{self.r_addr}') - - # if the server connection closes, we also have to disconnect - # the connection to te TSUN cloud - if self.remote and self.remote.stream: - 
logger.info(f'[{self.node_id}:{self.conn_no}] disc client ' - f'connection: [{self.remote.ifc.node_id}:' - f'{self.remote.ifc.conn_no}]') - await self.remote.ifc.disc() - - async def _async_forward(self) -> None: - """forward handler transmits data over the remote connection""" - if not self.remote.stream: - await self.create_remote() - if self.remote.stream and \ - self.remote.ifc.init_new_client_conn_cb(): - await self.remote.ifc._AsyncStream__async_write() - if self.remote.stream: - self.remote.ifc.update_header_cb(self.fwd_fifo.peek()) - self.fwd_fifo.logging(logging.INFO, 'Forward to ' - f'{self.remote.ifc.r_addr}:') - self.remote.ifc._writer.write(self.fwd_fifo.get()) - await self.remote.ifc._writer.drain() - - -class AsyncStreamClient(AsyncStream): - def __init__(self, reader: StreamReader, writer: StreamWriter, - rstream: "StreamPtr", close_cb) -> None: - AsyncStream.__init__(self, reader, writer, rstream) - self.close_cb = close_cb - - async def disc(self) -> None: - logging.debug('AsyncStreamClient.disc()') - self.remote = None - await super().disc() - - def close(self) -> None: - logging.debug('AsyncStreamClient.close()') - self.close_cb = None - super().close() - - async def client_loop(self, _: str) -> None: - '''Loop for receiving messages from the TSUN cloud (client-side)''' - Infos.inc_counter('Cloud_Conn_Cnt') - await self.publish_outstanding_mqtt() - await self.loop() - Infos.dec_counter('Cloud_Conn_Cnt') - await self.publish_outstanding_mqtt() - logger.info(f'[{self.node_id}:{self.conn_no}] ' - 'Client loop stopped for' - f' l{self.l_addr}') - - if self.close_cb: - self.close_cb() - - async def _async_forward(self) -> None: - """forward handler transmits data over the remote connection""" - if self.remote.stream: - self.remote.ifc.update_header_cb(self.fwd_fifo.peek()) - self.fwd_fifo.logging(logging.INFO, 'Forward to ' - f'{self.remote.ifc.r_addr}:') - self.remote.ifc._writer.write(self.fwd_fifo.get()) - await self.remote.ifc._writer.drain() 
diff --git a/ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py b/ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py deleted file mode 100644 index 959eab2..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/byte_fifo.py +++ /dev/null @@ -1,52 +0,0 @@ -from messages import hex_dump_str, hex_dump_memory - - -class ByteFifo: - """ a byte FIFO buffer with trigger callback """ - __slots__ = ('__buf', '__trigger_cb') - - def __init__(self): - self.__buf = bytearray() - self.__trigger_cb = None - - def reg_trigger(self, cb) -> None: - self.__trigger_cb = cb - - def __iadd__(self, data): - self.__buf.extend(data) - return self - - def __call__(self): - '''triggers the observer''' - if callable(self.__trigger_cb): - return self.__trigger_cb() - return None - - def get(self, size: int = None) -> bytearray: - '''removes size numbers of byte and return them''' - if not size: - data = self.__buf - self.clear() - else: - data = self.__buf[:size] - # The fast delete syntax - self.__buf[:size] = b'' - return data - - def peek(self, size: int = None) -> bytearray: - '''returns size numbers of byte without removing them''' - if not size: - return self.__buf - return self.__buf[:size] - - def clear(self): - self.__buf = bytearray() - - def __len__(self) -> int: - return len(self.__buf) - - def __str__(self) -> str: - return hex_dump_str(self.__buf, self.__len__()) - - def logging(self, level, info): - hex_dump_memory(level, info, self.__buf, self.__len__()) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/config.py b/ha_addons/ha_addon/rootfs/home/proxy/config.py deleted file mode 100644 index 3424bd9..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/config.py +++ /dev/null @@ -1,181 +0,0 @@ -'''Config module handles the proxy configuration in the config.toml file''' - -import shutil -import tomllib -import logging -from schema import Schema, And, Or, Use, Optional - - -class Config(): - '''Static class Config is reads and sanitize the config. 
- - Read config.toml file and sanitize it with read(). - Get named parts of the config with get()''' - - act_config = {} - def_config = {} - conf_schema = Schema({ - 'tsun': { - 'enabled': Use(bool), - 'host': Use(str), - 'port': And(Use(int), lambda n: 1024 <= n <= 65535) - }, - 'solarman': { - 'enabled': Use(bool), - 'host': Use(str), - 'port': And(Use(int), lambda n: 1024 <= n <= 65535) - }, - 'mqtt': { - 'host': Use(str), - 'port': And(Use(int), lambda n: 1024 <= n <= 65535), - 'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)), - 'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None)) - }, - 'ha': { - 'auto_conf_prefix': Use(str), - 'discovery_prefix': Use(str), - 'entity_prefix': Use(str), - 'proxy_node_id': Use(str), - 'proxy_unique_id': Use(str) - }, - 'gen3plus': { - 'at_acl': { - Or('mqtt', 'tsun'): { - 'allow': [str], - Optional('block', default=[]): [str] - } - } - }, - 'inverters': { - 'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): { - Optional('monitor_sn', default=0): Use(int), - Optional('node_id', default=""): And(Use(str), - Use(lambda s: s + '/' - if len(s) > 0 - and s[-1] != '/' - else s)), - Optional('client_mode'): { - 'host': Use(str), - Optional('port', default=8899): - And(Use(int), lambda n: 1024 <= n <= 65535), - Optional('forward', default=False): Use(bool), - }, - Optional('modbus_polling', default=True): Use(bool), - Optional('suggested_area', default=""): Use(str), - Optional('sensor_list', default=0x2b0): Use(int), - Optional('pv1'): { - Optional('type'): Use(str), - Optional('manufacturer'): Use(str), - }, - Optional('pv2'): { - Optional('type'): Use(str), - Optional('manufacturer'): Use(str), - }, - Optional('pv3'): { - Optional('type'): Use(str), - Optional('manufacturer'): Use(str), - }, - Optional('pv4'): { - Optional('type'): Use(str), - Optional('manufacturer'): Use(str), - }, - Optional('pv5'): { - Optional('type'): Use(str), - Optional('manufacturer'): Use(str), - }, - Optional('pv6'): 
{ - Optional('type'): Use(str), - Optional('manufacturer'): Use(str), - } - } - } - }, ignore_extra_keys=True - ) - - @classmethod - def class_init(cls) -> None | str: # pragma: no cover - try: - # make the default config transparaent by copying it - # in the config.example file - logging.debug('Copy Default Config to config.example.toml') - - shutil.copy2("default_config.toml", - "config/config.example.toml") - except Exception: - pass - err_str = cls.read() - del cls.conf_schema - return err_str - - @classmethod - def _read_config_file(cls) -> dict: # pragma: no cover - usr_config = {} - - try: - with open("config/config.toml", "rb") as f: - usr_config = tomllib.load(f) - except Exception as error: - err = f'Config.read: {error}' - logging.error(err) - logging.info( - '\n To create the missing config.toml file, ' - 'you can rename the template config.example.toml\n' - ' and customize it for your scenario.\n') - return usr_config - - @classmethod - def read(cls, path='') -> None | str: - '''Read config file, merge it with the default config - and sanitize the result''' - err = None - config = {} - logger = logging.getLogger('data') - - try: - # read example config file as default configuration - cls.def_config = {} - with open(f"{path}default_config.toml", "rb") as f: - def_config = tomllib.load(f) - cls.def_config = cls.conf_schema.validate(def_config) - - # overwrite the default values, with values from - # the config.toml file - usr_config = cls._read_config_file() - - # merge the default and the user config - config = def_config.copy() - for key in ['tsun', 'solarman', 'mqtt', 'ha', 'inverters', - 'gen3plus']: - if key in usr_config: - config[key] |= usr_config[key] - - try: - cls.act_config = cls.conf_schema.validate(config) - except Exception as error: - err = f'Config.read: {error}' - logging.error(err) - - # logging.debug(f'Readed config: "{cls.act_config}" ') - - except Exception as error: - err = f'Config.read: {error}' - logger.error(err) - 
cls.act_config = {} - - return err - - @classmethod - def get(cls, member: str = None): - '''Get a named attribute from the proxy config. If member == - None it returns the complete config dict''' - - if member: - return cls.act_config.get(member, {}) - else: - return cls.act_config - - @classmethod - def is_default(cls, member: str) -> bool: - '''Check if the member is the default value''' - - return cls.act_config.get(member) == cls.def_config.get(member) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/default_config.toml b/ha_addons/ha_addon/rootfs/home/proxy/default_config.toml deleted file mode 100644 index 57b2baf..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/default_config.toml +++ /dev/null @@ -1,177 +0,0 @@ -########################################################################################## -### -### T S U N - G E N 3 - P R O X Y -### -### from Stefan Allius -### -########################################################################################## -### -### The readme will give you an overview of the project: -### https://s-allius.github.io/tsun-gen3-proxy/ -### -### The proxy supports different operation modes. Select the proper mode -### which depends on your inverter type and you inverter firmware. -### Please read: -### https://github.com/s-allius/tsun-gen3-proxy/wiki/Operation-Modes-Overview -### -### Here you will find a description of all configuration options: -### https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details -### -### The configration uses the TOML format, which aims to be easy to read due to -### obvious semantics. 
You find more details here: https://toml.io/en/v1.0.0 -### -########################################################################################## - - -########################################################################################## -## -## MQTT broker configuration -## -## In this block, you must configure the connection to your MQTT broker and specify the -## required credentials. As the proxy does not currently support an encrypted connection -## to the MQTT broker, it is strongly recommended that you do not use a public broker. -## -## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#mqtt-broker-account -## - -mqtt.host = 'mqtt' # URL or IP address of the mqtt broker -mqtt.port = 1883 -mqtt.user = '' -mqtt.passwd = '' - - -########################################################################################## -## -## HOME ASSISTANT -## -## The proxy supports the MQTT autoconfiguration of Home Assistant (HA). The default -## values match the HA default configuration. If you need to change these or want to use -## a different MQTT client, you can adjust the prefixes of the MQTT topics below. -## -## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#home-assistant -## - -ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates -ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic -ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values -ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id -ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance - - -########################################################################################## -## -## GEN3 Proxy Mode Configuration -## -## In this block, you can configure an optional connection to the TSUN cloud for GEN3 -## inverters. 
This connection is only required if you want send data to the TSUN cloud -## to use the TSUN APPs or receive firmware updates. -## -## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#tsun-cloud-for-gen3-inverter-only -## - -tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates -tsun.host = 'logger.talent-monitoring.com' -tsun.port = 5005 - - -########################################################################################## -## -## GEN3PLUS Proxy Mode Configuration -## -## In this block, you can configure an optional connection to the TSUN cloud for GEN3PLUS -## inverters. This connection is only required if you want send data to the TSUN cloud -## to use the TSUN APPs or receive firmware updates. -## -## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#solarman-cloud-for-gen3plus-inverter-only -## - -solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates -solarman.host = 'iot.talent-monitoring.com' -solarman.port = 10000 - - -########################################################################################## -### -### Inverter Definitions -### -### The proxy supports the simultaneous operation of several inverters, even of different -### types. A configuration block must be defined for each inverter, in which all necessary -### parameters must be specified. These depend on the operation mode used and also differ -### slightly depending on the inverter type. -### -### In addition, the PV modules can be defined at the individual inputs for documentation -### purposes, whereby these are displayed in Home Assistant. -### -### The proxy only accepts connections from known inverters. This can be switched off for -### test purposes and unknown serial numbers are also accepted. 
-### - -inverters.allow_all = false # only allow known inverters - - -########################################################################################## -## -## For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT -## definition. To do this, the corresponding configuration block is started with -## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned -## to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set -## in the configuration block -## -## The serial numbers of all GEN3 inverters start with `R17`! -## - -[inverters."R170000000000001"] -node_id = '' # MQTT replacement for inverters serial number -suggested_area = '' # suggested installation area for home-assistant -modbus_polling = false # Disable optional MODBUS polling -pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr -pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr - - -########################################################################################## -## -## For each GEN3PLUS inverter, the serial number of the inverter must be mapped to an MQTT -## definition. To do this, the corresponding configuration block is started with -## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned -## to this inverter. Further inverter-specific parameters (e.g. polling mode, client mode) -## can be set in the configuration block -## -## The serial numbers of all GEN3PLUS inverters start with `Y17` or Y47! Each GEN3PLUS -## inverter is supplied with a “Monitoring SN:”. This can be found on a sticker enclosed -## with the inverter. 
-## - -[inverters."Y170000000000001"] -monitor_sn = 2000000000 # The GEN3PLUS "Monitoring SN:" -node_id = '' # MQTT replacement for inverters serial number -suggested_area = '' # suggested installation place for home-assistant -modbus_polling = true # Enable optional MODBUS polling - -# if your inverter supports SSL connections you must use the client_mode. Pls, uncomment -# the next line and configure the fixed IP of your inverter -#client_mode = {host = '192.168.0.1', port = 8899} - -pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr -pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr -pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr -pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr - - -########################################################################################## -### -### If the proxy mode is configured, commands from TSUN can be sent to the inverter via -### this connection or parameters (e.g. network credentials) can be queried. Filters can -### then be configured for the AT+ commands from the TSUN Cloud so that only certain -### accesses are permitted. 
-### -### An overview of all known AT+ commands can be found here: -### https://github.com/s-allius/tsun-gen3-proxy/wiki/AT--commands -### - -[gen3plus.at_acl] -# filter for received commands from the internet -tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'] -tsun.block = [] -# filter for received commands from the MQTT broker -mqtt.allow = ['AT+'] -mqtt.block = [] diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py deleted file mode 100644 index efa220c..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/gen3/infos_g3.py +++ /dev/null @@ -1,194 +0,0 @@ - -import struct -import logging -from typing import Generator - -from infos import Infos, Register - - -class RegisterMap: - __slots__ = () - - map = { - 0x00092ba8: {'reg': Register.COLLECTOR_FW_VERSION}, - 0x000927c0: {'reg': Register.CHIP_TYPE}, - 0x00092f90: {'reg': Register.CHIP_MODEL}, - 0x00094ae8: {'reg': Register.MAC_ADDR}, - 0x00095a88: {'reg': Register.TRACE_URL}, - 0x00095aec: {'reg': Register.LOGGER_URL}, - 0x0000000a: {'reg': Register.PRODUCT_NAME}, - 0x00000014: {'reg': Register.MANUFACTURER}, - 0x0000001e: {'reg': Register.VERSION}, - 0x00000028: {'reg': Register.SERIAL_NUMBER}, - 0x00000032: {'reg': Register.EQUIPMENT_MODEL}, - 0x00013880: {'reg': Register.NO_INPUTS}, - 0xffffff00: {'reg': Register.INVERTER_CNT}, - 0xffffff01: {'reg': Register.UNKNOWN_SNR}, - 0xffffff02: {'reg': Register.UNKNOWN_MSG}, - 0xffffff03: {'reg': Register.INVALID_DATA_TYPE}, - 0xffffff04: {'reg': Register.INTERNAL_ERROR}, - 0xffffff05: {'reg': Register.UNKNOWN_CTRL}, - 0xffffff06: {'reg': Register.OTA_START_MSG}, - 0xffffff07: {'reg': Register.SW_EXCEPTION}, - 0xffffff08: {'reg': Register.POLLING_INTERVAL}, - 0xfffffffe: {'reg': Register.TEST_REG1}, - 0xffffffff: {'reg': Register.TEST_REG2}, - 0x00000640: {'reg': Register.OUTPUT_POWER}, - 0x000005dc: {'reg': Register.RATED_POWER}, - 0x00000514: {'reg': Register.INVERTER_TEMP}, - 0x000006a4: {'reg': 
Register.PV1_VOLTAGE}, - 0x00000708: {'reg': Register.PV1_CURRENT}, - 0x0000076c: {'reg': Register.PV1_POWER}, - 0x000007d0: {'reg': Register.PV2_VOLTAGE}, - 0x00000834: {'reg': Register.PV2_CURRENT}, - 0x00000898: {'reg': Register.PV2_POWER}, - 0x000008fc: {'reg': Register.PV3_VOLTAGE}, - 0x00000960: {'reg': Register.PV3_CURRENT}, - 0x000009c4: {'reg': Register.PV3_POWER}, - 0x00000a28: {'reg': Register.PV4_VOLTAGE}, - 0x00000a8c: {'reg': Register.PV4_CURRENT}, - 0x00000af0: {'reg': Register.PV4_POWER}, - 0x00000c1c: {'reg': Register.PV1_DAILY_GENERATION}, - 0x00000c80: {'reg': Register.PV1_TOTAL_GENERATION}, - 0x00000ce4: {'reg': Register.PV2_DAILY_GENERATION}, - 0x00000d48: {'reg': Register.PV2_TOTAL_GENERATION}, - 0x00000dac: {'reg': Register.PV3_DAILY_GENERATION}, - 0x00000e10: {'reg': Register.PV3_TOTAL_GENERATION}, - 0x00000e74: {'reg': Register.PV4_DAILY_GENERATION}, - 0x00000ed8: {'reg': Register.PV4_TOTAL_GENERATION}, - 0x00000b54: {'reg': Register.DAILY_GENERATION}, - 0x00000bb8: {'reg': Register.TOTAL_GENERATION}, - 0x000003e8: {'reg': Register.GRID_VOLTAGE}, - 0x0000044c: {'reg': Register.GRID_CURRENT}, - 0x000004b0: {'reg': Register.GRID_FREQUENCY}, - 0x000cfc38: {'reg': Register.CONNECT_COUNT}, - 0x000c3500: {'reg': Register.SIGNAL_STRENGTH}, - 0x000c96a8: {'reg': Register.POWER_ON_TIME}, - 0x000d0020: {'reg': Register.COLLECT_INTERVAL}, - 0x000cf850: {'reg': Register.DATA_UP_INTERVAL}, - 0x000c7f38: {'reg': Register.COMMUNICATION_TYPE}, - 0x00000190: {'reg': Register.EVENT_ALARM}, - 0x000001f4: {'reg': Register.EVENT_FAULT}, - 0x00000258: {'reg': Register.EVENT_BF1}, - 0x000002bc: {'reg': Register.EVENT_BF2}, - 0x00000064: {'reg': Register.INVERTER_STATUS}, - - 0x00000fa0: {'reg': Register.BOOT_STATUS}, - 0x00001004: {'reg': Register.DSP_STATUS}, - 0x000010cc: {'reg': Register.WORK_MODE}, - 0x000011f8: {'reg': Register.OUTPUT_SHUTDOWN}, - 0x0000125c: {'reg': Register.MAX_DESIGNED_POWER}, - 0x000012c0: {'reg': Register.RATED_LEVEL}, - 0x00001324: 
{'reg': Register.INPUT_COEFFICIENT, 'ratio': 100/1024}, - 0x00001388: {'reg': Register.GRID_VOLT_CAL_COEF}, - 0x00002710: {'reg': Register.PROD_COMPL_TYPE}, - 0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024}, - } - - -class InfosG3(Infos): - __slots__ = () - - def ha_confs(self, ha_prfx: str, node_id: str, snr: str, - sug_area: str = '') \ - -> Generator[tuple[dict, str], None, None]: - '''Generator function yields a json register struct for home-assistant - auto configuration and a unique entity string - - arguments: - prfx:str ==> MQTT prefix for the home assistant 'stat_t string - snr:str ==> serial number of the inverter, used to build unique - entity strings - sug_area:str ==> suggested area string from the config file''' - # iterate over RegisterMap.map and get the register values - for row in RegisterMap.map.values(): - reg = row['reg'] - res = self.ha_conf(reg, ha_prfx, node_id, snr, False, sug_area) # noqa: E501 - if res: - yield res - - def parse(self, buf, ind=0, node_id: str = '') -> \ - Generator[tuple[str, bool], None, None]: - '''parse a data sequence received from the inverter and - stores the values in Infos.db - - buf: buffer of the sequence to parse''' - result = struct.unpack_from('!l', buf, ind) - elms = result[0] - i = 0 - ind += 4 - while i < elms: - result = struct.unpack_from('!lB', buf, ind) - addr = result[0] - if addr not in RegisterMap.map: - row = None - info_id = -1 - else: - row = RegisterMap.map[addr] - info_id = row['reg'] - data_type = result[1] - ind += 5 - - if data_type == 0x54: # 'T' -> Pascal-String - str_len = buf[ind] - result = struct.unpack_from(f'!{str_len+1}p', buf, - ind)[0].decode(encoding='ascii', - errors='replace') - ind += str_len+1 - - elif data_type == 0x00: # 'Nul' -> end - i = elms # abort the loop - - elif data_type == 0x41: # 'A' -> Nop ?? 
- ind += 0 - i += 1 - continue - - elif data_type == 0x42: # 'B' -> byte, int8 - result = struct.unpack_from('!B', buf, ind)[0] - ind += 1 - - elif data_type == 0x49: # 'I' -> int32 - result = struct.unpack_from('!l', buf, ind)[0] - ind += 4 - - elif data_type == 0x53: # 'S' -> short, int16 - result = struct.unpack_from('!h', buf, ind)[0] - ind += 2 - - elif data_type == 0x46: # 'F' -> float32 - result = round(struct.unpack_from('!f', buf, ind)[0], 2) - ind += 4 - - elif data_type == 0x4c: # 'L' -> long, int64 - result = struct.unpack_from('!q', buf, ind)[0] - ind += 8 - - else: - self.inc_counter('Invalid_Data_Type') - logging.error(f"Infos.parse: data_type: {data_type}" - f" @0x{addr:04x} No:{i}" - " not supported") - return - - result = self.__modify_val(row, result) - - yield from self.__store_result(addr, result, info_id, node_id) - i += 1 - - def __modify_val(self, row, result): - if row and 'ratio' in row: - result = round(result * row['ratio'], 2) - return result - - def __store_result(self, addr, result, info_id, node_id): - keys, level, unit, must_incr = self._key_obj(info_id) - if keys: - name, update = self.update_db(keys, must_incr, result) - yield keys[0], update - else: - update = False - name = str(f'info-id.0x{addr:x}') - if update: - self.tracer.log(level, f'[{node_id}] GEN3: {name} :' - f' {result}{unit}') diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py deleted file mode 100644 index efaeca0..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/gen3/inverter_g3.py +++ /dev/null @@ -1,9 +0,0 @@ -from asyncio import StreamReader, StreamWriter - -from inverter_base import InverterBase -from gen3.talent import Talent - - -class InverterG3(InverterBase): - def __init__(self, reader: StreamReader, writer: StreamWriter): - super().__init__(reader, writer, 'tsun', Talent) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py 
b/ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py deleted file mode 100644 index da3ebc8..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/gen3/talent.py +++ /dev/null @@ -1,569 +0,0 @@ -import struct -import logging -from zoneinfo import ZoneInfo -from datetime import datetime -from tzlocal import get_localzone - -from async_ifc import AsyncIfc -from messages import Message, State -from modbus import Modbus -from config import Config -from gen3.infos_g3 import InfosG3 -from infos import Register - -logger = logging.getLogger('msg') - - -class Control: - def __init__(self, ctrl: int): - self.ctrl = ctrl - - def __int__(self) -> int: - return self.ctrl - - def is_ind(self) -> bool: - return (self.ctrl == 0x91) - - def is_req(self) -> bool: - return (self.ctrl == 0x70) - - def is_resp(self) -> bool: - return (self.ctrl == 0x99) - - -class Talent(Message): - TXT_UNKNOWN_CTRL = 'Unknown Ctrl' - - def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, - client_mode: bool = False, id_str=b''): - super().__init__('G3', ifc, server_side, self.send_modbus_cb, - mb_timeout=15) - ifc.rx_set_cb(self.read) - ifc.prot_set_timeout_cb(self._timeout) - ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn) - ifc.prot_set_update_header_cb(self._update_header) - - self.addr = addr - self.conn_no = ifc.get_conn_no() - self.await_conn_resp_cnt = 0 - self.id_str = id_str - self.contact_name = b'' - self.contact_mail = b'' - self.ts_offset = 0 # time offset between tsun cloud and local - self.db = InfosG3() - self.switch = { - 0x00: self.msg_contact_info, - 0x13: self.msg_ota_update, - 0x22: self.msg_get_time, - 0x99: self.msg_heartbeat, - 0x71: self.msg_collector_data, - # 0x76: - 0x77: self.msg_modbus, - # 0x78: - 0x87: self.msg_modbus2, - 0x04: self.msg_inverter_data, - } - self.log_lvl = { - 0x00: logging.INFO, - 0x13: logging.INFO, - 0x22: logging.INFO, - 0x99: logging.INFO, - 0x71: logging.INFO, - # 0x76: - 0x77: self.get_modbus_log_lvl, - # 0x78: - 0x87: 
self.get_modbus_log_lvl, - 0x04: logging.INFO, - } - - ''' - Our puplic methods - ''' - def close(self) -> None: - logging.debug('Talent.close()') - # we have references to methods of this class in self.switch - # so we have to erase self.switch, otherwise this instance can't be - # deallocated by the garbage collector ==> we get a memory leak - self.switch.clear() - self.log_lvl.clear() - super().close() - - def __set_serial_no(self, serial_no: str): - - if self.unique_id == serial_no: - logger.debug(f'SerialNo: {serial_no}') - else: - inverters = Config.get('inverters') - # logger.debug(f'Inverters: {inverters}') - - if serial_no in inverters: - inv = inverters[serial_no] - self.node_id = inv['node_id'] - self.sug_area = inv['suggested_area'] - self.modbus_polling = inv['modbus_polling'] - logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501 - self.db.set_pv_module_details(inv) - if self.mb: - self.mb.set_node_id(self.node_id) - else: - self.node_id = '' - self.sug_area = '' - if 'allow_all' not in inverters or not inverters['allow_all']: - self.inc_counter('Unknown_SNR') - self.unique_id = None - logger.warning(f'ignore message from unknow inverter! 
(SerialNo: {serial_no})') # noqa: E501 - return - logger.debug(f'SerialNo {serial_no} not known but accepted!') - - self.unique_id = serial_no - self.db.set_db_def_value(Register.COLLECTOR_SNR, serial_no) - - def read(self) -> float: - '''process all received messages in the _recv_buffer''' - self._read() - while True: - if not self.header_valid: - self.__parse_header(self.ifc.rx_peek(), self.ifc.rx_len()) - - if self.header_valid and \ - self.ifc.rx_len() >= (self.header_len + self.data_len): - if self.state == State.init: - self.state = State.received # received 1st package - - log_lvl = self.log_lvl.get(self.msg_id, logging.WARNING) - if callable(log_lvl): - log_lvl = log_lvl() - - self.ifc.rx_log(log_lvl, f'Received from {self.addr}:' - f' BufLen: {self.ifc.rx_len()}' - f' HdrLen: {self.header_len}' - f' DtaLen: {self.data_len}') - - self.__set_serial_no(self.id_str.decode("utf-8")) - self.__dispatch_msg() - self.__flush_recv_msg() - else: - return 0 # don not wait before sending a response - - def forward(self) -> None: - '''add the actual receive msg to the forwarding queue''' - tsun = Config.get('tsun') - if tsun['enabled']: - buflen = self.header_len+self.data_len - buffer = self.ifc.rx_peek(buflen) - self.ifc.fwd_add(buffer) - self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') - - fnc = self.switch.get(self.msg_id, self.msg_unknown) - logger.info(self.__flow_str(self.server_side, 'forwrd') + - f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') - - def send_modbus_cb(self, modbus_pdu: bytearray, log_lvl: int, state: str): - if self.state != State.up: - logger.warning(f'[{self.node_id}] ignore MODBUS cmd,' - ' cause the state is not UP anymore') - return - - self.__build_header(0x70, 0x77) - self.ifc.tx_add(b'\x00\x01\xa3\x28') # magic ? 
- self.ifc.tx_add(struct.pack('!B', len(modbus_pdu))) - self.ifc.tx_add(modbus_pdu) - self.__finish_send_msg() - - self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:') - self.ifc.tx_flush() - - def mb_timout_cb(self, exp_cnt): - self.mb_timer.start(self.mb_timeout) - - if 2 == (exp_cnt % 30): - # logging.info("Regular Modbus Status request") - self._send_modbus_cmd(Modbus.READ_REGS, 0x2000, 96, logging.DEBUG) - else: - self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG) - - def _init_new_client_conn(self) -> bool: - contact_name = self.contact_name - contact_mail = self.contact_mail - logger.info(f'name: {contact_name} mail: {contact_mail}') - self.msg_id = 0 - self.await_conn_resp_cnt += 1 - self.__build_header(0x91) - self.ifc.tx_add(struct.pack(f'!{len(contact_name)+1}p' - f'{len(contact_mail)+1}p', - contact_name, contact_mail)) - - self.__finish_send_msg() - return True - - ''' - Our private methods - ''' - def __flow_str(self, server_side: bool, type: str): # noqa: F821 - switch = { - 'rx': ' <', - 'tx': ' >', - 'forwrd': '<< ', - 'drop': ' xx', - 'rxS': '> ', - 'txS': '< ', - 'forwrdS': ' >>', - 'dropS': 'xx ', - } - if server_side: - type += 'S' - return switch.get(type, '???') - - def _timestamp(self): # pragma: no cover - '''returns timestamp fo the inverter as localtime - since 1.1.1970 in msec''' - # convert localtime in epoche - ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds() - return round(ts*1000) - - def _utcfromts(self, ts: float): - '''converts inverter timestamp into unix time (epoche)''' - dt = datetime.fromtimestamp(ts/1000, tz=ZoneInfo("UTC")). 
\ - replace(tzinfo=get_localzone()) - return dt.timestamp() - - def _utc(self): # pragma: no cover - '''returns unix time (epoche)''' - return datetime.now().timestamp() - - def _update_header(self, _forward_buffer): - '''update header for message before forwarding, - add time offset to timestamp''' - _len = len(_forward_buffer) - ofs = 0 - while ofs < _len: - result = struct.unpack_from('!lB', _forward_buffer, 0) - msg_len = 4 + result[0] - id_len = result[1] # len of variable id string - if _len < 2*id_len + 21: - return - - result = struct.unpack_from('!B', _forward_buffer, id_len+6) - msg_code = result[0] - if msg_code == 0x71 or msg_code == 0x04: - result = struct.unpack_from('!q', _forward_buffer, 13+2*id_len) - ts = result[0] + self.ts_offset - logger.debug(f'offset: {self.ts_offset:08x}' - f' proxy-time: {ts:08x}') - struct.pack_into('!q', _forward_buffer, 13+2*id_len, ts) - ofs += msg_len - - # check if there is a complete header in the buffer, parse it - # and set - # self.header_len - # self.data_len - # self.id_str - # self.ctrl - # self.msg_id - # - # if the header is incomplete, than self.header_len is still 0 - # - def __parse_header(self, buf: bytes, buf_len: int) -> None: - - if (buf_len < 5): # enough bytes to read len and id_len? - return - result = struct.unpack_from('!lB', buf, 0) - msg_len = result[0] # len of complete message - id_len = result[1] # len of variable id string - if id_len > 17: - logger.warning(f'len of ID string must == 16 but is {id_len}') - self.inc_counter('Invalid_Msg_Format') - - # erase broken recv buffer - self.ifc.rx_clear() - return - - hdr_len = 5+id_len+2 - - if (buf_len < hdr_len): # enough bytes for complete header? 
- return - - result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4) - - # store parsed header values in the class - self.id_str = result[0] - self.ctrl = Control(result[1]) - self.msg_id = result[2] - self.data_len = msg_len-id_len-3 - self.header_len = hdr_len - self.header_valid = True - - def __build_header(self, ctrl, msg_id=None) -> None: - if not msg_id: - msg_id = self.msg_id - self.send_msg_ofs = self.ifc.tx_len() - self.ifc.tx_add(struct.pack(f'!l{len(self.id_str)+1}pBB', - 0, self.id_str, ctrl, msg_id)) - fnc = self.switch.get(msg_id, self.msg_unknown) - logger.info(self.__flow_str(self.server_side, 'tx') + - f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}') - - def __finish_send_msg(self) -> None: - _len = self.ifc.tx_len() - self.send_msg_ofs - struct.pack_into('!l', self.ifc.tx_peek(), self.send_msg_ofs, - _len-4) - - def __dispatch_msg(self) -> None: - fnc = self.switch.get(self.msg_id, self.msg_unknown) - if self.unique_id: - logger.info(self.__flow_str(self.server_side, 'rx') + - f' Ctl: {int(self.ctrl):#02x} ({self.state}) ' - f'Msg: {fnc.__name__!r}') - fnc() - else: - logger.info(self.__flow_str(self.server_side, 'drop') + - f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') - - def __flush_recv_msg(self) -> None: - self.ifc.rx_get(self.header_len+self.data_len) - self.header_valid = False - - ''' - Message handler methods - ''' - def msg_contact_info(self): - if self.ctrl.is_ind(): - if self.server_side and self.__process_contact_info(): - self.__build_header(0x91) - self.ifc.tx_add(b'\x01') - self.__finish_send_msg() - # don't forward this contact info here, we will build one - # when the remote connection is established - elif self.await_conn_resp_cnt > 0: - self.await_conn_resp_cnt -= 1 - else: - self.forward() - else: - logger.warning(self.TXT_UNKNOWN_CTRL) - self.inc_counter('Unknown_Ctrl') - self.forward() - - def __process_contact_info(self) -> bool: - buf = self.ifc.rx_peek() - result = struct.unpack_from('!B', buf, self.header_len) - 
name_len = result[0] - if self.data_len == 1: # this is a response withone status byte - return False - if self.data_len >= name_len+2: - result = struct.unpack_from(f'!{name_len+1}pB', buf, - self.header_len) - self.contact_name = result[0] - mail_len = result[1] - logger.info(f'name: {self.contact_name}') - - result = struct.unpack_from(f'!{mail_len+1}p', buf, - self.header_len+name_len+1) - self.contact_mail = result[0] - logger.info(f'mail: {self.contact_mail}') - return True - - def msg_get_time(self): - if self.ctrl.is_ind(): - if self.data_len == 0: - if self.state == State.up: - self.state = State.pend # block MODBUS cmds - - ts = self._timestamp() - logger.debug(f'time: {ts:08x}') - self.__build_header(0x91) - self.ifc.tx_add(struct.pack('!q', ts)) - self.__finish_send_msg() - - elif self.data_len >= 8: - ts = self._timestamp() - result = struct.unpack_from('!q', self.ifc.rx_peek(), - self.header_len) - self.ts_offset = result[0]-ts - if self.ifc.remote.stream: - self.ifc.remote.stream.ts_offset = self.ts_offset - logger.debug(f'tsun-time: {int(result[0]):08x}' - f' proxy-time: {ts:08x}' - f' offset: {self.ts_offset}') - return # ignore received response - else: - logger.warning(self.TXT_UNKNOWN_CTRL) - self.inc_counter('Unknown_Ctrl') - - self.forward() - - def msg_heartbeat(self): - if self.ctrl.is_ind(): - if self.data_len == 9: - self.state = State.up # allow MODBUS cmds - if (self.modbus_polling): - self.mb_timer.start(self.mb_first_timeout) - self.db.set_db_def_value(Register.POLLING_INTERVAL, - self.mb_timeout) - self.__build_header(0x99) - self.ifc.tx_add(b'\x02') - self.__finish_send_msg() - - result = struct.unpack_from('!Bq', self.ifc.rx_peek(), - self.header_len) - resp_code = result[0] - ts = result[1]+self.ts_offset - logger.debug(f'inv-time: {int(result[1]):08x}' - f' tsun-time: {ts:08x}' - f' offset: {self.ts_offset}') - struct.pack_into('!Bq', self.ifc.rx_peek(), - self.header_len, resp_code, ts) - elif self.ctrl.is_resp(): - result = 
struct.unpack_from('!B', self.ifc.rx_peek(), - self.header_len) - resp_code = result[0] - logging.debug(f'Heartbeat-RespCode: {resp_code}') - return - else: - logger.warning(self.TXT_UNKNOWN_CTRL) - self.inc_counter('Unknown_Ctrl') - - self.forward() - - def parse_msg_header(self): - result = struct.unpack_from('!lB', self.ifc.rx_peek(), - self.header_len) - - data_id = result[0] # len of complete message - id_len = result[1] # len of variable id string - logger.debug(f'Data_ID: 0x{data_id:08x} id_len: {id_len}') - - msg_hdr_len = 5+id_len+9 - - result = struct.unpack_from(f'!{id_len+1}pBq', self.ifc.rx_peek(), - self.header_len + 4) - - timestamp = result[2] - logger.debug(f'ID: {result[0]} B: {result[1]}') - logger.debug(f'time: {timestamp:08x}') - # logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime( - # "%Y-%m-%d %H:%M:%S")}') - return msg_hdr_len, timestamp - - def msg_collector_data(self): - if self.ctrl.is_ind(): - self.__build_header(0x99) - self.ifc.tx_add(b'\x01') - self.__finish_send_msg() - self.__process_data() - - elif self.ctrl.is_resp(): - return # ignore received response - else: - logger.warning(self.TXT_UNKNOWN_CTRL) - self.inc_counter('Unknown_Ctrl') - - self.forward() - - def msg_inverter_data(self): - if self.ctrl.is_ind(): - self.__build_header(0x99) - self.ifc.tx_add(b'\x01') - self.__finish_send_msg() - self.__process_data() - self.state = State.up # allow MODBUS cmds - if (self.modbus_polling): - self.mb_timer.start(self.mb_first_timeout) - self.db.set_db_def_value(Register.POLLING_INTERVAL, - self.mb_timeout) - - elif self.ctrl.is_resp(): - return # ignore received response - else: - logger.warning(self.TXT_UNKNOWN_CTRL) - self.inc_counter('Unknown_Ctrl') - - self.forward() - - def __process_data(self): - msg_hdr_len, ts = self.parse_msg_header() - - for key, update in self.db.parse(self.ifc.rx_peek(), self.header_len - + msg_hdr_len, self.node_id): - if update: - self._set_mqtt_timestamp(key, self._utcfromts(ts)) - 
self.new_data[key] = True - - def msg_ota_update(self): - if self.ctrl.is_req(): - self.inc_counter('OTA_Start_Msg') - elif self.ctrl.is_ind(): - pass # Ok, nothing to do - else: - logger.warning(self.TXT_UNKNOWN_CTRL) - self.inc_counter('Unknown_Ctrl') - self.forward() - - def parse_modbus_header(self): - - msg_hdr_len = 5 - - result = struct.unpack_from('!lBB', self.ifc.rx_peek(), - self.header_len) - modbus_len = result[1] - return msg_hdr_len, modbus_len - - def parse_modbus_header2(self): - - msg_hdr_len = 6 - - result = struct.unpack_from('!lBBB', self.ifc.rx_peek(), - self.header_len) - modbus_len = result[2] - return msg_hdr_len, modbus_len - - def get_modbus_log_lvl(self) -> int: - if self.ctrl.is_req(): - return logging.INFO - elif self.ctrl.is_ind() and self.server_side: - return self.mb.last_log_lvl - return logging.WARNING - - def msg_modbus(self): - hdr_len, _ = self.parse_modbus_header() - self.__msg_modbus(hdr_len) - - def msg_modbus2(self): - hdr_len, _ = self.parse_modbus_header2() - self.__msg_modbus(hdr_len) - - def __msg_modbus(self, hdr_len): - data = self.ifc.rx_peek()[self.header_len: - self.header_len+self.data_len] - - if self.ctrl.is_req(): - rstream = self.ifc.remote.stream - if rstream.mb.recv_req(data[hdr_len:], rstream.msg_forward): - self.inc_counter('Modbus_Command') - else: - self.inc_counter('Invalid_Msg_Format') - elif self.ctrl.is_ind(): - self.modbus_elms = 0 - # logger.debug(f'Modbus Ind MsgLen: {modbus_len}') - if not self.server_side: - logger.warning('Unknown Message') - self.inc_counter('Unknown_Msg') - return - - for key, update, _ in self.mb.recv_resp(self.db, data[ - hdr_len:]): - if update: - self._set_mqtt_timestamp(key, self._utc()) - self.new_data[key] = True - self.modbus_elms += 1 # count for unit tests - else: - logger.warning(self.TXT_UNKNOWN_CTRL) - self.inc_counter('Unknown_Ctrl') - self.forward() - - def msg_forward(self): - self.forward() - - def msg_unknown(self): - logger.warning(f"Unknow Msg: 
ID:{self.msg_id}") - self.inc_counter('Unknown_Msg') - self.forward() diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py deleted file mode 100644 index 417487a..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/infos_g3p.py +++ /dev/null @@ -1,204 +0,0 @@ - -from typing import Generator - -from infos import Infos, Register, ProxyMode, Fmt - - -class RegisterMap: - # make the class read/only by using __slots__ - __slots__ = () - - FMT_2_16BIT_VAL = '!HH' - FMT_3_16BIT_VAL = '!HHH' - FMT_4_16BIT_VAL = '!HHHH' - - map = { - # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': ' bool: - if 'dep' in row: - mode = row['dep'] - if self.client_mode: - return mode != ProxyMode.CLIENT - else: - return mode != ProxyMode.SERVER - return False - - def ha_confs(self, ha_prfx: str, node_id: str, snr: str, - sug_area: str = '') \ - -> Generator[tuple[dict, str], None, None]: - '''Generator function yields a json register struct for home-assistant - auto configuration and a unique entity string - - arguments: - prfx:str ==> MQTT prefix for the home assistant 'stat_t string - snr:str ==> serial number of the inverter, used to build unique - entity strings - sug_area:str ==> suggested area string from the config file''' - # iterate over RegisterMap.map and get the register values - for row in RegisterMap.map.values(): - info_id = row['reg'] - if self.__hide_topic(row): - res = self.ha_remove(info_id, node_id, snr) # noqa: E501 - else: - res = self.ha_conf(info_id, ha_prfx, node_id, snr, False, sug_area) # noqa: E501 - if res: - yield res - - def parse(self, buf, msg_type: int, rcv_ftype: int, node_id: str = '') \ - -> Generator[tuple[str, bool], None, None]: - '''parse a data sequence received from the inverter and - stores the values in Infos.db - - buf: buffer of the sequence to parse''' - for idx, row in RegisterMap.map.items(): - addr = idx & 0xffff - ftype = (idx >> 16) & 0xff - mtype = (idx 
>> 24) & 0xff - if ftype != rcv_ftype or mtype != msg_type: - continue - if not isinstance(row, dict): - continue - info_id = row['reg'] - result = Fmt.get_value(buf, addr, row) - - keys, level, unit, must_incr = self._key_obj(info_id) - - if keys: - name, update = self.update_db(keys, must_incr, result) - yield keys[0], update - else: - name = str(f'info-id.0x{addr:x}') - update = False - - if update: - self.tracer.log(level, f'[{node_id}] GEN3PLUS: {name}' - f' : {result}{unit}') - - def build(self, len, msg_type: int, rcv_ftype: int): - buf = bytearray(len) - for idx, row in RegisterMap.map.items(): - addr = idx & 0xffff - ftype = (idx >> 16) & 0xff - mtype = (idx >> 24) & 0xff - if ftype != rcv_ftype or mtype != msg_type: - continue - if not isinstance(row, dict): - continue - if 'const' in row: - val = row['const'] - else: - info_id = row['reg'] - val = self.get_db_value(info_id) - if not val: - continue - Fmt.set_value(buf, addr, row, val) - return buf diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py deleted file mode 100644 index f3680c9..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/inverter_g3p.py +++ /dev/null @@ -1,15 +0,0 @@ -from asyncio import StreamReader, StreamWriter - -from inverter_base import InverterBase -from gen3plus.solarman_v5 import SolarmanV5 -from gen3plus.solarman_emu import SolarmanEmu - - -class InverterG3P(InverterBase): - def __init__(self, reader: StreamReader, writer: StreamWriter, - client_mode: bool = False): - remote_prot = None - if client_mode: - remote_prot = SolarmanEmu - super().__init__(reader, writer, 'solarman', - SolarmanV5, client_mode, remote_prot) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py b/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py deleted file mode 100644 index 66035bb..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/gen3plus/solarman_emu.py +++ /dev/null @@ 
-1,138 +0,0 @@ -import logging -import struct - -from async_ifc import AsyncIfc -from gen3plus.solarman_v5 import SolarmanBase -from my_timer import Timer -from infos import Register - -logger = logging.getLogger('msg') - - -class SolarmanEmu(SolarmanBase): - def __init__(self, addr, ifc: "AsyncIfc", - server_side: bool, client_mode: bool): - super().__init__(addr, ifc, server_side=False, - _send_modbus_cb=None, - mb_timeout=8) - logging.debug('SolarmanEmu.init()') - self.db = ifc.remote.stream.db - self.snr = ifc.remote.stream.snr - self.hb_timeout = 60 - '''actual heatbeat timeout from the last response message''' - self.data_up_inv = self.db.get_db_value(Register.DATA_UP_INTERVAL) - '''time interval for getting new MQTT data messages''' - self.hb_timer = Timer(self.send_heartbeat_cb, self.node_id) - self.data_timer = Timer(self.send_data_cb, self.node_id) - self.last_sync = self._emu_timestamp() - '''timestamp when we send the last sync message (4110)''' - self.pkt_cnt = 0 - '''last sent packet number''' - - self.switch = { - - 0x4210: 'msg_data_ind', # real time data - 0x1210: self.msg_response, # at least every 5 minutes - - 0x4710: 'msg_hbeat_ind', # heatbeat - 0x1710: self.msg_response, # every 2 minutes - - 0x4110: 'msg_dev_ind', # device data, sync start - 0x1110: self.msg_response, # every 3 hours - - } - - self.log_lvl = { - - 0x4110: logging.INFO, # device data, sync start - 0x1110: logging.INFO, # every 3 hours - - 0x4210: logging.INFO, # real time data - 0x1210: logging.INFO, # at least every 5 minutes - - 0x4710: logging.DEBUG, # heatbeat - 0x1710: logging.DEBUG, # every 2 minutes - - } - - ''' - Our puplic methods - ''' - def close(self) -> None: - logging.info('SolarmanEmu.close()') - # we have references to methods of this class in self.switch - # so we have to erase self.switch, otherwise this instance can't be - # deallocated by the garbage collector ==> we get a memory leak - self.switch.clear() - self.log_lvl.clear() - self.hb_timer.close() - 
self.data_timer.close() - self.db = None - super().close() - - def _set_serial_no(self, snr: int): - logging.debug(f'SolarmanEmu._set_serial_no, snr: {snr}') - self.unique_id = str(snr) - - def _init_new_client_conn(self) -> bool: - logging.debug('SolarmanEmu.init_new()') - self.data_timer.start(self.data_up_inv) - return False - - def next_pkt_cnt(self): - '''get the next packet number''' - self.pkt_cnt = (self.pkt_cnt + 1) & 0xffffffff - return self.pkt_cnt - - def seconds_since_last_sync(self): - '''get seconds since last 0x4110 message was sent''' - return self._emu_timestamp() - self.last_sync - - def send_heartbeat_cb(self, exp_cnt): - '''send a heartbeat to the TSUN cloud''' - self._build_header(0x4710) - self.ifc.tx_add(struct.pack('> 8 - self.snd_idx = val & 0xff - else: - self.rcv_idx = val & 0xff - self.snd_idx = val >> 8 - - def get_send(self): - self.snd_idx += 1 - self.snd_idx &= 0xff - if self.server_side: - return (self.rcv_idx << 8) | self.snd_idx - else: - return (self.snd_idx << 8) | self.rcv_idx - - def __str__(self): - return f'{self.rcv_idx:02x}:{self.snd_idx:02x}' - - -class SolarmanBase(Message): - def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, - _send_modbus_cb, mb_timeout: int): - super().__init__('G3P', ifc, server_side, _send_modbus_cb, - mb_timeout) - ifc.rx_set_cb(self.read) - ifc.prot_set_timeout_cb(self._timeout) - ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn) - ifc.prot_set_update_header_cb(self.__update_header) - self.addr = addr - self.conn_no = ifc.get_conn_no() - self.header_len = 11 # overwrite construcor in class Message - self.control = 0 - self.seq = Sequence(server_side) - self.snr = 0 - self.time_ofs = 0 - - def read(self) -> float: - '''process all received messages in the _recv_buffer''' - self._read() - while True: - if not self.header_valid: - self.__parse_header(self.ifc.rx_peek(), - self.ifc.rx_len()) - - if self.header_valid and self.ifc.rx_len() >= \ - (self.header_len + 
self.data_len+2): - self.__process_complete_received_msg() - self.__flush_recv_msg() - else: - return 0 # wait 0s before sending a response - ''' - Our public methods - ''' - def _flow_str(self, server_side: bool, type: str): # noqa: F821 - switch = { - 'rx': ' <', - 'tx': ' >', - 'forwrd': '<< ', - 'drop': ' xx', - 'rxS': '> ', - 'txS': '< ', - 'forwrdS': ' >>', - 'dropS': 'xx ', - } - if server_side: - type += 'S' - return switch.get(type, '???') - - def get_fnc_handler(self, ctrl): - fnc = self.switch.get(ctrl, self.msg_unknown) - if callable(fnc): - return fnc, repr(fnc.__name__) - else: - return self.msg_unknown, repr(fnc) - - def _build_header(self, ctrl) -> None: - '''build header for new transmit message''' - self.send_msg_ofs = self.ifc.tx_len() - - self.ifc.tx_add(struct.pack( - ' None: - '''finish the transmit message, set lenght and checksum''' - _len = self.ifc.tx_len() - self.send_msg_ofs - struct.pack_into(' None: - - if (buf_len < self.header_len): # enough bytes for complete header? 
- return - - result = struct.unpack_from(' bool: - crc = buf[self.data_len+11] - stop = buf[self.data_len+12] - if stop != 0x15: - hex_dump_memory(logging.ERROR, - 'Drop packet w invalid stop byte from ' - f'{self.addr}:', buf, buf_len) - self.inc_counter('Invalid_Msg_Format') - if self.ifc.rx_len() > (self.data_len+13): - next_start = buf[self.data_len+13] - if next_start != 0xa5: - # erase broken recv buffer - self.ifc.rx_clear() - - return False - - check = sum(buf[1:buf_len-2]) & 0xff - if check != crc: - self.inc_counter('Invalid_Msg_Format') - logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}' - f' Stop:{int(stop):#02x}') - # start & stop byte are valid, discard only this message - return False - - return True - - def __flush_recv_msg(self) -> None: - self.ifc.rx_get(self.header_len + self.data_len+2) - self.header_valid = False - - def __dispatch_msg(self) -> None: - _fnc, _str = self.get_fnc_handler(self.control) - if self.unique_id: - logger.info(self._flow_str(self.server_side, 'rx') + - f' Ctl: {int(self.control):#04x}' + - f' Msg: {_str}') - _fnc() - else: - logger.info(self._flow_str(self.server_side, 'drop') + - f' Ctl: {int(self.control):#04x}' + - f' Msg: {_str}') - - ''' - Message handler methods - ''' - def msg_response(self): - data = self.ifc.rx_peek()[self.header_len:] - result = struct.unpack_from(' None: - logging.debug('Solarman.close()') - # we have references to methods of this class in self.switch - # so we have to erase self.switch, otherwise this instance can't be - # deallocated by the garbage collector ==> we get a memory leak - self.switch.clear() - self.log_lvl.clear() - super().close() - - async def send_start_cmd(self, snr: int, host: str, - forward: bool, - start_timeout=MB_CLIENT_DATA_UP): - self.no_forwarding = True - self.establish_inv_emu = forward - self.snr = snr - self._set_serial_no(snr) - self.mb_timeout = start_timeout - self.db.set_db_def_value(Register.IP_ADDRESS, host) - 
self.db.set_db_def_value(Register.POLLING_INTERVAL, - self.mb_timeout) - self.db.set_db_def_value(Register.DATA_UP_INTERVAL, - 300) - self.db.set_db_def_value(Register.COLLECT_INTERVAL, - 1) - self.db.set_db_def_value(Register.HEARTBEAT_INTERVAL, - 120) - self.db.set_db_def_value(Register.SENSOR_LIST, - Fmt.hex4((self.sensor_list, ))) - self.new_data['controller'] = True - - self.state = State.up - self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG) - self.mb_timer.start(self.mb_timeout) - - def new_state_up(self): - if self.state is not State.up: - self.state = State.up - if (self.modbus_polling): - self.mb_timer.start(self.mb_first_timeout) - self.db.set_db_def_value(Register.POLLING_INTERVAL, - self.mb_timeout) - - def establish_emu(self): - _len = 223 - build_msg = self.db.build(_len, 0x41, 2) - struct.pack_into( - ' {inv}') - if (type(inv) is dict and 'monitor_sn' in inv - and inv['monitor_sn'] == snr): - self.__set_config_parms(inv) - self.db.set_pv_module_details(inv) - logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501 - - self.db.set_db_def_value(Register.COLLECTOR_SNR, snr) - self.db.set_db_def_value(Register.SERIAL_NUMBER, key) - break - else: - self.node_id = '' - self.sug_area = '' - if 'allow_all' not in inverters or not inverters['allow_all']: - self.inc_counter('Unknown_SNR') - self.unique_id = None - logger.warning(f'ignore message from unknow inverter! 
(SerialNo: {serial_no})') # noqa: E501 - return - logger.warning(f'SerialNo {serial_no} not known but accepted!') - - self.unique_id = serial_no - - def forward(self, buffer, buflen) -> None: - '''add the actual receive msg to the forwarding queue''' - if self.no_forwarding: - return - tsun = Config.get('solarman') - if tsun['enabled']: - self.ifc.fwd_add(buffer[:buflen]) - self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') - - _, _str = self.get_fnc_handler(self.control) - logger.info(self._flow_str(self.server_side, 'forwrd') + - f' Ctl: {int(self.control):#04x}' - f' Msg: {_str}') - - def _init_new_client_conn(self) -> bool: - return False - - def _heartbeat(self) -> int: - return 60 # pragma: no cover - - def __send_ack_rsp(self, msgtype, ftype, ack=1): - self._build_header(msgtype) - self.ifc.tx_add(struct.pack(' bool: - return not cmd.startswith(tuple(self.at_acl[connection]['allow'])) or \ - cmd.startswith(tuple(self.at_acl[connection]['block'])) - - async def send_at_cmd(self, at_cmd: str) -> None: - if self.state != State.up: - logger.warning(f'[{self.node_id}] ignore AT+ cmd,' - ' as the state is not UP') - return - at_cmd = at_cmd.strip() - - if self.at_cmd_forbidden(cmd=at_cmd, connection='mqtt'): - data_json = f'\'{at_cmd}\' is forbidden' - node_id = self.node_id - key = 'at_resp' - logger.info(f'{key}: {data_json}') - await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 - return - - self.forward_at_cmd_resp = False - self._build_header(0x4510) - self.ifc.tx_add(struct.pack(f'> 8 - for key, update in self.db.parse(self.ifc.rx_peek(), msg_type, ftype, - self.node_id): - if update: - if key == 'inverter': - inv_update = True - self._set_mqtt_timestamp(key, ts) - self.new_data[key] = True - - if inv_update: - self.__build_model_name() - ''' - Message handler methods - ''' - def msg_unknown(self): - logger.warning(f"Unknow Msg: ID:{int(self.control):#04x}") - self.inc_counter('Unknown_Msg') - self.__forward_msg() - - 
def msg_dev_ind(self): - data = self.ifc.rx_peek()[self.header_len:] - result = struct.unpack_from(self.HDR_FMT, data, 0) - ftype = result[0] # always 2 - total = result[1] - tim = result[2] - res = result[3] # always zero - logger.info(f'frame type:{ftype:02x}' - f' timer:{tim:08x}s null:{res}') - if self.time_ofs: - # dt = datetime.fromtimestamp(total + self.time_ofs) - # logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}') - ts = total + self.time_ofs - else: - ts = None - self.__process_data(ftype, ts) - self.sensor_list = int(self.db.get_db_value(Register.SENSOR_LIST, 0), - 16) - self.__forward_msg() - self.__send_ack_rsp(0x1110, ftype) - - def msg_data_ind(self): - data = self.ifc.rx_peek() - result = struct.unpack_from(' int: - ftype = self.ifc.rx_peek()[self.header_len] - if ftype == self.AT_CMD: - if self.forward_at_cmd_resp: - return logging.INFO - return logging.DEBUG - elif ftype == self.MB_RTU_CMD \ - and self.server_side: - return self.mb.last_log_lvl - - return logging.WARNING - - def msg_command_rsp(self): - data = self.ifc.rx_peek()[self.header_len: - self.header_len+self.data_len] - ftype = data[0] - if ftype == self.AT_CMD: - if not self.forward_at_cmd_resp: - data_json = data[14:].decode("utf-8") - node_id = self.node_id - key = 'at_resp' - logger.info(f'{key}: {data_json}') - self.publish_mqtt(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 - return - elif ftype == self.MB_RTU_CMD: - self.__modbus_command_rsp(data) - return - self.__forward_msg() - - def __parse_modbus_rsp(self, data): - inv_update = False - self.modbus_elms = 0 - for key, update, _ in self.mb.recv_resp(self.db, data[14:]): - self.modbus_elms += 1 - if update: - if key == 'inverter': - inv_update = True - self._set_mqtt_timestamp(key, self._timestamp()) - self.new_data[key] = True - return inv_update - - def __modbus_command_rsp(self, data): - '''precess MODBUS RTU response''' - valid = data[1] - modbus_msg_len = self.data_len - 14 - # 
logger.debug(f'modbus_len:{modbus_msg_len} accepted:{valid}') - if valid == 1 and modbus_msg_len > 4: - # logger.info(f'first byte modbus:{data[14]}') - inv_update = self.__parse_modbus_rsp(data) - if inv_update: - self.__build_model_name() - - if self.establish_inv_emu and not self.ifc.remote.stream: - self.establish_emu() - - def msg_hbeat_ind(self): - data = self.ifc.rx_peek()[self.header_len:] - result = struct.unpack_from(' str | int: - if not reverse: - return f'{val[0]:04x}' - else: - return int(val, 16) - - @staticmethod - def mac(val: tuple | str, reverse=False) -> str | tuple: - if not reverse: - return "%02x:%02x:%02x:%02x:%02x:%02x" % val - else: - return ( - int(val[0:2], 16), int(val[3:5], 16), - int(val[6:8], 16), int(val[9:11], 16), - int(val[12:14], 16), int(val[15:], 16)) - - @staticmethod - def version(val: tuple | str, reverse=False) -> str | int: - if not reverse: - x = val[0] - return f'V{(x >> 12)}.{(x >> 8) & 0xf}' \ - f'.{(x >> 4) & 0xf}{x & 0xf:1X}' - else: - arr = val[1:].split('.') - return int(arr[0], 10) << 12 | \ - int(arr[1], 10) << 8 | \ - int(arr[2][:-1], 10) << 4 | \ - int(arr[2][-1:], 16) - - @staticmethod - def set_value(buf: bytearray, idx: int, row: dict, val): - '''Get a value from buf and interpret as in row defined''' - fmt = row['fmt'] - if 'offset' in row: - val = val - row['offset'] - if 'quotient' in row: - val = round(val * row['quotient']) - if 'ratio' in row: - val = round(val / row['ratio']) - if 'func' in row: - val = row['func'](val, reverse=True) - if isinstance(val, str): - val = bytes(val, 'UTF8') - - if isinstance(val, tuple): - struct.pack_into(fmt, buf, idx, *val) - else: - struct.pack_into(fmt, buf, idx, val) - - -class ClrAtMidnight: - __clr_at_midnight = [Register.PV1_DAILY_GENERATION, Register.PV2_DAILY_GENERATION, Register.PV3_DAILY_GENERATION, Register.PV4_DAILY_GENERATION, Register.PV5_DAILY_GENERATION, Register.PV6_DAILY_GENERATION, Register.DAILY_GENERATION] # noqa: E501 - db = {} - - @classmethod - 
def add(cls, keys: list, prfx: str, reg: Register) -> None: - if reg not in cls.__clr_at_midnight: - return - - prfx += f'{keys[0]}' - db_dict = cls.db - if prfx not in db_dict: - db_dict[prfx] = {} - db_dict = db_dict[prfx] - - for key in keys[1:-1]: - if key not in db_dict: - db_dict[key] = {} - db_dict = db_dict[key] - db_dict[keys[-1]] = 0 - - @classmethod - def elm(cls) -> Generator[tuple[str, dict], None, None]: - for reg, name in cls.db.items(): - yield reg, name - cls.db = {} - - -class Infos: - __slots__ = ('db', 'tracer', ) - - LIGHTNING = 'mdi:lightning-bolt' - COUNTER = 'mdi:counter' - GAUGE = 'mdi:gauge' - SOLAR_POWER_VAR = 'mdi:solar-power-variant' - SOLAR_POWER = 'mdi:solar-power' - WIFI = 'mdi:wifi' - UPDATE = 'mdi:update' - DAILY_GEN = 'Daily Generation' - TOTAL_GEN = 'Total Generation' - FMT_INT = '| int' - FMT_FLOAT = '| float' - FMT_STRING_SEC = '| string + " s"' - stat = {} - app_name = os.getenv('SERVICE_NAME', 'proxy') - version = os.getenv('VERSION', 'unknown') - new_stat_data = {} - - @classmethod - def static_init(cls): - logging.debug('Initialize proxy statistics') - # init proxy counter in the class.stat dictionary - cls.stat['proxy'] = {} - for key in cls.__info_defs: - name = cls.__info_defs[key]['name'] - if name[0] == 'proxy': - cls.stat['proxy'][name[1]] = 0 - - # add values from the environment to the device definition table - prxy = cls.__info_devs['proxy'] - prxy['sw'] = cls.version - prxy['mdl'] = cls.app_name - - def __init__(self): - self.db = {} - self.tracer = logging.getLogger('data') - - __info_devs = { - 'proxy': {'singleton': True, 'name': 'Proxy', 'mf': 'Stefan Allius'}, # noqa: E501 - 'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': Register.CHIP_MODEL, 'mf': Register.CHIP_TYPE, 'sw': Register.COLLECTOR_FW_VERSION, 'mac': Register.MAC_ADDR, 'sn': Register.COLLECTOR_SNR}, # noqa: E501 - 'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': Register.EQUIPMENT_MODEL, 'mf': Register.MANUFACTURER, 
'sw': Register.VERSION, 'sn': Register.SERIAL_NUMBER}, # noqa: E501 - 'input_pv1': {'via': 'inverter', 'name': 'Module PV1', 'mdl': Register.PV1_MODEL, 'mf': Register.PV1_MANUFACTURER}, # noqa: E501 - 'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'mdl': Register.PV2_MODEL, 'mf': Register.PV2_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 2}}, # noqa: E501 - 'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'mdl': Register.PV3_MODEL, 'mf': Register.PV3_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 3}}, # noqa: E501 - 'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'mdl': Register.PV4_MODEL, 'mf': Register.PV4_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 4}}, # noqa: E501 - 'input_pv5': {'via': 'inverter', 'name': 'Module PV5', 'mdl': Register.PV5_MODEL, 'mf': Register.PV5_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 5}}, # noqa: E501 - 'input_pv6': {'via': 'inverter', 'name': 'Module PV6', 'mdl': Register.PV6_MODEL, 'mf': Register.PV6_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 6}}, # noqa: E501 - } - - __comm_type_val_tpl = "{%set com_types = ['n/a','Wi-Fi', 'G4', 'G5', 'GPRS'] %}{{com_types[value_json['Communication_Type']|int(0)]|default(value_json['Communication_Type'])}}" # noqa: E501 - __work_mode_val_tpl = "{%set mode = ['Normal-Mode', 'Aging-Mode', 'ATE-Mode', 'Shielding GFDI', 'DTU-Mode'] %}{{mode[value_json['Work_Mode']|int(0)]|default(value_json['Work_Mode'])}}" # noqa: E501 - __status_type_val_tpl = "{%set inv_status = ['Off-line', 'On-grid', 'Off-grid'] %}{{inv_status[value_json['Inverter_Status']|int(0)]|default(value_json['Inverter_Status'])}}" # noqa: E501 - __rated_power_val_tpl = "{% if 'Rated_Power' in value_json and value_json['Rated_Power'] != None %}{{value_json['Rated_Power']|string() +' W'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 - __designed_power_val_tpl = ''' -{% if 'Max_Designed_Power' in value_json and - value_json['Max_Designed_Power'] != None %} - {% 
if value_json['Max_Designed_Power'] | int(0xffff) < 0x8000 %} - {{value_json['Max_Designed_Power']|string() +' W'}} - {% else %} - n/a - {% endif %} -{% else %} - {{ this.state }} -{% endif %} -''' - __inv_alarm_val_tpl = ''' -{% if 'Inverter_Alarm' in value_json and - value_json['Inverter_Alarm'] != None %} - {% set val_int = value_json['Inverter_Alarm'] | int %} - {% if val_int == 0 %} - {% set result = 'noAlarm'%} - {%else%} - {% set result = '' %} - {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} - {% endif %} - {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} - {% endif %} - {% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%} - {% endif %} - {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} - {% endif %} - {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} - {% endif %} - {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} - {% endif %} - {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} - {% endif %} - {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} - {% endif %} - {% if val_int | bitwise_and(9)%}{% set result = result + 'noUtility, '%} - {% endif %} - {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} - {% endif %} - {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} - {% endif %} - {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} - {% endif %} - {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} - {% endif %} - {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} - {% endif %} - {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} - {% endif %} - {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} - {% endif %} - {% endif %} - {{ result }} -{% else %} - {{ this.state }} -{% endif %} -''' - __inv_fault_val_tpl = ''' -{% if 'Inverter_Fault' in value_json and - value_json['Inverter_Fault'] 
!= None %} - {% set val_int = value_json['Inverter_Fault'] | int %} - {% if val_int == 0 %} - {% set result = 'noFault'%} - {%else%} - {% set result = '' %} - {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} - {% endif %} - {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} - {% endif %} - {% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%} - {% endif %} - {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} - {% endif %} - {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} - {% endif %} - {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} - {% endif %} - {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} - {% endif %} - {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} - {% endif %} - {% if val_int | bitwise_and(9)%}{% set result = result + 'Bit9, '%} - {% endif %} - {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} - {% endif %} - {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} - {% endif %} - {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} - {% endif %} - {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} - {% endif %} - {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} - {% endif %} - {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} - {% endif %} - {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} - {% endif %} - {% endif %} - {{ result }} -{% else %} - {{ this.state }} -{% endif %} -''' - - __input_coef_val_tpl = "{% if 'Output_Coefficient' in value_json and value_json['Input_Coefficient'] != None %}{{value_json['Input_Coefficient']|string() +' %'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 - __output_coef_val_tpl = "{% if 'Output_Coefficient' in value_json and value_json['Output_Coefficient'] != None %}{{value_json['Output_Coefficient']|string() +' %'}}{% else %}{{ this.state 
}}{% endif %}" # noqa: E501 - - __info_defs = { - # collector values used for device registration: - Register.COLLECTOR_FW_VERSION: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.CHIP_TYPE: {'name': ['collector', 'Chip_Type'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.CHIP_MODEL: {'name': ['collector', 'Chip_Model'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.TRACE_URL: {'name': ['collector', 'Trace_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.LOGGER_URL: {'name': ['collector', 'Logger_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.MAC_ADDR: {'name': ['collector', 'MAC-Addr'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.COLLECTOR_SNR: {'name': ['collector', 'Serial_Number'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501 - - - # inverter values used for device registration: - Register.PRODUCT_NAME: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.MANUFACTURER: {'name': ['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.VERSION: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.SERIAL_NUMBER: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EQUIPMENT_MODEL: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.NO_INPUTS: {'name': ['inverter', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.MAX_DESIGNED_POWER: {'name': ['inverter', 'Max_Designed_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'designed_power_', 'val_tpl': __designed_power_val_tpl, 'name': 'Max Designed 
Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.RATED_POWER: {'name': ['inverter', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'val_tpl': __rated_power_val_tpl, 'name': 'Rated Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.WORK_MODE: {'name': ['inverter', 'Work_Mode'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'work_mode_', 'name': 'Work Mode', 'val_tpl': __work_mode_val_tpl, 'icon': 'mdi:power', 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.INPUT_COEFFICIENT: {'name': ['inverter', 'Input_Coefficient'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'input_coef_', 'val_tpl': __input_coef_val_tpl, 'name': 'Input Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.OUTPUT_COEFFICIENT: {'name': ['inverter', 'Output_Coefficient'], 'level': logging.INFO, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'output_coef_', 'val_tpl': __output_coef_val_tpl, 'name': 'Output Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV1_MANUFACTURER: {'name': ['inverter', 'PV1_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV1_MODEL: {'name': ['inverter', 'PV1_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV2_MANUFACTURER: {'name': ['inverter', 'PV2_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV2_MODEL: {'name': ['inverter', 'PV2_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV3_MANUFACTURER: {'name': ['inverter', 'PV3_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV3_MODEL: {'name': ['inverter', 'PV3_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - 
Register.PV4_MANUFACTURER: {'name': ['inverter', 'PV4_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV4_MODEL: {'name': ['inverter', 'PV4_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV5_MANUFACTURER: {'name': ['inverter', 'PV5_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV5_MODEL: {'name': ['inverter', 'PV5_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV6_MANUFACTURER: {'name': ['inverter', 'PV6_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PV6_MODEL: {'name': ['inverter', 'PV6_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.BOOT_STATUS: {'name': ['inverter', 'BOOT_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.DSP_STATUS: {'name': ['inverter', 'DSP_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - # proxy: - Register.INVERTER_CNT: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': FMT_INT, 'name': 'Active Inverter Connections', 'icon': COUNTER}}, # noqa: E501 - Register.CLOUD_CONN_CNT: {'name': ['proxy', 'Cloud_Conn_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'cloud_conn_count_', 'fmt': FMT_INT, 'name': 'Active Cloud Connections', 'icon': COUNTER}}, # noqa: E501 - Register.UNKNOWN_SNR: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': FMT_INT, 'name': 'Unknown Serial No', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.UNKNOWN_MSG: {'name': ['proxy', 'Unknown_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_msg_', 'fmt': FMT_INT, 'name': 'Unknown Msg Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # 
noqa: E501 - Register.INVALID_DATA_TYPE: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': FMT_INT, 'name': 'Invalid Data Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.INTERNAL_ERROR: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': FMT_INT, 'name': 'Internal Error', 'icon': COUNTER, 'ent_cat': 'diagnostic', 'en': False}}, # noqa: E501 - Register.UNKNOWN_CTRL: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': FMT_INT, 'name': 'Unknown Control Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.OTA_START_MSG: {'name': ['proxy', 'OTA_Start_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'ota_start_cmd_', 'fmt': FMT_INT, 'name': 'OTA Start Cmd', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.SW_EXCEPTION: {'name': ['proxy', 'SW_Exception'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'sw_exception_', 'fmt': FMT_INT, 'name': 'Internal SW Exception', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.INVALID_MSG_FMT: {'name': ['proxy', 'Invalid_Msg_Format'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_msg_fmt_', 'fmt': FMT_INT, 'name': 'Invalid Message Format', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.AT_COMMAND: {'name': ['proxy', 'AT_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_', 'fmt': FMT_INT, 'name': 'AT Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: 
E501 - Register.AT_COMMAND_BLOCKED: {'name': ['proxy', 'AT_Command_Blocked'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_blocked_', 'fmt': FMT_INT, 'name': 'AT Command Blocked', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.MODBUS_COMMAND: {'name': ['proxy', 'Modbus_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'modbus_cmd_', 'fmt': FMT_INT, 'name': 'Modbus Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 - # 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':FMT_FLOAT,'name': 'Grid Voltage'}}, # noqa: E501 - - # events - Register.EVENT_ALARM: {'name': ['events', 'Inverter_Alarm'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_alarm_', 'name': 'Inverter Alarm', 'val_tpl': __inv_alarm_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 - Register.EVENT_FAULT: {'name': ['events', 'Inverter_Fault'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_fault_', 'name': 'Inverter Fault', 'val_tpl': __inv_fault_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 - Register.EVENT_BF1: {'name': ['events', 'Inverter_Bitfield_1'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.EVENT_BF2: {'name': ['events', 'Inverter_bitfield_2'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - # Register.EVENT_409: {'name': ['events', '409_No_Utility'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - # Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - - # grid measures: - Register.TS_GRID: {'name': ['grid', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: 
E501 - Register.GRID_VOLTAGE: {'name': ['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'out_volt_', 'fmt': FMT_FLOAT, 'name': 'Grid Voltage', 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.GRID_CURRENT: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': FMT_FLOAT, 'name': 'Grid Current', 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.GRID_FREQUENCY: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': FMT_FLOAT, 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.OUTPUT_POWER: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': FMT_FLOAT, 'name': 'Power'}}, # noqa: E501 - Register.INVERTER_TEMP: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': FMT_INT, 'name': 'Temperature'}}, # noqa: E501 - Register.INVERTER_STATUS: {'name': ['env', 'Inverter_Status'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_status_', 'name': 'Inverter Status', 'val_tpl': __status_type_val_tpl, 'icon': 'mdi:power'}}, # noqa: E501 - Register.DETECT_STATUS_1: {'name': ['env', 'Detect_Status_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.DETECT_STATUS_2: {'name': ['env', 'Detect_Status_2'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - - # input measures: - Register.TS_INPUT: {'name': ['input', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.PV1_VOLTAGE: {'name': ['input', 
'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv1_', 'val_tpl': "{{ (value_json['pv1']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV1_CURRENT: {'name': ['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv1_', 'val_tpl': "{{ (value_json['pv1']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV1_POWER: {'name': ['input', 'pv1', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv1_', 'val_tpl': "{{ (value_json['pv1']['Power'] | float)}}"}}, # noqa: E501 - Register.PV2_VOLTAGE: {'name': ['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv2_', 'val_tpl': "{{ (value_json['pv2']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV2_CURRENT: {'name': ['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv2_', 'val_tpl': "{{ (value_json['pv2']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV2_POWER: {'name': ['input', 'pv2', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv2_', 'val_tpl': "{{ (value_json['pv2']['Power'] | float)}}"}}, # noqa: E501 - Register.PV3_VOLTAGE: {'name': ['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv3_', 'val_tpl': "{{ (value_json['pv3']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # 
noqa: E501 - Register.PV3_CURRENT: {'name': ['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv3_', 'val_tpl': "{{ (value_json['pv3']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV3_POWER: {'name': ['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv3_', 'val_tpl': "{{ (value_json['pv3']['Power'] | float)}}"}}, # noqa: E501 - Register.PV4_VOLTAGE: {'name': ['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv4_', 'val_tpl': "{{ (value_json['pv4']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV4_CURRENT: {'name': ['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv4_', 'val_tpl': "{{ (value_json['pv4']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV4_POWER: {'name': ['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv4_', 'val_tpl': "{{ (value_json['pv4']['Power'] | float)}}"}}, # noqa: E501 - Register.PV5_VOLTAGE: {'name': ['input', 'pv5', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv5', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv5_', 'val_tpl': "{{ (value_json['pv5']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV5_CURRENT: {'name': ['input', 'pv5', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv5', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv5_', 'val_tpl': "{{ (value_json['pv5']['Current'] | 
float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV5_POWER: {'name': ['input', 'pv5', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv5', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv5_', 'val_tpl': "{{ (value_json['pv5']['Power'] | float)}}"}}, # noqa: E501 - Register.PV6_VOLTAGE: {'name': ['input', 'pv6', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv6', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv6_', 'val_tpl': "{{ (value_json['pv6']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV6_CURRENT: {'name': ['input', 'pv6', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv6', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv6_', 'val_tpl': "{{ (value_json['pv6']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.PV6_POWER: {'name': ['input', 'pv6', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv6', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv6_', 'val_tpl': "{{ (value_json['pv6']['Power'] | float)}}"}}, # noqa: E501 - Register.PV1_DAILY_GENERATION: {'name': ['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv1_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 - Register.PV1_TOTAL_GENERATION: {'name': ['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv1_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 - Register.PV2_DAILY_GENERATION: {'name': ['input', 'pv2', 'Daily_Generation'], 
'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv2_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 - Register.PV2_TOTAL_GENERATION: {'name': ['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv2_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 - Register.PV3_DAILY_GENERATION: {'name': ['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv3_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 - Register.PV3_TOTAL_GENERATION: {'name': ['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv3_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 - Register.PV4_DAILY_GENERATION: {'name': ['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv4_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 - Register.PV4_TOTAL_GENERATION: {'name': ['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv4_', 'name': TOTAL_GEN, 'val_tpl': "{{ 
(value_json['pv4']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 - Register.PV5_DAILY_GENERATION: {'name': ['input', 'pv5', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv5', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv5_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv5']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 - Register.PV5_TOTAL_GENERATION: {'name': ['input', 'pv5', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv5', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv5_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv5']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 - Register.PV6_DAILY_GENERATION: {'name': ['input', 'pv6', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv6', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv6_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv6']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 - Register.PV6_TOTAL_GENERATION: {'name': ['input', 'pv6', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv6', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv6_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv6']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 - # total: - Register.TS_TOTAL: {'name': ['total', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.DAILY_GENERATION: {'name': ['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_', 'fmt': FMT_FLOAT, 'name': DAILY_GEN, 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501 - Register.TOTAL_GENERATION: 
{'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': FMT_FLOAT, 'name': TOTAL_GEN, 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501 - - # controller: - Register.SIGNAL_STRENGTH: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': FMT_INT, 'name': 'Signal Strength', 'icon': WIFI}}, # noqa: E501 - Register.POWER_ON_TIME: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'fmt': FMT_INT, 'name': 'Power on Time', 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.COLLECT_INTERVAL: {'name': ['controller', 'Collect_Interval'], 'level': logging.DEBUG, 'unit': 'min', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_collect_intval_', 'fmt': '| string + " min"', 'name': 'Data Collect Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.CONNECT_COUNT: {'name': ['controller', 'Connect_Count'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'connect_count_', 'fmt': FMT_INT, 'name': 'Connect Count', 'icon': COUNTER, 'comp': 'sensor', 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.COMMUNICATION_TYPE: {'name': ['controller', 'Communication_Type'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'comm_type_', 'name': 'Communication Type', 'val_tpl': __comm_type_val_tpl, 'comp': 'sensor', 'icon': WIFI}}, # noqa: E501 - Register.DATA_UP_INTERVAL: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_up_intval_', 'fmt': FMT_STRING_SEC, 'name': 
'Data Up Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.HEARTBEAT_INTERVAL: {'name': ['controller', 'Heartbeat_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'heartbeat_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Heartbeat Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.IP_ADDRESS: {'name': ['controller', 'IP_Address'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'ip_address_', 'fmt': '| string', 'name': 'IP Address', 'icon': WIFI, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.POLLING_INTERVAL: {'name': ['controller', 'Polling_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'polling_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Polling Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 - Register.SENSOR_LIST: {'name': ['controller', 'Sensor_List'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.SSID: {'name': ['controller', 'WiFi_SSID'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - - Register.OUTPUT_SHUTDOWN: {'name': ['other', 'Output_Shutdown'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.RATED_LEVEL: {'name': ['other', 'Rated_Level'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.GRID_VOLT_CAL_COEF: {'name': ['other', 'Grid_Volt_Cal_Coef'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.PROD_COMPL_TYPE: {'name': ['other', 'Prod_Compliance_Type'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 - Register.INV_UNKNOWN_1: {'name': ['inv_unknown', 'Unknown_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - - } - - @property - def info_devs(self) -> dict: - return self.__info_devs - - @property - def info_defs(self) -> dict: - return self.__info_defs - - def dev_value(self, idx: str | int) -> str | int | float | dict | 
None: - '''returns the stored device value from our database - - idx:int ==> lookup the value in the database and return it as str, - int or float. If the value is not available return 'None' - idx:str ==> returns the string as a fixed value without a - database lookup - ''' - if type(idx) is str: - return idx # return idx as a fixed value - elif idx in self.info_defs: - row = self.info_defs[idx] - if 'singleton' in row and row['singleton']: - db_dict = self.stat - else: - db_dict = self.db - - keys = row['name'] - - for key in keys: - if key not in db_dict: - return None # value not found in the database - db_dict = db_dict[key] - return db_dict # value of the reqeusted entry - - return None # unknwon idx, not in info_defs - - @classmethod - def inc_counter(cls, counter: str) -> None: - '''inc proxy statistic counter''' - db_dict = cls.stat['proxy'] - db_dict[counter] += 1 - cls.new_stat_data['proxy'] = True - - @classmethod - def dec_counter(cls, counter: str) -> None: - '''dec proxy statistic counter''' - db_dict = cls.stat['proxy'] - db_dict[counter] -= 1 - cls.new_stat_data['proxy'] = True - - def ha_proxy_confs(self, ha_prfx: str, node_id: str, snr: str) \ - -> Generator[tuple[str, str, str, str], None, None]: - '''Generator function yields json register struct for home-assistant - auto configuration and the unique entity string, for all proxy - registers - - arguments: - ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t string - node_id:str ==> node id of the inverter, used to build unique entity - snr:str ==> serial number of the inverter, used to build unique - entity strings - ''' - # iterate over RegisterMap.map and get the register values for entries - # with Singleton=True, which means that this is a proxy register - for reg in self.info_defs.keys(): - res = self.ha_conf(reg, ha_prfx, node_id, snr, True) # noqa: E501 - if res: - yield res - - def ha_conf(self, key, ha_prfx, node_id, snr, singleton: bool, - sug_area: str = '') -> tuple[str, 
str, str, str] | None: - '''Method to build json register struct for home-assistant - auto configuration and the unique entity string, for all proxy - registers - - arguments: - key ==> index of info_defs dict which reference the topic - ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t string - node_id:str ==> node id of the inverter, used to build unique entity - snr:str ==> serial number of the inverter, used to build unique - entity strings - singleton ==> bool to allow/disaalow proxy topics which are common - for all invters - sug_area ==> area name for home assistant - ''' - if key not in self.info_defs: - return None - row = self.info_defs[key] - - if 'singleton' in row: - if singleton != row['singleton']: - return None - elif singleton: - return None - - # check if we have details for home assistant - if 'ha' in row: - return self.__ha_conf(row, key, ha_prfx, node_id, snr, sug_area) - return None - - def __ha_conf(self, row, key, ha_prfx, node_id, snr, - sug_area: str) -> tuple[str, str, str, str] | None: - ha = row['ha'] - if 'comp' in ha: - component = ha['comp'] - else: - component = 'sensor' - attr = self.__build_attr(row, key, ha_prfx, node_id, snr) - if 'dev' in ha: - device = self.info_devs[ha['dev']] - if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501 - return None - attr['dev'] = self.__build_dev(device, key, ha, snr, - sug_area) - attr['o'] = self.__build_origin() - - else: - self.inc_counter('Internal_Error') - logging.error(f"Infos.info_defs: the row for {key} " - "missing 'dev' value for ha register") - return json.dumps(attr), component, node_id, attr['uniq_id'] - - def __build_attr(self, row, key, ha_prfx, node_id, snr): - attr = {} - ha = row['ha'] - if 'name' in ha: - attr['name'] = ha['name'] - else: - attr['name'] = row['name'][-1] - prfx = ha_prfx + node_id - attr['stat_t'] = prfx + row['name'][0] - attr['dev_cla'] = ha['dev_cla'] - attr['stat_cla'] = ha['stat_cla'] - attr['uniq_id'] = ha['id']+snr - if 
'val_tpl' in ha: - attr['val_tpl'] = ha['val_tpl'] - elif 'fmt' in ha: - attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }} # noqa: E501 - else: - self.inc_counter('Internal_Error') - logging.error(f"Infos.info_defs: the row for {key} do" - " not have a 'val_tpl' nor a 'fmt' value") - # add unit_of_meas only, if status_class isn't none. If - # status_cla is None we want a number format and not line - # graph in home assistant. A unit will change the number - # format to a line graph - if 'unit' in row and attr['stat_cla'] is not None: - attr['unit_of_meas'] = row['unit'] # 'unit_of_meas' - if 'icon' in ha: - attr['ic'] = ha['icon'] # icon for the entity - if 'nat_prc' in ha: # pragma: no cover - attr['sug_dsp_prc'] = ha['nat_prc'] # precison of floats - if 'ent_cat' in ha: - attr['ent_cat'] = ha['ent_cat'] # diagnostic, config - # enabled_by_default is deactivated, since it avoid the via - # setup of the devices. It seems, that there is a bug in home - # assistant. 
tested with 'Home Assistant 2023.10.4' - # if 'en' in ha: # enabled_by_default - # attr['en'] = ha['en'] - return attr - - def __build_dev(self, device, key, ha, snr, sug_area): - dev = {} - singleton = 'singleton' in device and device['singleton'] - # the same name for 'name' and 'suggested area', so we get - # dedicated devices in home assistant with short value - # name and headline - if (sug_area == '' or singleton): - dev['name'] = device['name'] - dev['sa'] = device['name'] - else: - dev['name'] = device['name']+' - '+sug_area - dev['sa'] = device['name']+' - '+sug_area - self.__add_via_dev(dev, device, key, snr) - for key in ('mdl', 'mf', 'sw', 'hw', 'sn'): # add optional - # values fpr 'modell', 'manufacturer', 'sw version' and - # 'hw version' - if key in device: - data = self.dev_value(device[key]) - if data is not None: - dev[key] = data - if singleton: - dev['ids'] = [f"{ha['dev']}"] - else: - dev['ids'] = [f"{ha['dev']}_{snr}"] - self.__add_connection(dev, device) - return dev - - def __add_connection(self, dev, device): - if 'mac' in device: - mac_str = self.dev_value(device['mac']) - if mac_str is not None: - if 12 == len(mac_str): - mac_str = ':'.join(mac_str[i:i+2] for i in range(0, 12, 2)) - dev['cns'] = [["mac", f"{mac_str}"]] - - def __add_via_dev(self, dev, device, key, snr): - if 'via' in device: # add the link to the parent device - via = device['via'] - if via in self.info_devs: - via_dev = self.info_devs[via] - if 'singleton' in via_dev and via_dev['singleton']: - dev['via_device'] = via - else: - dev['via_device'] = f"{via}_{snr}" - else: - self.inc_counter('Internal_Error') - logging.error(f"Infos.info_defs: the row for " - f"{key} has an invalid via value: " - f"{via}") - - def __build_origin(self): - origin = {} - origin['name'] = self.app_name - origin['sw'] = self.version - return origin - - def ha_remove(self, key, node_id, snr) -> tuple[str, str, str, str] | None: - '''Method to build json unregister struct for home-assistant - to 
remove topics per auto configuration. Only for inverer topics. - - arguments: - key ==> index of info_defs dict which reference the topic - node_id:str ==> node id of the inverter, used to build unique entity - snr:str ==> serial number of the inverter, used to build unique - entity strings - - hint: - the returned tuple must have the same format as self.ha_conf() - ''' - if key not in self.info_defs: - return None - row = self.info_defs[key] - - if 'singleton' in row and row['singleton']: - return None - - # check if we have details for home assistant - if 'ha' in row: - ha = row['ha'] - if 'comp' in ha: - component = ha['comp'] - else: - component = 'sensor' - attr = {} - uniq_id = ha['id']+snr - - return json.dumps(attr), component, node_id, uniq_id - return None - - def _key_obj(self, id: Register) -> tuple: - d = self.info_defs.get(id, {'name': None, 'level': logging.DEBUG, - 'unit': ''}) - if 'ha' in d and 'must_incr' in d['ha']: - must_incr = d['ha']['must_incr'] - else: - must_incr = False - - return d['name'], d['level'], d['unit'], must_incr - - def update_db(self, keys: list, must_incr: bool, result): - name = '' - db_dict = self.db - for key in keys[:-1]: - if key not in db_dict: - db_dict[key] = {} - db_dict = db_dict[key] - name += key + '.' - if keys[-1] not in db_dict: - update = (not must_incr or result > 0) - else: - if must_incr: - update = db_dict[keys[-1]] < result - else: - update = db_dict[keys[-1]] != result - if update: - db_dict[keys[-1]] = result - name += keys[-1] - return name, update - - def set_db_def_value(self, id: Register, value) -> None: - '''set default value''' - row = self.info_defs[id] - if isinstance(row, dict): - keys = row['name'] - self.update_db(keys, False, value) - - def reg_clr_at_midnight(self, prfx: str, - check_dependencies: bool = True) -> None: - '''register all registers for the 'ClrAtMidnight' class and - check if device of every register is available otherwise ignore - the register. 
- - prfx:str ==> prefix for the home assistant 'stat_t string'' - ''' - for id, row in self.info_defs.items(): - if check_dependencies and 'ha' in row: - ha = row['ha'] - if 'dev' in ha: - device = self.info_devs[ha['dev']] - if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501 - continue - - keys = row['name'] - ClrAtMidnight.add(keys, prfx, id) - - def get_db_value(self, id: Register, not_found_result: any = None): - '''get database value''' - if id not in self.info_defs: - return not_found_result - row = self.info_defs[id] - if isinstance(row, dict): - keys = row['name'] - elm = self.db - for key in keys: - if key not in elm: - return not_found_result - elm = elm[key] - return elm - return not_found_result - - def ignore_this_device(self, dep: dict) -> bool: - '''Checks the equation in the dep(endency) dict - - returns 'False' only if the equation is valid; - 'True' in any other case''' - if 'reg' in dep: - value = self.dev_value(dep['reg']) - if not value: - return True - - if 'gte' in dep: - return value < dep['gte'] - elif 'less_eq' in dep: - return value > dep['less_eq'] - return True - - def set_pv_module_details(self, inv: dict) -> None: - pvs = {'pv1': {'manufacturer': Register.PV1_MANUFACTURER, 'model': Register.PV1_MODEL}, # noqa: E501 - 'pv2': {'manufacturer': Register.PV2_MANUFACTURER, 'model': Register.PV2_MODEL}, # noqa: E501 - 'pv3': {'manufacturer': Register.PV3_MANUFACTURER, 'model': Register.PV3_MODEL}, # noqa: E501 - 'pv4': {'manufacturer': Register.PV4_MANUFACTURER, 'model': Register.PV4_MODEL}, # noqa: E501 - 'pv5': {'manufacturer': Register.PV5_MANUFACTURER, 'model': Register.PV5_MODEL}, # noqa: E501 - 'pv6': {'manufacturer': Register.PV6_MANUFACTURER, 'model': Register.PV6_MODEL} # noqa: E501 - } - - for key, reg in pvs.items(): - if key in inv: - if 'manufacturer' in inv[key]: - self.set_db_def_value(reg['manufacturer'], - inv[key]['manufacturer']) - if 'type' in inv[key]: - self.set_db_def_value(reg['model'], 
inv[key]['type']) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py b/ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py deleted file mode 100644 index 757b883..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/inverter_base.py +++ /dev/null @@ -1,178 +0,0 @@ -import weakref -import asyncio -import logging -import traceback -import json -import gc -from aiomqtt import MqttCodeError -from asyncio import StreamReader, StreamWriter - -from inverter_ifc import InverterIfc -from proxy import Proxy -from async_stream import StreamPtr -from async_stream import AsyncStreamClient -from async_stream import AsyncStreamServer -from config import Config -from infos import Infos - -logger_mqtt = logging.getLogger('mqtt') - - -class InverterBase(InverterIfc, Proxy): - - def __init__(self, reader: StreamReader, writer: StreamWriter, - config_id: str, prot_class, - client_mode: bool = False, - remote_prot_class=None): - Proxy.__init__(self) - self._registry.append(weakref.ref(self)) - self.addr = writer.get_extra_info('peername') - self.config_id = config_id - if remote_prot_class: - self.prot_class = remote_prot_class - else: - self.prot_class = prot_class - self.__ha_restarts = -1 - self.remote = StreamPtr(None) - ifc = AsyncStreamServer(reader, writer, - self.async_publ_mqtt, - self.create_remote, - self.remote) - - self.local = StreamPtr( - prot_class(self.addr, ifc, True, client_mode), ifc - ) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc, tb) -> None: - logging.debug(f'InverterBase.__exit__() {self.addr}') - self.__del_remote() - - self.local.stream.close() - self.local.stream = None - self.local.ifc.close() - self.local.ifc = None - - # now explicitly call garbage collector to release unreachable objects - unreachable_obj = gc.collect() - logging.debug( - f'InverterBase.__exit: freed unreachable obj: {unreachable_obj}') - - def __del_remote(self): - if self.remote.stream: - self.remote.stream.close() - self.remote.stream = 
None - - if self.remote.ifc: - self.remote.ifc.close() - self.remote.ifc = None - - async def disc(self, shutdown_started=False) -> None: - if self.remote.stream: - self.remote.stream.shutdown_started = shutdown_started - if self.remote.ifc: - await self.remote.ifc.disc() - if self.local.stream: - self.local.stream.shutdown_started = shutdown_started - if self.local.ifc: - await self.local.ifc.disc() - - def healthy(self) -> bool: - logging.debug('InverterBase healthy()') - - if self.local.ifc and not self.local.ifc.healthy(): - return False - if self.remote.ifc and not self.remote.ifc.healthy(): - return False - return True - - async def create_remote(self) -> None: - '''Establish a client connection to the TSUN cloud''' - - tsun = Config.get(self.config_id) - host = tsun['host'] - port = tsun['port'] - addr = (host, port) - stream = self.local.stream - - try: - logging.info(f'[{stream.node_id}] Connect to {addr}') - connect = asyncio.open_connection(host, port) - reader, writer = await connect - ifc = AsyncStreamClient( - reader, writer, self.local, self.__del_remote) - - self.remote.ifc = ifc - if hasattr(stream, 'id_str'): - self.remote.stream = self.prot_class( - addr, ifc, server_side=False, - client_mode=False, id_str=stream.id_str) - else: - self.remote.stream = self.prot_class( - addr, ifc, server_side=False, - client_mode=False) - - logging.info(f'[{self.remote.stream.node_id}:' - f'{self.remote.stream.conn_no}] ' - f'Connected to {addr}') - asyncio.create_task(self.remote.ifc.client_loop(addr)) - - except (ConnectionRefusedError, TimeoutError) as error: - logging.info(f'{error}') - except Exception: - Infos.inc_counter('SW_Exception') - logging.error( - f"Inverter: Exception for {addr}:\n" - f"{traceback.format_exc()}") - - async def async_publ_mqtt(self) -> None: - '''publish data to MQTT broker''' - stream = self.local.stream - if not stream or not stream.unique_id: - return - # check if new inverter or collector infos are available or when the - # 
home assistant has changed the status back to online - try: - if (('inverter' in stream.new_data and stream.new_data['inverter']) - or ('collector' in stream.new_data and - stream.new_data['collector']) - or self.mqtt.ha_restarts != self.__ha_restarts): - await self._register_proxy_stat_home_assistant() - await self.__register_home_assistant(stream) - self.__ha_restarts = self.mqtt.ha_restarts - - for key in stream.new_data: - await self.__async_publ_mqtt_packet(stream, key) - for key in Infos.new_stat_data: - await Proxy._async_publ_mqtt_proxy_stat(key) - - except MqttCodeError as error: - logging.error(f'Mqtt except: {error}') - except Exception: - Infos.inc_counter('SW_Exception') - logging.error( - f"Inverter: Exception:\n" - f"{traceback.format_exc()}") - - async def __async_publ_mqtt_packet(self, stream, key): - db = stream.db.db - if key in db and stream.new_data[key]: - data_json = json.dumps(db[key]) - node_id = stream.node_id - logger_mqtt.debug(f'{key}: {data_json}') - await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 - stream.new_data[key] = False - - async def __register_home_assistant(self, stream) -> None: - '''register all our topics at home assistant''' - for data_json, component, node_id, id in stream.db.ha_confs( - self.entity_prfx, stream.node_id, stream.unique_id, - stream.sug_area): - logger_mqtt.debug(f"MQTT Register: cmp:'{component}'" - f" node_id:'{node_id}' {data_json}") - await self.mqtt.publish(f"{self.discovery_prfx}{component}" - f"/{node_id}{id}/config", data_json) - - stream.db.reg_clr_at_midnight(f'{self.entity_prfx}{stream.node_id}') diff --git a/ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py b/ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py deleted file mode 100644 index 11bd5e8..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/inverter_ifc.py +++ /dev/null @@ -1,37 +0,0 @@ -from abc import abstractmethod -import logging -from asyncio import StreamReader, StreamWriter - -from 
iter_registry import AbstractIterMeta - -logger_mqtt = logging.getLogger('mqtt') - - -class InverterIfc(metaclass=AbstractIterMeta): - _registry = [] - - @abstractmethod - def __init__(self, reader: StreamReader, writer: StreamWriter, - config_id: str, prot_class, - client_mode: bool): - pass # pragma: no cover - - @abstractmethod - def __enter__(self): - pass # pragma: no cover - - @abstractmethod - def __exit__(self, exc_type, exc, tb): - pass # pragma: no cover - - @abstractmethod - def healthy(self) -> bool: - pass # pragma: no cover - - @abstractmethod - async def disc(self, shutdown_started=False) -> None: - pass # pragma: no cover - - @abstractmethod - async def create_remote(self) -> None: - pass # pragma: no cover diff --git a/ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py b/ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py deleted file mode 100644 index ea0cd73..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/iter_registry.py +++ /dev/null @@ -1,9 +0,0 @@ -from abc import ABCMeta - - -class AbstractIterMeta(ABCMeta): - def __iter__(cls): - for ref in cls._registry: - obj = ref() - if obj is not None: - yield obj diff --git a/ha_addons/ha_addon/rootfs/home/proxy/logging.ini b/ha_addons/ha_addon/rootfs/home/proxy/logging.ini deleted file mode 100644 index 34db695..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/logging.ini +++ /dev/null @@ -1,76 +0,0 @@ -[loggers] -keys=root,tracer,mesg,conn,data,mqtt,asyncio - -[handlers] -keys=console_handler,file_handler_name1,file_handler_name2 - -[formatters] -keys=console_formatter,file_formatter - -[logger_root] -level=DEBUG -handlers=console_handler,file_handler_name1 - - -[logger_conn] -level=DEBUG -handlers=console_handler,file_handler_name1 -propagate=0 -qualname=conn - -[logger_mqtt] -level=INFO -handlers=console_handler,file_handler_name1 -propagate=0 -qualname=mqtt - -[logger_asyncio] -level=INFO -handlers=console_handler,file_handler_name1 -propagate=0 -qualname=asyncio - -[logger_data] 
-level=DEBUG -handlers=file_handler_name1 -propagate=0 -qualname=data - - -[logger_mesg] -level=DEBUG -handlers=file_handler_name2 -propagate=0 -qualname=msg - -[logger_tracer] -level=INFO -handlers=file_handler_name2 -propagate=0 -qualname=tracer - -[handler_console_handler] -class=StreamHandler -level=DEBUG -formatter=console_formatter - -[handler_file_handler_name1] -class=handlers.TimedRotatingFileHandler -level=INFO -formatter=file_formatter -args=('log/proxy.log', when:='midnight') - -[handler_file_handler_name2] -class=handlers.TimedRotatingFileHandler -level=NOTSET -formatter=file_formatter -args=('log/trace.log', when:='midnight') - -[formatter_console_formatter] -format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s' -datefmt='%Y-%m-%d %H:%M:%S - -[formatter_file_formatter] -format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s' -datefmt='%Y-%m-%d %H:%M:%S - diff --git a/ha_addons/ha_addon/rootfs/home/proxy/messages.py b/ha_addons/ha_addon/rootfs/home/proxy/messages.py deleted file mode 100644 index eecfc80..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/messages.py +++ /dev/null @@ -1,203 +0,0 @@ -import logging -import weakref -from typing import Callable -from enum import Enum - -from async_ifc import AsyncIfc -from protocol_ifc import ProtocolIfc -from infos import Infos, Register -from modbus import Modbus -from my_timer import Timer - -logger = logging.getLogger('msg') - - -def __hex_val(n, data, data_len): - line = '' - for j in range(n-16, n): - if j >= data_len: - break - line += '%02x ' % abs(data[j]) - return line - - -def __asc_val(n, data, data_len): - line = '' - for j in range(n-16, n): - if j >= data_len: - break - c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.' 
- line += '%c' % c - return line - - -def hex_dump(data, data_len) -> list: - n = 0 - lines = [] - - for i in range(0, data_len, 16): - line = ' ' - line += '%04x | ' % (i) - n += 16 - line += __hex_val(n, data, data_len) - line += ' ' * (3 * 16 + 9 - len(line)) + ' | ' - line += __asc_val(n, data, data_len) - lines.append(line) - - return lines - - -def hex_dump_str(data, data_len): - lines = hex_dump(data, data_len) - return '\n'.join(lines) - - -def hex_dump_memory(level, info, data, data_len): - lines = [] - lines.append(info) - tracer = logging.getLogger('tracer') - if not tracer.isEnabledFor(level): - return - - lines += hex_dump(data, data_len) - - tracer.log(level, '\n'.join(lines)) - - -class State(Enum): - '''state of the logical connection''' - init = 0 - '''just created''' - received = 1 - '''at least one packet received''' - up = 2 - '''at least one cmd-rsp transaction''' - pend = 3 - '''inverter transaction pending, don't send MODBUS cmds''' - closed = 4 - '''connection closed''' - - -class Message(ProtocolIfc): - MAX_START_TIME = 400 - '''maximum time without a received msg in sec''' - MAX_INV_IDLE_TIME = 120 - '''maximum time without a received msg from the inverter in sec''' - MAX_DEF_IDLE_TIME = 360 - '''maximum default time without a received msg in sec''' - MB_START_TIMEOUT = 40 - '''start delay for Modbus polling in server mode''' - MB_REGULAR_TIMEOUT = 60 - '''regular Modbus polling time in server mode''' - - def __init__(self, node_id, ifc: "AsyncIfc", server_side: bool, - send_modbus_cb: Callable[[bytes, int, str], None], - mb_timeout: int): - self._registry.append(weakref.ref(self)) - - self.server_side = server_side - self.ifc = ifc - self.node_id = node_id - if server_side: - self.mb = Modbus(send_modbus_cb, mb_timeout) - self.mb_timer = Timer(self.mb_timout_cb, self.node_id) - else: - self.mb = None - self.mb_timer = None - self.header_valid = False - self.header_len = 0 - self.data_len = 0 - self.unique_id = 0 - self.sug_area = '' - 
self.new_data = {} - self.state = State.init - self.shutdown_started = False - self.modbus_elms = 0 # for unit tests - self.mb_timeout = self.MB_REGULAR_TIMEOUT - self.mb_first_timeout = self.MB_START_TIMEOUT - '''timer value for next Modbus polling request''' - self.modbus_polling = False - - @property - def node_id(self): - return self._node_id - - @node_id.setter - def node_id(self, value): - self._node_id = value - self.ifc.set_node_id(value) - - ''' - Empty methods, that have to be implemented in any child class which - don't use asyncio - ''' - def _read(self) -> None: # read data bytes from socket and copy them - # to our _recv_buffer - return # pragma: no cover - - def _set_mqtt_timestamp(self, key, ts: float | None): - if key not in self.new_data or \ - not self.new_data[key]: - if key == 'grid': - info_id = Register.TS_GRID - elif key == 'input': - info_id = Register.TS_INPUT - elif key == 'total': - info_id = Register.TS_TOTAL - else: - return - # tstr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(ts)) - # logger.info(f'update: key: {key} ts:{tstr}' - self.db.set_db_def_value(info_id, round(ts)) - - def _timeout(self) -> int: - if self.state == State.init or self.state == State.received: - to = self.MAX_START_TIME - elif self.state == State.up and \ - self.server_side and self.modbus_polling: - to = self.MAX_INV_IDLE_TIME - else: - to = self.MAX_DEF_IDLE_TIME - return to - - def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - if self.state != State.up: - logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' - ' as the state is not UP') - return - self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) - - async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - self._send_modbus_cmd(func, addr, val, log_lvl) - - ''' - Our puplic methods - ''' - def close(self) -> None: - if self.server_side: - # set inverter state to offline, if output power is very low - logging.debug('close power: ' - 
f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') - if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: - self.db.set_db_def_value(Register.INVERTER_STATUS, 0) - self.new_data['env'] = True - self.mb_timer.close() - self.state = State.closed - self.ifc.rx_set_cb(None) - self.ifc.prot_set_timeout_cb(None) - self.ifc.prot_set_init_new_client_conn_cb(None) - self.ifc.prot_set_update_header_cb(None) - self.ifc = None - - if self.mb: - self.mb.close() - self.mb = None - # pragma: no cover - - def inc_counter(self, counter: str) -> None: - self.db.inc_counter(counter) - Infos.new_stat_data['proxy'] = True - - def dec_counter(self, counter: str) -> None: - self.db.dec_counter(counter) - Infos.new_stat_data['proxy'] = True diff --git a/ha_addons/ha_addon/rootfs/home/proxy/modbus.py b/ha_addons/ha_addon/rootfs/home/proxy/modbus.py deleted file mode 100644 index 5c64086..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/modbus.py +++ /dev/null @@ -1,345 +0,0 @@ -'''MODBUS module for TSUN inverter support - -TSUN uses the MODBUS in the RTU transmission mode over serial line. -see: https://modbus.org/docs/Modbus_Application_Protocol_V1_1b3.pdf -see: https://modbus.org/docs/Modbus_over_serial_line_V1_02.pdf - -A Modbus PDU consists of: 'Function-Code' + 'Data' -A Modbus RTU message consists of: 'Addr' + 'Modbus-PDU' + 'CRC-16' -The inverter is a MODBUS server and the proxy the MODBUS client. 
- -The 16-bit CRC is known as CRC-16-ANSI(reverse) -see: https://en.wikipedia.org/wiki/Computation_of_cyclic_redundancy_checks -''' -import struct -import logging -import asyncio -from typing import Generator, Callable - -from infos import Register, Fmt - -logger = logging.getLogger('data') - -CRC_POLY = 0xA001 # (LSBF/reverse) -CRC_INIT = 0xFFFF - - -class Modbus(): - '''Simple MODBUS implementation with TX queue and retransmit timer''' - INV_ADDR = 1 - '''MODBUS server address of the TSUN inverter''' - READ_REGS = 3 - '''MODBUS function code: Read Holding Register''' - READ_INPUTS = 4 - '''MODBUS function code: Read Input Register''' - WRITE_SINGLE_REG = 6 - '''Modbus function code: Write Single Register''' - - __crc_tab = [] - mb_reg_mapping = { - 0x2000: {'reg': Register.BOOT_STATUS, 'fmt': '!H'}, # noqa: E501 - 0x2001: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, # noqa: E501 - 0x2003: {'reg': Register.WORK_MODE, 'fmt': '!H'}, - 0x2006: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'}, - 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 - 0x2008: {'reg': Register.RATED_LEVEL, 'fmt': '!H'}, - 0x2009: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 - 0x200a: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'}, - 0x2010: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'}, - 0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 - - 0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501 - 0x3001: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501 - 0x3002: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501 - 0x3003: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501 - 0x3004: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501 - 0x3005: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501 - 0x3006: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501 - - 0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # 
noqa: E501 - 0x3009: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x300a: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x300b: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501 - # 0x300d - 0x300e: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 - 0x300f: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x3010: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x3011: {'reg': Register.PV1_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x3012: {'reg': Register.PV1_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x3013: {'reg': Register.PV2_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x3014: {'reg': Register.PV2_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x3015: {'reg': Register.PV2_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x3016: {'reg': Register.PV3_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x3017: {'reg': Register.PV3_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x3018: {'reg': Register.PV3_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x3019: {'reg': Register.PV4_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x301a: {'reg': Register.PV4_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x301b: {'reg': Register.PV4_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 - 0x301c: {'reg': Register.DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x301d: {'reg': Register.TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 - 0x301f: {'reg': Register.PV1_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x3020: {'reg': Register.PV1_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 - 0x3022: {'reg': Register.PV2_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x3023: {'reg': Register.PV2_TOTAL_GENERATION, 'fmt': '!L', 
'ratio': 0.01}, # noqa: E501 - 0x3025: {'reg': Register.PV3_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x3026: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 - 0x3028: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x3029: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 - # 0x302a - } - - def __init__(self, snd_handler: Callable[[bytes, int, str], None], - timeout: int = 1): - if not len(self.__crc_tab): - self.__build_crc_tab(CRC_POLY) - self.que = asyncio.Queue(100) - self.snd_handler = snd_handler - '''Send handler to transmit a MODBUS RTU request''' - self.rsp_handler = None - '''Response handler to forward the response''' - self.timeout = timeout - '''MODBUS response timeout in seconds''' - self.max_retries = 1 - '''Max retransmit for MODBUS requests''' - self.retry_cnt = 0 - self.last_req = b'' - self.counter = {} - '''Dictenary with statistic counter''' - self.counter['timeouts'] = 0 - self.counter['retries'] = {} - for i in range(0, self.max_retries+1): - self.counter['retries'][f'{i}'] = 0 - self.last_log_lvl = logging.DEBUG - self.last_addr = 0 - self.last_fcode = 0 - self.last_len = 0 - self.last_reg = 0 - self.err = 0 - self.loop = asyncio.get_event_loop() - self.req_pend = False - self.tim = None - self.node_id = '' - - def close(self): - """free the queue and erase the callback handlers""" - logging.debug('Modbus close:') - self.__stop_timer() - self.rsp_handler = None - self.snd_handler = None - while not self.que.empty(): - self.que.get_nowait() - - def set_node_id(self, node_id: str): - self.node_id = node_id - - def build_msg(self, addr: int, func: int, reg: int, val: int, - log_lvl=logging.DEBUG) -> None: - """Build MODBUS RTU request frame and add it to the tx queue - - Keyword arguments: - addr: RTU server address (inverter) - func: MODBUS function code - reg: 16-bit register number - val: 16 bit value - """ - msg = 
struct.pack('>BBHH', addr, func, reg, val) - msg += struct.pack(' bool: - """Add the received Modbus RTU request to the tx queue - - Keyword arguments: - buf: Modbus RTU pdu incl ADDR byte and trailing CRC - rsp_handler: Callback, if the received pdu is valid - - Returns: - True: PDU was added to the queue - False: PDU was ignored, due to an error - """ - # logging.info(f'recv_req: first byte modbus:{buf[0]} len:{len(buf)}') - if not self.__check_crc(buf): - self.err = 1 - logger.error('Modbus recv: CRC error') - return False - self.que.put_nowait({'req': buf, - 'rsp_hdl': rsp_handler, - 'log_lvl': logging.INFO}) - if self.que.qsize() == 1: - self.__send_next_from_que() - - return True - - def recv_resp(self, info_db, buf: bytes) -> \ - Generator[tuple[str, bool, int | float | str], None, None]: - """Generator which check and parse a received MODBUS response. - - Keyword arguments: - info_db: database for info lockups - buf: received Modbus RTU response frame - - Returns on error and set Self.err to: - 1: CRC error - 2: Wrong server address - 3: Unexpected function code - 4: Unexpected data length - 5: No MODBUS request pending - """ - # logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}') - - fcode = buf[1] - data_available = self.last_addr == self.INV_ADDR and \ - (fcode == 3 or fcode == 4) - - if self.__resp_error_check(buf, data_available): - return - - if data_available: - elmlen = buf[2] >> 1 - first_reg = self.last_reg # save last_reg before sending next pdu - self.__stop_timer() # stop timer and send next pdu - yield from self.__process_data(info_db, buf, first_reg, elmlen) - else: - self.__stop_timer() - - self.counter['retries'][f'{self.retry_cnt}'] += 1 - if self.rsp_handler: - self.rsp_handler() - self.__send_next_from_que() - - def __resp_error_check(self, buf: bytes, data_available: bool) -> bool: - '''Check the MODBUS response for errors, returns True if one accure''' - if not self.req_pend: - self.err = 5 - return True - if not 
self.__check_crc(buf): - logger.error(f'[{self.node_id}] Modbus resp: CRC error') - self.err = 1 - return True - if buf[0] != self.last_addr: - logger.info(f'[{self.node_id}] Modbus resp: Wrong addr {buf[0]}') - self.err = 2 - return True - fcode = buf[1] - if fcode != self.last_fcode: - logger.info(f'[{self.node_id}] Modbus: Wrong fcode {fcode}' - f' != {self.last_fcode}') - self.err = 3 - return True - if data_available: - elmlen = buf[2] >> 1 - if elmlen != self.last_len: - logger.info(f'[{self.node_id}] Modbus: len error {elmlen}' - f' != {self.last_len}') - self.err = 4 - return True - - return False - - def __process_data(self, info_db, buf: bytes, first_reg, elmlen): - '''Generator over received registers, updates the db''' - for i in range(0, elmlen): - addr = first_reg+i - if addr in self.mb_reg_mapping: - row = self.mb_reg_mapping[addr] - info_id = row['reg'] - keys, level, unit, must_incr = info_db._key_obj(info_id) - if keys: - result = Fmt.get_value(buf, 3+2*i, row) - name, update = info_db.update_db(keys, must_incr, - result) - yield keys[0], update, result - if update: - info_db.tracer.log(level, - f'[{self.node_id}] MODBUS: {name}' - f' : {result}{unit}') - - ''' - MODBUS response timer - ''' - def __start_timer(self) -> None: - '''Start response timer and set `req_pend` to True''' - self.req_pend = True - self.tim = self.loop.call_later(self.timeout, self.__timeout_cb) - # logging.debug(f'Modbus start timer {self}') - - def __stop_timer(self) -> None: - '''Stop response timer and set `req_pend` to False''' - self.req_pend = False - # logging.debug(f'Modbus stop timer {self}') - if self.tim: - self.tim.cancel() - self.tim = None - - def __timeout_cb(self) -> None: - '''Rsponse timeout handler retransmit pdu or send next pdu''' - self.req_pend = False - - if self.retry_cnt < self.max_retries: - logger.debug(f'Modbus retrans {self}') - self.retry_cnt += 1 - self.__start_timer() - self.snd_handler(self.last_req, self.last_log_lvl, state='Retrans') - 
else: - logger.info(f'[{self.node_id}] Modbus timeout ' - f'(FCode: {self.last_fcode} ' - f'Reg: 0x{self.last_reg:04x}, ' - f'{self.last_len})') - self.counter['timeouts'] += 1 - self.__send_next_from_que() - - def __send_next_from_que(self) -> None: - '''Get next MODBUS pdu from queue and transmit it''' - if self.req_pend: - return - try: - item = self.que.get_nowait() - req = item['req'] - self.last_req = req - self.rsp_handler = item['rsp_hdl'] - self.last_log_lvl = item['log_lvl'] - self.last_addr = req[0] - self.last_fcode = req[1] - - res = struct.unpack_from('>HH', req, 2) - self.last_reg = res[0] - self.last_len = res[1] - self.retry_cnt = 0 - self.__start_timer() - self.snd_handler(self.last_req, self.last_log_lvl, state='Command') - except asyncio.QueueEmpty: - pass - - ''' - Helper function for CRC-16 handling - ''' - def __check_crc(self, msg: bytes) -> bool: - '''Check CRC-16 and returns True if valid''' - return 0 == self.__calc_crc(msg) - - def __calc_crc(self, buffer: bytes) -> int: - '''Build CRC-16 for buffer and returns it''' - crc = CRC_INIT - - for cur in buffer: - crc = (crc >> 8) ^ self.__crc_tab[(crc ^ cur) & 0xFF] - return crc - - def __build_crc_tab(self, poly: int) -> None: - '''Build CRC-16 helper table, must be called exactly one time''' - for index in range(256): - data = index << 1 - crc = 0 - for _ in range(8, 0, -1): - data >>= 1 - if (data ^ crc) & 1: - crc = (crc >> 1) ^ poly - else: - crc >>= 1 - self.__crc_tab.append(crc) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py b/ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py deleted file mode 100644 index f3788d4..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/modbus_tcp.py +++ /dev/null @@ -1,88 +0,0 @@ -import logging -import traceback -import asyncio - -from config import Config -from gen3plus.inverter_g3p import InverterG3P -from infos import Infos - -logger = logging.getLogger('conn') - - -class ModbusConn(): - def __init__(self, host, port): - self.host = 
host - self.port = port - self.addr = (host, port) - self.inverter = None - - async def __aenter__(self) -> 'InverterG3P': - '''Establish a client connection to the TSUN cloud''' - connection = asyncio.open_connection(self.host, self.port) - reader, writer = await connection - self.inverter = InverterG3P(reader, writer, - client_mode=True) - self.inverter.__enter__() - stream = self.inverter.local.stream - logging.info(f'[{stream.node_id}:{stream.conn_no}] ' - f'Connected to {self.addr}') - Infos.inc_counter('Inverter_Cnt') - await self.inverter.local.ifc.publish_outstanding_mqtt() - return self.inverter - - async def __aexit__(self, exc_type, exc, tb): - Infos.dec_counter('Inverter_Cnt') - await self.inverter.local.ifc.publish_outstanding_mqtt() - self.inverter.__exit__(exc_type, exc, tb) - - -class ModbusTcp(): - - def __init__(self, loop, tim_restart=10) -> None: - self.tim_restart = tim_restart - - inverters = Config.get('inverters') - # logging.info(f'Inverters: {inverters}') - - for inv in inverters.values(): - if (type(inv) is dict - and 'monitor_sn' in inv - and 'client_mode' in inv): - client = inv['client_mode'] - # logging.info(f"SerialNo:{inv['monitor_sn']} host:{client['host']} port:{client['port']}") # noqa: E501 - loop.create_task(self.modbus_loop(client['host'], - client['port'], - inv['monitor_sn'], - client['forward'])) - - async def modbus_loop(self, host, port, - snr: int, forward: bool) -> None: - '''Loop for receiving messages from the TSUN cloud (client-side)''' - while True: - try: - async with ModbusConn(host, port) as inverter: - stream = inverter.local.stream - await stream.send_start_cmd(snr, host, forward) - await stream.ifc.loop() - logger.info(f'[{stream.node_id}:{stream.conn_no}] ' - f'Connection closed - Shutdown: ' - f'{stream.shutdown_started}') - if stream.shutdown_started: - return - del inverter # decrease ref counter after the with block - - except (ConnectionRefusedError, TimeoutError) as error: - 
logging.debug(f'Inv-conn:{error}') - - except OSError as error: - if error.errno == 113: # pragma: no cover - logging.debug(f'os-error:{error}') - else: - logging.info(f'os-error: {error}') - - except Exception: - logging.error( - f"ModbusTcpCreate: Exception for {(host, port)}:\n" - f"{traceback.format_exc()}") - - await asyncio.sleep(self.tim_restart) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/mqtt.py b/ha_addons/ha_addon/rootfs/home/proxy/mqtt.py deleted file mode 100644 index f52b797..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/mqtt.py +++ /dev/null @@ -1,182 +0,0 @@ -import asyncio -import logging -import aiomqtt -import traceback - -from modbus import Modbus -from messages import Message -from config import Config -from singleton import Singleton - -logger_mqtt = logging.getLogger('mqtt') - - -class Mqtt(metaclass=Singleton): - __client = None - __cb_mqtt_is_up = None - - def __init__(self, cb_mqtt_is_up): - logger_mqtt.debug('MQTT: __init__') - if cb_mqtt_is_up: - self.__cb_mqtt_is_up = cb_mqtt_is_up - loop = asyncio.get_event_loop() - self.task = loop.create_task(self.__loop()) - self.ha_restarts = 0 - - ha = Config.get('ha') - self.ha_status_topic = f"{ha['auto_conf_prefix']}/status" - self.mb_rated_topic = f"{ha['entity_prefix']}/+/rated_load" - self.mb_out_coeff_topic = f"{ha['entity_prefix']}/+/out_coeff" - self.mb_reads_topic = f"{ha['entity_prefix']}/+/modbus_read_regs" - self.mb_inputs_topic = f"{ha['entity_prefix']}/+/modbus_read_inputs" - self.mb_at_cmd_topic = f"{ha['entity_prefix']}/+/at_cmd" - - @property - def ha_restarts(self): - return self._ha_restarts - - @ha_restarts.setter - def ha_restarts(self, value): - self._ha_restarts = value - - async def close(self) -> None: - logger_mqtt.debug('MQTT: close') - self.task.cancel() - try: - await self.task - - except (asyncio.CancelledError, Exception) as e: - logging.debug(f"Mqtt.close: exception: {e} ...") - - async def publish(self, topic: str, payload: str | bytes | bytearray - | 
int | float | None = None) -> None: - if self.__client: - await self.__client.publish(topic, payload) - - async def __loop(self) -> None: - mqtt = Config.get('mqtt') - logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:' - f'{mqtt["port"]} ' - f'user:{mqtt["user"]}') - self.__client = aiomqtt.Client(hostname=mqtt['host'], - port=mqtt['port'], - username=mqtt['user'], - password=mqtt['passwd']) - - interval = 5 # Seconds - - while True: - try: - async with self.__client: - logger_mqtt.info('MQTT broker connection established') - - if self.__cb_mqtt_is_up: - await self.__cb_mqtt_is_up() - - await self.__client.subscribe(self.ha_status_topic) - await self.__client.subscribe(self.mb_rated_topic) - await self.__client.subscribe(self.mb_out_coeff_topic) - await self.__client.subscribe(self.mb_reads_topic) - await self.__client.subscribe(self.mb_inputs_topic) - await self.__client.subscribe(self.mb_at_cmd_topic) - - async for message in self.__client.messages: - await self.dispatch_msg(message) - - except aiomqtt.MqttError: - if Config.is_default('mqtt'): - logger_mqtt.info( - "MQTT is unconfigured; Check your config.toml!") - interval = 30 - else: - interval = 5 # Seconds - logger_mqtt.info( - f"Connection lost; Reconnecting in {interval}" - " seconds ...") - - await asyncio.sleep(interval) - except asyncio.CancelledError: - logger_mqtt.debug("MQTT task cancelled") - self.__client = None - return - except Exception: - # self.inc_counter('SW_Exception') # fixme - logger_mqtt.error( - f"Exception:\n" - f"{traceback.format_exc()}") - - async def dispatch_msg(self, message): - if message.topic.matches(self.ha_status_topic): - status = message.payload.decode("UTF-8") - logger_mqtt.info('Home-Assistant Status:' - f' {status}') - if status == 'online': - self.ha_restarts += 1 - await self.__cb_mqtt_is_up() - - if message.topic.matches(self.mb_rated_topic): - await self.modbus_cmd(message, - Modbus.WRITE_SINGLE_REG, - 1, 0x2008) - - if 
message.topic.matches(self.mb_out_coeff_topic): - payload = message.payload.decode("UTF-8") - try: - val = round(float(payload) * 1024/100) - if val < 0 or val > 1024: - logger_mqtt.error('out_coeff: value must be in' - 'the range 0..100,' - f' got: {payload}') - else: - await self.modbus_cmd(message, - Modbus.WRITE_SINGLE_REG, - 0, 0x202c, val) - except Exception: - pass - - if message.topic.matches(self.mb_reads_topic): - await self.modbus_cmd(message, - Modbus.READ_REGS, 2) - - if message.topic.matches(self.mb_inputs_topic): - await self.modbus_cmd(message, - Modbus.READ_INPUTS, 2) - - if message.topic.matches(self.mb_at_cmd_topic): - await self.at_cmd(message) - - def each_inverter(self, message, func_name: str): - topic = str(message.topic) - node_id = topic.split('/')[1] + '/' - for m in Message: - if m.server_side and (m.node_id == node_id): - logger_mqtt.debug(f'Found: {node_id}') - fnc = getattr(m, func_name, None) - if callable(fnc): - yield fnc - else: - logger_mqtt.warning(f'Cmd not supported by: {node_id}') - break - - else: - logger_mqtt.warning(f'Node_id: {node_id} not found') - - async def modbus_cmd(self, message, func, params=0, addr=0, val=0): - payload = message.payload.decode("UTF-8") - for fnc in self.each_inverter(message, "send_modbus_cmd"): - res = payload.split(',') - if params > 0 and params != len(res): - logger_mqtt.error(f'Parameter expected: {params}, ' - f'got: {len(res)}') - return - if params == 1: - val = int(payload) - elif params == 2: - addr = int(res[0], base=16) - val = int(res[1]) # lenght - await fnc(func, addr, val, logging.INFO) - - async def at_cmd(self, message): - payload = message.payload.decode("UTF-8") - for fnc in self.each_inverter(message, "send_at_cmd"): - await fnc(payload) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/my_timer.py b/ha_addons/ha_addon/rootfs/home/proxy/my_timer.py deleted file mode 100644 index 46435bd..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/my_timer.py +++ /dev/null @@ -1,35 
+0,0 @@ -import asyncio -import logging -from itertools import count - - -class Timer: - def __init__(self, cb, id_str: str = ''): - self.__timeout_cb = cb - self.loop = asyncio.get_event_loop() - self.tim = None - self.id_str = id_str - self.exp_count = count(0) - - def start(self, timeout: float) -> None: - '''Start timer with timeout seconds''' - if self.tim: - self.tim.cancel() - self.tim = self.loop.call_later(timeout, self.__timeout) - logging.debug(f'[{self.id_str}]Start timer') - - def stop(self) -> None: - '''Stop timer''' - logging.debug(f'[{self.id_str}]Stop timer') - if self.tim: - self.tim.cancel() - self.tim = None - - def __timeout(self) -> None: - '''timer expired handler''' - logging.debug(f'[{self.id_str}]Timer expired') - self.__timeout_cb(next(self.exp_count)) - - def close(self) -> None: - self.stop() - self.__timeout_cb = None diff --git a/ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py b/ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py deleted file mode 100644 index 3b6c886..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/protocol_ifc.py +++ /dev/null @@ -1,17 +0,0 @@ -from abc import abstractmethod - -from async_ifc import AsyncIfc -from iter_registry import AbstractIterMeta - - -class ProtocolIfc(metaclass=AbstractIterMeta): - _registry = [] - - @abstractmethod - def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, - client_mode: bool = False, id_str=b''): - pass # pragma: no cover - - @abstractmethod - def close(self): - pass # pragma: no cover diff --git a/ha_addons/ha_addon/rootfs/home/proxy/proxy.py b/ha_addons/ha_addon/rootfs/home/proxy/proxy.py deleted file mode 100644 index eadc3ac..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/proxy.py +++ /dev/null @@ -1,101 +0,0 @@ -import asyncio -import logging -import json - -from config import Config -from mqtt import Mqtt -from infos import Infos - -logger_mqtt = logging.getLogger('mqtt') - - -class Proxy(): - '''class Proxy is a baseclass - - The class has some class 
method for managing common resources like a - connection to the MQTT broker or proxy error counter which are common - for all inverter connection - - Instances of the class are connections to an inverter and can have an - optional link to an remote connection to the TSUN cloud. A remote - connection dies with the inverter connection. - - class methods: - class_init(): initialize the common resources of the proxy (MQTT - broker, Proxy DB, etc). Must be called before the - first inverter instance can be created - class_close(): release the common resources of the proxy. Should not - be called before any instances of the class are - destroyed - - methods: - create_remote(): Establish a client connection to the TSUN cloud - async_publ_mqtt(): Publish data to MQTT broker - ''' - @classmethod - def class_init(cls) -> None: - logging.debug('Proxy.class_init') - # initialize the proxy statistics - Infos.static_init() - cls.db_stat = Infos() - - ha = Config.get('ha') - cls.entity_prfx = ha['entity_prefix'] + '/' - cls.discovery_prfx = ha['discovery_prefix'] + '/' - cls.proxy_node_id = ha['proxy_node_id'] + '/' - cls.proxy_unique_id = ha['proxy_unique_id'] - - # call Mqtt singleton to establisch the connection to the mqtt broker - cls.mqtt = Mqtt(cls._cb_mqtt_is_up) - - # register all counters which should be reset at midnight. - # This is needed if the proxy is restated before midnight - # and the inverters are offline, cause the normal refgistering - # needs an update on the counters. - # Without this registration here the counters would not be - # reset at midnight when you restart the proxy just before - # midnight! 
- inverters = Config.get('inverters') - # logger.debug(f'Proxys: {inverters}') - for inv in inverters.values(): - if (type(inv) is dict): - node_id = inv['node_id'] - cls.db_stat.reg_clr_at_midnight(f'{cls.entity_prfx}{node_id}', - check_dependencies=False) - - @classmethod - async def _cb_mqtt_is_up(cls) -> None: - logging.info('Initialize proxy device on home assistant') - # register proxy status counters at home assistant - await cls._register_proxy_stat_home_assistant() - - # send values of the proxy status counters - await asyncio.sleep(0.5) # wait a bit, before sending data - Infos.new_stat_data['proxy'] = True # force sending data to sync ha - await cls._async_publ_mqtt_proxy_stat('proxy') - - @classmethod - async def _register_proxy_stat_home_assistant(cls) -> None: - '''register all our topics at home assistant''' - for data_json, component, node_id, id in cls.db_stat.ha_proxy_confs( - cls.entity_prfx, cls.proxy_node_id, cls.proxy_unique_id): - logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") # noqa: E501 - await cls.mqtt.publish(f'{cls.discovery_prfx}{component}/{node_id}{id}/config', data_json) # noqa: E501 - - @classmethod - async def _async_publ_mqtt_proxy_stat(cls, key) -> None: - stat = Infos.stat - if key in stat and Infos.new_stat_data[key]: - data_json = json.dumps(stat[key]) - node_id = cls.proxy_node_id - logger_mqtt.debug(f'{key}: {data_json}') - await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}", - data_json) - Infos.new_stat_data[key] = False - - @classmethod - def class_close(cls, loop) -> None: # pragma: no cover - logging.debug('Proxy.class_close') - logging.info('Close MQTT Task') - loop.run_until_complete(cls.mqtt.close()) - cls.mqtt = None diff --git a/ha_addons/ha_addon/rootfs/home/proxy/scheduler.py b/ha_addons/ha_addon/rootfs/home/proxy/scheduler.py deleted file mode 100644 index 3c1d25a..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/scheduler.py +++ /dev/null @@ -1,30 +0,0 @@ -import 
logging -import json -from mqtt import Mqtt -from aiocron import crontab -from infos import ClrAtMidnight - -logger_mqtt = logging.getLogger('mqtt') - - -class Schedule: - mqtt = None - count = 0 - - @classmethod - def start(cls) -> None: - '''Start the scheduler and schedule the tasks (cron jobs)''' - logging.debug("Scheduler init") - cls.mqtt = Mqtt(None) - - crontab('0 0 * * *', func=cls.atmidnight, start=True) - - @classmethod - async def atmidnight(cls) -> None: - '''Clear daily counters at midnight''' - logging.info("Clear daily counters at midnight") - - for key, data in ClrAtMidnight.elm(): - logger_mqtt.debug(f'{key}: {data}') - data_json = json.dumps(data) - await cls.mqtt.publish(f"{key}", data_json) diff --git a/ha_addons/ha_addon/rootfs/home/proxy/server.py b/ha_addons/ha_addon/rootfs/home/proxy/server.py deleted file mode 100644 index cda8501..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/server.py +++ /dev/null @@ -1,191 +0,0 @@ -import logging -import asyncio -import signal -import os -from asyncio import StreamReader, StreamWriter -from aiohttp import web -from logging import config # noqa F401 -from proxy import Proxy -from inverter_ifc import InverterIfc -from gen3.inverter_g3 import InverterG3 -from gen3plus.inverter_g3p import InverterG3P -from scheduler import Schedule -from config import Config -from modbus_tcp import ModbusTcp - -routes = web.RouteTableDef() -proxy_is_up = False - - -@routes.get('/') -async def hello(request): - return web.Response(text="Hello, world") - - -@routes.get('/-/ready') -async def ready(request): - if proxy_is_up: - status = 200 - text = 'Is ready' - else: - status = 503 - text = 'Not ready' - return web.Response(status=status, text=text) - - -@routes.get('/-/healthy') -async def healthy(request): - - if proxy_is_up: - # logging.info('web reqeust healthy()') - for inverter in InverterIfc: - try: - res = inverter.healthy() - if not res: - return web.Response(status=503, text="I have a problem") - except 
Exception as err: - logging.info(f'Exception:{err}') - - return web.Response(status=200, text="I'm fine") - - -async def webserver(addr, port): - '''coro running our webserver''' - app = web.Application() - app.add_routes(routes) - runner = web.AppRunner(app) - - await runner.setup() - site = web.TCPSite(runner, addr, port) - await site.start() - logging.info(f'HTTP server listen on port: {port}') - - try: - # Normal interaction with aiohttp - while True: - await asyncio.sleep(3600) # sleep forever - except asyncio.CancelledError: - logging.info('HTTP server cancelled') - await runner.cleanup() - logging.debug('HTTP cleanup done') - - -async def handle_client(reader: StreamReader, writer: StreamWriter, inv_class): - '''Handles a new incoming connection and starts an async loop''' - - with inv_class(reader, writer) as inv: - await inv.local.ifc.server_loop() - - -async def handle_shutdown(web_task): - '''Close all TCP connections and stop the event loop''' - - logging.info('Shutdown due to SIGTERM') - global proxy_is_up - proxy_is_up = False - - # - # first, disc all open TCP connections gracefully - # - for inverter in InverterIfc: - await inverter.disc(True) - - logging.info('Proxy disconnecting done') - - # - # second, cancel the web server - # - web_task.cancel() - await web_task - - # - # now cancel all remaining (pending) tasks - # - pending = asyncio.all_tasks() - for task in pending: - task.cancel() - - # - # at last, start a coro for stopping the loop - # - logging.debug("Stop event loop") - loop.stop() - - -def get_log_level() -> int: - '''checks if LOG_LVL is set in the environment and returns the - corresponding logging.LOG_LEVEL''' - log_level = os.getenv('LOG_LVL', 'INFO') - if log_level == 'DEBUG': - log_level = logging.DEBUG - elif log_level == 'WARN': - log_level = logging.WARNING - else: - log_level = logging.INFO - return log_level - - -if __name__ == "__main__": - # - # Setup our daily, rotating logger - # - serv_name = os.getenv('SERVICE_NAME', 
'proxy') - version = os.getenv('VERSION', 'unknown') - - logging.config.fileConfig('logging.ini') - logging.info(f'Server "{serv_name} - {version}" will be started') - - # set lowest-severity for 'root', 'msg', 'conn' and 'data' logger - log_level = get_log_level() - logging.getLogger().setLevel(log_level) - logging.getLogger('msg').setLevel(log_level) - logging.getLogger('conn').setLevel(log_level) - logging.getLogger('data').setLevel(log_level) - logging.getLogger('tracer').setLevel(log_level) - logging.getLogger('asyncio').setLevel(log_level) - # logging.getLogger('mqtt').setLevel(log_level) - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - # read config file - ConfigErr = Config.class_init() - if ConfigErr is not None: - logging.info(f'ConfigErr: {ConfigErr}') - Proxy.class_init() - Schedule.start() - ModbusTcp(loop) - - # - # Create tasks for our listening servers. These must be tasks! If we call - # start_server directly out of our main task, the eventloop will be blocked - # and we can't receive and handle the UNIX signals! 
- # - for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]: - loop.create_task(asyncio.start_server(lambda r, w, i=inv_class: - handle_client(r, w, i), - '0.0.0.0', port)) - web_task = loop.create_task(webserver('0.0.0.0', 8127)) - - # - # Register some UNIX Signal handler for a gracefully server shutdown - # on Docker restart and stop - # - for signame in ('SIGINT', 'SIGTERM'): - loop.add_signal_handler(getattr(signal, signame), - lambda loop=loop: asyncio.create_task( - handle_shutdown(web_task))) - - loop.set_debug(log_level == logging.DEBUG) - try: - if ConfigErr is None: - proxy_is_up = True - loop.run_forever() - except KeyboardInterrupt: - pass - finally: - logging.info("Event loop is stopped") - Proxy.class_close(loop) - logging.debug('Close event loop') - loop.close() - logging.info(f'Finally, exit Server "{serv_name}"') diff --git a/ha_addons/ha_addon/rootfs/home/proxy/singleton.py b/ha_addons/ha_addon/rootfs/home/proxy/singleton.py deleted file mode 100644 index 8222146..0000000 --- a/ha_addons/ha_addon/rootfs/home/proxy/singleton.py +++ /dev/null @@ -1,14 +0,0 @@ -from weakref import WeakValueDictionary - - -class Singleton(type): - _instances = WeakValueDictionary() - - def __call__(cls, *args, **kwargs): - # logger_mqtt.debug('singleton: __call__') - if cls not in cls._instances: - instance = super(Singleton, - cls).__call__(*args, **kwargs) - cls._instances[cls] = instance - - return cls._instances[cls] diff --git a/ha_addons/ha_addon/rootfs/requirements.txt b/ha_addons/ha_addon/rootfs/requirements.txt deleted file mode 100644 index 1fb1c53..0000000 --- a/ha_addons/ha_addon/rootfs/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ - aiomqtt==2.3.0 - schema==0.7.7 - aiocron==1.8 - aiohttp==3.10.11 \ No newline at end of file diff --git a/ha_addon/rootfs/run.sh b/ha_addons/ha_addon/rootfs/run.sh similarity index 77% rename from ha_addon/rootfs/run.sh rename to ha_addons/ha_addon/rootfs/run.sh index d937ac8..7183c12 100755 --- 
a/ha_addon/rootfs/run.sh +++ b/ha_addons/ha_addon/rootfs/run.sh @@ -23,12 +23,16 @@ fi cd /home || exit +# Erstelle Ordner für log und config +mkdir -p proxy/log +mkdir -p proxy/config -echo "Erstelle config.toml" +echo "Create config.toml..." python3 create_config_toml.py - cd /home/proxy || exit -echo "Starte Webserver" +export VERSION=$(cat /proxy-version.txt) + +echo "Start Proxyserver..." python3 server.py diff --git a/ha_addons/ha_addon/tests/test_create_config_toml.py b/ha_addons/ha_addon/tests/test_create_config_toml.py new file mode 100644 index 0000000..3776b41 --- /dev/null +++ b/ha_addons/ha_addon/tests/test_create_config_toml.py @@ -0,0 +1,190 @@ +# test_with_pytest.py +import pytest +import tomllib +from mock import patch +from cnf.config import Config + +from home.create_config_toml import create_config +from test_config import ConfigComplete, ConfigMinimum + + + +class FakeBuffer: + rd = bytearray() + wr = str() + + +test_buffer = FakeBuffer + + +class FakeFile(): + def __init__(self): + self.buf = test_buffer + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb): + pass + + +class FakeOptionsFile(FakeFile): + def read(self): + return self.buf.rd + + +class FakeConfigFile(FakeFile): + def write(self, data: str): + self.buf.wr += data + + +@pytest.fixture +def patch_open(): + + def new_open(file: str, OpenTextMode="r"): + if file == '/data/options.json': + return FakeOptionsFile() + elif file == '/home/proxy/config/config.toml': + # write_buffer += 'bla1'.encode('utf-8') + return FakeConfigFile() + + raise TimeoutError + + with patch('builtins.open', new_open) as conn: + yield conn + +@pytest.fixture +def ConfigTomlEmpty(): + return { + 'gen3plus': {'at_acl': {'mqtt': {'allow': [], 'block': []}, + 'tsun': {'allow': [], 'block': []}}}, + 'ha': {'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'proxy', + 'proxy_unique_id': 'P170000000000001'}, + 
'inverters': { + 'allow_all': False + }, + 'mqtt': {'host': 'mqtt', 'passwd': '', 'port': 1883, 'user': ''}, + 'solarman': { + 'enabled': True, + 'host': 'iot.talent-monitoring.com', + 'port': 10000, + }, + 'tsun': { + 'enabled': True, + 'host': 'logger.talent-monitoring.com', + 'port': 5005, + }, + } + +def test_no_config(patch_open, ConfigTomlEmpty): + _ = patch_open + test_buffer.wr = "" + test_buffer.rd = "" # empty buffer, no json + create_config() + cnf = tomllib.loads(test_buffer.wr) + assert cnf == ConfigTomlEmpty + +def test_empty_config(patch_open, ConfigTomlEmpty): + _ = patch_open + test_buffer.wr = "" + test_buffer.rd = "{}" # empty json + create_config() + cnf = tomllib.loads(test_buffer.wr) + assert cnf == ConfigTomlEmpty + +def test_full_config(patch_open, ConfigComplete): + _ = patch_open + test_buffer.wr = "" + test_buffer.rd = """ +{ + "inverters": [ + { + "serial": "R170000000000001", + "node_id": "PV-Garage", + "suggested_area": "Garage", + "modbus_polling": false, + "pv1_manufacturer": "man1", + "pv1_type": "type1", + "pv2_manufacturer": "man2", + "pv2_type": "type2" + }, + { + "serial": "Y170000000000001", + "monitor_sn": 2000000000, + "node_id": "PV-Garage2", + "suggested_area": "Garage2", + "modbus_polling": true, + "client_mode_host": "InverterIP", + "client_mode_port": 1234, + "pv1_manufacturer": "man1", + "pv1_type": "type1", + "pv2_manufacturer": "man2", + "pv2_type": "type2", + "pv3_manufacturer": "man3", + "pv3_type": "type3", + "pv4_manufacturer": "man4", + "pv4_type": "type4" + } + ], + "tsun.enabled": true, + "solarman.enabled": true, + "inverters.allow_all": false, + "gen3plus.at_acl.tsun.allow": [ + "AT+Z", + "AT+UPURL", + "AT+SUPDATE" + ], + "gen3plus.at_acl.tsun.block": [ + "AT+SUPDATE" + ], + "gen3plus.at_acl.mqtt.allow": [ + "AT+" + ], + "gen3plus.at_acl.mqtt.block": [ + "AT+SUPDATE" + ] +} +""" + create_config() + cnf = tomllib.loads(test_buffer.wr) + + validated = Config.conf_schema.validate(cnf) + assert validated == 
ConfigComplete + +def test_minimum_config(patch_open, ConfigMinimum): + _ = patch_open + test_buffer.wr = "" + test_buffer.rd = """ +{ + "inverters": [ + { + "serial": "R170000000000001", + "monitor_sn": 0, + "node_id": "", + "suggested_area": "", + "modbus_polling": true, + "client_mode_host": "InverterIP", + "client_mode_port": 1234 + } + ], + "tsun.enabled": true, + "solarman.enabled": true, + "inverters.allow_all": true, + "gen3plus.at_acl.tsun.allow": [ + "AT+Z", + "AT+UPURL", + "AT+SUPDATE" + ], + "gen3plus.at_acl.mqtt.allow": [ + "AT+" + ] +} +""" + create_config() + cnf = tomllib.loads(test_buffer.wr) + + validated = Config.conf_schema.validate(cnf) + assert validated == ConfigMinimum diff --git a/ha_addon/translations/en.yaml b/ha_addons/ha_addon/translations/en.yaml similarity index 64% rename from ha_addon/translations/en.yaml rename to ha_addons/ha_addon/translations/en.yaml index 99e0501..9331e48 100755 --- a/ha_addon/translations/en.yaml +++ b/ha_addons/ha_addon/translations/en.yaml @@ -2,7 +2,7 @@ configuration: inverters: name: Inverters - description: >- + description: >+ For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT definition. To do this, the corresponding configuration block is started with <16-digit serial number> so that all subsequent parameters are assigned @@ -11,6 +11,7 @@ configuration: The serial numbers of all GEN3 inverters start with `R17`! + monitor_sn # The GEN3PLUS "Monitoring SN:" node_id # MQTT replacement for inverters serial number suggested_area # suggested installation area for home-assistant modbus_polling # Disable optional MODBUS polling @@ -18,21 +19,28 @@ configuration: pv2 # Optional, PV module descr tsun.enabled: - name: Connection to TSUN Cloud + name: Connection to TSUN Cloud - for GEN3 inverter only description: >- - disable connecting to the tsun cloud avoids updates. - The Inverter become isolated from Internet if switched on. 
+ switch on/off connection to the TSUN cloud + This connection is only required if you want send data to the TSUN cloud + eg. to use the TSUN APPs or receive firmware updates. + + on - normal proxy operation + off - The Inverter become isolated from Internet solarman.enabled: - name: Connection to Solarman Cloud + name: Connection to Solarman Cloud - for GEN3PLUS inverter only description: >- - disables connecting to the Solarman cloud avoids updates. - The Inverter become isolated from Internet if switched on. + switch on/off connection to the Solarman cloud + This connection is only required if you want send data to the Solarman cloud + eg. to use the Solarman APPs or receive firmware updates. + + on - normal proxy operation + off - The Inverter become isolated from Internet inverters.allow_all: name: Allow all connections from all inverters description: >- The proxy only usually accepts connections from known inverters. - This can be switched off for test purposes and unknown serial - numbers are also accepted. + Switch on for test purposes and unknown serial numbers. mqtt.host: name: MQTT Broker Host description: >- @@ -59,6 +67,17 @@ configuration: name: MQTT node id, for the proxy_node_id ha.proxy_unique_id: name: MQTT unique id, to identify a proxy instance + tsun.host: + name: TSUN Cloud Host + description: >- + Hostname or IP address of the TSUN cloud. if not set, the addon will try to connect to the cloud default + on logger.talent-monitoring.com + solarman.host: + name: Solarman Cloud Host + description: >- + Hostname or IP address of the Solarman cloud. if not set, the addon will try to connect to the cloud default + on iot.talent-monitoring.com + network: 8127/tcp: x... 
diff --git a/ha_addons/repository.yaml b/ha_addons/repository.yaml new file mode 100644 index 0000000..42a9fc5 --- /dev/null +++ b/ha_addons/repository.yaml @@ -0,0 +1,3 @@ +name: TSUN-Proxy +url: https://github.com/s-allius/tsun-gen3-proxy/ha_addons +maintainer: Stefan Allius \ No newline at end of file diff --git a/pytest.ini b/pytest.ini index 71e8374..aafab51 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,7 +2,7 @@ [pytest] minversion = 8.0 addopts = -ra -q --durations=5 -pythonpath = app/src -testpaths = app/tests +pythonpath = app/src app/tests ha_addons/ha_addon/rootfs +testpaths = app/tests ha_addons/ha_addon/tests asyncio_default_fixture_loop_scope = function asyncio_mode = strict \ No newline at end of file diff --git a/sonar-project.properties b/sonar-project.properties index 61d8dbd..f015e81 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -7,13 +7,13 @@ sonar.projectName=tsun-gen3-proxy # Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. -sonar.sources=app/src/ +sonar.sources=app/src/,ha_addons/ha_addon/rootfs/home/ # Encoding of the source code. 
Default is default system encoding #sonar.sourceEncoding=UTF-8 sonar.python.version=3.12 -sonar.tests=system_tests/,app/tests/ +sonar.tests=system_tests/,app/tests/,ha_addons/ha_addon/tests/ sonar.exclusions=**/.vscode/**/* # Name your criteria sonar.issue.ignore.multicriteria=e1,e2 From 47a89c269fa774abdac06d5e1c6ccd68c02a550c Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 3 Dec 2024 22:48:52 +0100 Subject: [PATCH 29/32] fix some flake8 warnings --- .vscode/settings.json | 2 +- ha_addons/ha_addon/tests/test_create_config_toml.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index a17eb69..5b28c72 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -16,7 +16,7 @@ "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "flake8.args": [ - "--extend-exclude=app/tests/*.py system_tests/*.py ha_addons/ha_addon/tests/*.py" + "--extend-exclude=app/tests/*.py,system_tests/*.py,ha_addons/ha_addon/tests/*.py" ], "sonarlint.connectedMode.project": { "connectionId": "s-allius", diff --git a/ha_addons/ha_addon/tests/test_create_config_toml.py b/ha_addons/ha_addon/tests/test_create_config_toml.py index 3776b41..3fd715e 100644 --- a/ha_addons/ha_addon/tests/test_create_config_toml.py +++ b/ha_addons/ha_addon/tests/test_create_config_toml.py @@ -8,7 +8,6 @@ from home.create_config_toml import create_config from test_config import ConfigComplete, ConfigMinimum - class FakeBuffer: rd = bytearray() wr = str() @@ -53,6 +52,7 @@ def patch_open(): with patch('builtins.open', new_open) as conn: yield conn + @pytest.fixture def ConfigTomlEmpty(): return { @@ -79,6 +79,7 @@ def ConfigTomlEmpty(): }, } + def test_no_config(patch_open, ConfigTomlEmpty): _ = patch_open test_buffer.wr = "" @@ -87,6 +88,7 @@ def test_no_config(patch_open, ConfigTomlEmpty): cnf = tomllib.loads(test_buffer.wr) assert cnf == ConfigTomlEmpty + def test_empty_config(patch_open, ConfigTomlEmpty): _ = 
patch_open test_buffer.wr = "" @@ -95,6 +97,7 @@ def test_empty_config(patch_open, ConfigTomlEmpty): cnf = tomllib.loads(test_buffer.wr) assert cnf == ConfigTomlEmpty + def test_full_config(patch_open, ConfigComplete): _ = patch_open test_buffer.wr = "" @@ -154,6 +157,7 @@ def test_full_config(patch_open, ConfigComplete): validated = Config.conf_schema.validate(cnf) assert validated == ConfigComplete + def test_minimum_config(patch_open, ConfigMinimum): _ = patch_open test_buffer.wr = "" From ac7b02bde9ad1904cf54de941c667bdde3946105 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 3 Dec 2024 22:49:38 +0100 Subject: [PATCH 30/32] init act_config, def_config even without init() call --- app/src/cnf/config.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/src/cnf/config.py b/app/src/cnf/config.py index ecbad0a..081c05b 100644 --- a/app/src/cnf/config.py +++ b/app/src/cnf/config.py @@ -17,6 +17,8 @@ class Config(): Read config.toml file and sanitize it with read(). Get named parts of the config with get()''' + act_config = {} + def_config = {} conf_schema = Schema({ 'tsun': { From b335881500b1487411c9de0b9cd4ca15d5416c02 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sun, 8 Dec 2024 13:25:04 +0100 Subject: [PATCH 31/32] S allius/issue217 2 (#230) * add some reader classes to get the configuration * adapt unittests * get config from json or toml file * loop over all config readers to get the configuration * rename config test files * use relative paths for coverage test in vscode * do not throw an error for missing config files * remove obsolete tests * use dotted key notation for pv sub dictonary * log config reading progress * remove create_config_toml.py * remove obsolete tests for the ha_addon * disable mosquitto tests if the server is down * ignore main method for test coverage * increase test coverage * pytest-cov: use relative_files only on github, so coverage will work with vscode locally * remove 
unneeded imports * add missing test cases * disable branch coverage, cause its not reachable --- .cover_ghaction_rc | 3 + .coveragerc | 1 - .env_example | 9 + .github/workflows/python-app.yml | 2 +- .vscode/settings.json | 16 +- app/src/cnf/config.py | 139 ++++-- app/src/cnf/config_ifc_proxy.py | 34 -- app/src/cnf/config_read_env.py | 25 ++ app/src/cnf/config_read_json.py | 46 ++ app/src/cnf/config_read_toml.py | 21 + app/src/server.py | 37 +- app/tests/test_config.py | 244 ++++++----- app/tests/test_config_ifc_proxy.py | 53 --- app/tests/test_config_read_env.py | 53 +++ app/tests/test_config_read_json.py | 404 ++++++++++++++++++ app/tests/test_mqtt.py | 105 +++-- app/tests/test_server.py | 24 ++ ha_addons/ha_addon/config.yaml | 32 +- .../rootfs/home/create_config_toml.py | 115 ----- ha_addons/ha_addon/rootfs/run.sh | 5 +- .../ha_addon/tests/test_create_config_toml.py | 194 --------- sonar-project.properties | 4 +- 22 files changed, 942 insertions(+), 624 deletions(-) create mode 100644 .cover_ghaction_rc create mode 100644 .env_example delete mode 100644 app/src/cnf/config_ifc_proxy.py create mode 100644 app/src/cnf/config_read_env.py create mode 100644 app/src/cnf/config_read_json.py create mode 100644 app/src/cnf/config_read_toml.py delete mode 100644 app/tests/test_config_ifc_proxy.py create mode 100644 app/tests/test_config_read_env.py create mode 100644 app/tests/test_config_read_json.py create mode 100644 app/tests/test_server.py delete mode 100644 ha_addons/ha_addon/rootfs/home/create_config_toml.py delete mode 100644 ha_addons/ha_addon/tests/test_create_config_toml.py diff --git a/.cover_ghaction_rc b/.cover_ghaction_rc new file mode 100644 index 0000000..d5b2526 --- /dev/null +++ b/.cover_ghaction_rc @@ -0,0 +1,3 @@ +[run] +branch = True +relative_files = True diff --git a/.coveragerc b/.coveragerc index 890dd1b..398ff08 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,3 +1,2 @@ [run] branch = True -relative_files = True \ No newline at end of file diff 
--git a/.env_example b/.env_example new file mode 100644 index 0000000..4d28078 --- /dev/null +++ b/.env_example @@ -0,0 +1,9 @@ +# example file for the .env file. The .env set private values +# which are needed for builing containers + +# registry for debug an dev container +PRIVAT_CONTAINER_REGISTRY=docker.io// + +# registry for official container (preview, rc, rel) +PUBLIC_CONTAINER_REGISTRY=ghcr.io// +PUBLIC_CR_KEY= \ No newline at end of file diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 9dc387c..4184019 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -54,7 +54,7 @@ jobs: flake8 --exit-zero --ignore=C901,E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505 --format=pylint --output-file=output_flake.txt --exclude=*.pyc app/src/ - name: Test with pytest run: | - python -m pytest app ha_addons --cov=app/src --cov=ha_addons/ha_addon/rootfs/home --cov-report=xml + python -m pytest app --cov=app/src --cov-config=.cover_ghaction_rc --cov-report=xml coverage report - name: Analyze with SonarCloud if: ${{ env.SONAR_TOKEN != 0 }} diff --git a/.vscode/settings.json b/.vscode/settings.json index 5b28c72..626f77f 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,22 +1,20 @@ { "python.analysis.extraPaths": [ - "app/src", - "app/tests", - ".venv/lib", - "ha_addons/ha_addon/rootfs" ], + "app/src", + "app/tests", + ".venv/lib", + ], "python.testing.pytestArgs": [ - "-vvv", + "-vvv", "--cov=app/src", - "--cov=ha_addons/ha_addon/rootfs", "--cov-report=xml", "app", - "system_tests", - "ha_addons" + "system_tests" ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "flake8.args": [ - "--extend-exclude=app/tests/*.py,system_tests/*.py,ha_addons/ha_addon/tests/*.py" + "--extend-exclude=app/tests/*.py,system_tests/*.py" ], "sonarlint.connectedMode.project": { "connectionId": "s-allius", diff --git a/app/src/cnf/config.py b/app/src/cnf/config.py index 
081c05b..b3ed188 100644 --- a/app/src/cnf/config.py +++ b/app/src/cnf/config.py @@ -1,24 +1,47 @@ -'''Config module handles the proxy configuration in the config.toml file''' +'''Config module handles the proxy configuration''' -import tomllib +import shutil import logging from abc import ABC, abstractmethod from schema import Schema, And, Or, Use, Optional class ConfigIfc(ABC): + '''Abstract base class for config readers''' + def __init__(self): + Config.add(self) + @abstractmethod - def get_config(cls) -> dict: # pragma: no cover + def get_config(self) -> dict: # pragma: no cover + '''get the unverified config from the reader''' pass + @abstractmethod + def descr(self) -> str: # pragma: no cover + '''return a description of the source, e.g. the file name''' + pass + + def _extend_key(self, conf, key, val): + '''split a dotted dict key into a hierarchical dict tree ''' + lst = key.split('.') + d = conf + for i, idx in enumerate(lst, 1): # pragma: no branch + if i == len(lst): + d[idx] = val + break + if idx not in d: + d[idx] = {} + d = d[idx] + class Config(): - '''Static class Config is reads and sanitize the config. + '''Static class Config builds and sanitizes the internal config dictionary. - Read config.toml file and sanitize it with read(). - Get named parts of the config with get()''' - act_config = {} - def_config = {} + Using config readers, a partial configuration is added to config. + Config readers are a derivation of the abstract ConfigIfc reader. + When a config reader is instantiated, its `get_config` method is + called automatically and afterwards the config will be merged.
+ ''' conf_schema = Schema({ 'tsun': { @@ -34,8 +57,10 @@ class Config(): 'mqtt': { 'host': Use(str), 'port': And(Use(int), lambda n: 1024 <= n <= 65535), - 'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)), - 'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None)) + 'user': Or(None, And(Use(str), + Use(lambda s: s if len(s) > 0 else None))), + 'passwd': Or(None, And(Use(str), + Use(lambda s: s if len(s) > 0 else None))) }, 'ha': { 'auto_conf_prefix': Use(str), @@ -99,52 +124,74 @@ class Config(): ) @classmethod - def init(cls, ifc: ConfigIfc, path='') -> None | str: - cls.ifc = ifc - cls.act_config = {} + def init(cls, def_reader: ConfigIfc) -> None | str: + '''Initialise the Proxy-Config + +Copy the internal default config file into the config directory +and initialise the Config with the default configuration ''' + cls.err = None cls.def_config = {} - return cls.read(path) + try: + # make the default config transparaent by copying it + # in the config.example file + logging.debug('Copy Default Config to config.example.toml') + + shutil.copy2("default_config.toml", + "config/config.example.toml") + except Exception: + pass + + # read example config file as default configuration + try: + def_config = def_reader.get_config() + cls.def_config = cls.conf_schema.validate(def_config) + logging.info(f'Read from {def_reader.descr()} => ok') + except Exception as error: + cls.err = f'Config.read: {error}' + logging.error( + f"Can't read from {def_reader.descr()} => error\n {error}") + + cls.act_config = cls.def_config.copy() @classmethod - def read(cls, path) -> None | str: - '''Read config file, merge it with the default config + def add(cls, reader: ConfigIfc): + '''Merge the config from the Config Reader into the config + +Checks if a default config exists. 
If no default configuration exists, +the Config.init method has not yet been called.This is normal for the very +first Config Reader which creates the default config and must be ignored +here. The default config reader is handled in the Config.init method''' + if hasattr(cls, 'def_config'): + cls.__parse(reader) + + @classmethod + def get_error(cls) -> None | str: + '''return the last error as a string or None if there is no error''' + return cls.err + + @classmethod + def __parse(cls, reader) -> None | str: + '''Read config from the reader, merge it with the default config and sanitize the result''' - err = None - config = {} - logger = logging.getLogger('data') - + res = 'ok' try: - # read example config file as default configuration - cls.def_config = {} - with open(f"{path}default_config.toml", "rb") as f: - def_config = tomllib.load(f) - cls.def_config = cls.conf_schema.validate(def_config) - - # overwrite the default values, with values from - # the config.toml file - usr_config = cls.ifc.get_config() - - # merge the default and the user config - config = def_config.copy() + rd_config = reader.get_config() + config = cls.act_config.copy() for key in ['tsun', 'solarman', 'mqtt', 'ha', 'inverters', 'gen3plus']: - if key in usr_config: - config[key] |= usr_config[key] - - try: - cls.act_config = cls.conf_schema.validate(config) - except Exception as error: - err = f'Config.read: {error}' - logging.error(err) - - # logging.debug(f'Readed config: "{cls.act_config}" ') + if key in rd_config: + config[key] = config[key] | rd_config[key] + cls.act_config = cls.conf_schema.validate(config) + except FileNotFoundError: + res = 'n/a' except Exception as error: - err = f'Config.read: {error}' - logger.error(err) - cls.act_config = {} + cls.err = f'error: {error}' + logging.error( + f"Can't read from {reader.descr()} => error\n {error}") - return err + logging.info(f'Read from {reader.descr()} => {res}') + return cls.err @classmethod def get(cls, member: str = None): diff 
--git a/app/src/cnf/config_ifc_proxy.py b/app/src/cnf/config_ifc_proxy.py deleted file mode 100644 index cf2f022..0000000 --- a/app/src/cnf/config_ifc_proxy.py +++ /dev/null @@ -1,34 +0,0 @@ -'''Config module handles the proxy configuration in the config.toml file''' - -import shutil -import tomllib -import logging -from cnf.config import ConfigIfc - - -class ConfigIfcProxy(ConfigIfc): - def __init__(self): # pragma: no cover - try: - # make the default config transparaent by copying it - # in the config.example file - logging.info('Copy Default Config to config.example.toml') - - shutil.copy2("default_config.toml", - "config/config.example.toml") - except Exception: - pass - - def get_config(self, cnf_file="config/config.toml") -> dict: - usr_config = {} - - try: - with open(cnf_file, "rb") as f: - usr_config = tomllib.load(f) - except Exception as error: - err = f'Config.read: {error}' - logging.error(err) - logging.info( - '\n To create the missing config.toml file, ' - 'you can rename the template config.example.toml\n' - ' and customize it for your scenario.\n') - return usr_config diff --git a/app/src/cnf/config_read_env.py b/app/src/cnf/config_read_env.py new file mode 100644 index 0000000..693d7cc --- /dev/null +++ b/app/src/cnf/config_read_env.py @@ -0,0 +1,25 @@ +'''Config Reader module which handles config values from the environment''' + +import os +from cnf.config import ConfigIfc + + +class ConfigReadEnv(ConfigIfc): + '''Reader for environment values of the configuration''' + + def get_config(self) -> dict: + conf = {} + data = [ + ('mqtt.host', 'MQTT_HOST'), + ('mqtt.port', 'MQTT_PORT'), + ('mqtt.user', 'MQTT_USER'), + ('mqtt.passwd', 'MQTT_PASSWORD'), + ] + for key, env_var in data: + val = os.getenv(env_var) + if val: + self._extend_key(conf, key, val) + return conf + + def descr(self): + return "Read environment" diff --git a/app/src/cnf/config_read_json.py b/app/src/cnf/config_read_json.py new file mode 100644 index 0000000..785dae7 --- /dev/null 
+++ b/app/src/cnf/config_read_json.py @@ -0,0 +1,46 @@ +'''Config Reader module which handles *.json config files''' + +import json +from cnf.config import ConfigIfc + + +class ConfigReadJson(ConfigIfc): + '''Reader for json config files''' + def __init__(self, cnf_file='/data/options.json'): + '''Read a json file and add the settings to the config''' + if not isinstance(cnf_file, str): + return + self.cnf_file = cnf_file + super().__init__() + + def convert_inv(self, conf, inv): + if 'serial' in inv: + snr = inv['serial'] + del inv['serial'] + conf[snr] = {} + + for key, val in inv.items(): + self._extend_key(conf[snr], key, val) + + def convert_inv_arr(self, conf, key, val: list): + if key not in conf: + conf[key] = {} + for elm in val: + self.convert_inv(conf[key], elm) + + def convert_to_obj(self, data): + conf = {} + for key, val in data.items(): + if key == 'inverters' and isinstance(val, list): + self.convert_inv_arr(conf, key, val) + else: + self._extend_key(conf, key, val) + return conf + + def get_config(self) -> dict: + with open(self.cnf_file) as f: + data = json.load(f) + return self.convert_to_obj(data) + + def descr(self): + return self.cnf_file diff --git a/app/src/cnf/config_read_toml.py b/app/src/cnf/config_read_toml.py new file mode 100644 index 0000000..e64fd28 --- /dev/null +++ b/app/src/cnf/config_read_toml.py @@ -0,0 +1,21 @@ +'''Config Reader module which handles *.toml config files''' + +import tomllib +from cnf.config import ConfigIfc + + +class ConfigReadToml(ConfigIfc): + '''Reader for toml config files''' + def __init__(self, cnf_file): + '''Read a toml file and add the settings to the config''' + if not isinstance(cnf_file, str): + return + self.cnf_file = cnf_file + super().__init__() + + def get_config(self) -> dict: + with open(self.cnf_file, "rb") as f: + return tomllib.load(f) + + def descr(self): + return self.cnf_file diff --git a/app/src/server.py b/app/src/server.py index 3f997bd..ce5077f 100644 --- a/app/src/server.py +++ 
b/app/src/server.py @@ -2,6 +2,7 @@ import logging import asyncio import signal import os +import argparse from asyncio import StreamReader, StreamWriter from aiohttp import web from logging import config # noqa F401 @@ -11,7 +12,9 @@ from gen3.inverter_g3 import InverterG3 from gen3plus.inverter_g3p import InverterG3P from scheduler import Schedule from cnf.config import Config -from cnf.config_ifc_proxy import ConfigIfcProxy +from cnf.config_read_env import ConfigReadEnv +from cnf.config_read_toml import ConfigReadToml +from cnf.config_read_json import ConfigReadJson from modbus_tcp import ModbusTcp routes = web.RouteTableDef() @@ -117,6 +120,8 @@ def get_log_level() -> int: '''checks if LOG_LVL is set in the environment and returns the corresponding logging.LOG_LEVEL''' log_level = os.getenv('LOG_LVL', 'INFO') + logging.info(f"LOG_LVL : {log_level}") + if log_level == 'DEBUG': log_level = logging.DEBUG elif log_level == 'WARN': @@ -126,7 +131,17 @@ def get_log_level() -> int: return log_level -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover + parser = argparse.ArgumentParser() + parser.add_argument('-p', '--config_path', type=str, + default='./config/', + help='set path for the configuration files') + parser.add_argument('-j', '--json_config', type=str, + help='read user config from json-file') + parser.add_argument('-t', '--toml_config', type=str, + help='read user config from toml-file') + parser.add_argument('--add_on', action='store_true') + args = parser.parse_args() # # Setup our daily, rotating logger # @@ -135,9 +150,14 @@ if __name__ == "__main__": logging.config.fileConfig('logging.ini') logging.info(f'Server "{serv_name} - {version}" will be started') + logging.info(f"AddOn: {args.add_on}") + logging.info(f"config_path: {args.config_path}") + logging.info(f"json_config: {args.json_config}") + logging.info(f"toml_config: {args.toml_config}") + log_level = get_log_level() + logging.info('******') # set lowest-severity for 
'root', 'msg', 'conn' and 'data' logger - log_level = get_log_level() logging.getLogger().setLevel(log_level) logging.getLogger('msg').setLevel(log_level) logging.getLogger('conn').setLevel(log_level) @@ -150,9 +170,18 @@ if __name__ == "__main__": asyncio.set_event_loop(loop) # read config file - ConfigErr = Config.init(ConfigIfcProxy()) + Config.init(ConfigReadToml("default_config.toml")) + ConfigReadEnv() + ConfigReadJson(args.config_path + "config.json") + ConfigReadToml(args.config_path + "config.toml") + ConfigReadJson(args.json_config) + ConfigReadToml(args.toml_config) + ConfigErr = Config.get_error() + if ConfigErr is not None: logging.info(f'ConfigErr: {ConfigErr}') + logging.info('******') + Proxy.class_init() Schedule.start() ModbusTcp(loop) diff --git a/app/tests/test_config.py b/app/tests/test_config.py index a0eacb6..d229dac 100644 --- a/app/tests/test_config.py +++ b/app/tests/test_config.py @@ -1,8 +1,47 @@ # test_with_pytest.py import pytest -import tomllib +import json +from mock import patch from schema import SchemaMissingKeyError from cnf.config import Config, ConfigIfc +from cnf.config_read_toml import ConfigReadToml + +class FakeBuffer: + rd = str() + +test_buffer = FakeBuffer + + +class FakeFile(): + def __init__(self): + self.buf = test_buffer + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb): + pass + + +class FakeOptionsFile(FakeFile): + def __init__(self, OpenTextMode): + super().__init__() + self.bin_mode = 'b' in OpenTextMode + + def read(self): + if self.bin_mode: + return bytearray(self.buf.rd.encode('utf-8')).copy() + else: + return self.buf.rd.copy() + +def patch_open(): + def new_open(file: str, OpenTextMode="rb"): + if file == "_no__file__no_": + raise FileNotFoundError + return FakeOptionsFile(OpenTextMode) + + with patch('builtins.open', new_open) as conn: + yield conn class TstConfig(ConfigIfc): @@ -11,7 +50,7 @@ class TstConfig(ConfigIfc): cls.act_config = cnf @classmethod - def get_config(cls) 
-> dict: + def add_config(cls) -> dict: return cls.act_config @@ -23,6 +62,40 @@ def test_empty_config(): except SchemaMissingKeyError: pass +@pytest.fixture +def ConfigDefault(): + return {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': { + 'suggested_area': '', + 'modbus_polling': False, + 'monitor_sn': 0, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'sensor_list': 688 + }, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'suggested_area': '', + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'sensor_list': 688 + } + } + } + @pytest.fixture def ConfigComplete(): return { @@ -70,38 +143,9 @@ def ConfigComplete(): } } -@pytest.fixture -def ConfigMinimum(): - return { - 'gen3plus': { - 'at_acl': { - 'mqtt': {'allow': ['AT+'], 'block': []}, - 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], - 'block': []} - } - }, - 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', - 'port': 5005}, - 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, - 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, - 'ha': {'auto_conf_prefix': 
'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': { - 'allow_all': True, - 'R170000000000001': {'node_id': '', - 'modbus_polling': True, - 'monitor_sn': 0, - 'suggested_area': '', - 'sensor_list': 688}}} - - def test_default_config(): - with open("app/config/default_config.toml", "rb") as f: - cnf = tomllib.load(f) - - try: - validated = Config.conf_schema.validate(cnf) - except Exception: - assert False + Config.init(ConfigReadToml("app/config/default_config.toml")) + validated = Config.def_config assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -146,76 +190,53 @@ def test_full_config(ConfigComplete): assert False assert validated == ConfigComplete -def test_mininum_config(ConfigMinimum): - cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, - 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+']}, - 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE']}}}, - 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, - 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, - 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': {'allow_all': True, - 'R170000000000001': {}} - } +def test_read_empty(ConfigDefault): + 
test_buffer.rd = "" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() - try: - validated = Config.conf_schema.validate(cnf) - except Exception: - assert False - assert validated == ConfigMinimum - -def test_read_empty(): - cnf = {} - err = Config.init(TstConfig(cnf), 'app/config/') assert err == None cnf = Config.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': { - 'allow_all': False, - 'R170000000000001': { - 'suggested_area': '', - 'modbus_polling': False, - 'monitor_sn': 0, - 'node_id': '', - 'pv1': {'manufacturer': 'Risen', - 'type': 'RSM40-8-395M'}, - 'pv2': {'manufacturer': 'Risen', - 'type': 'RSM40-8-395M'}, - 'sensor_list': 688 - }, - 'Y170000000000001': { - 'modbus_polling': True, - 'monitor_sn': 2000000000, - 'suggested_area': '', - 'node_id': '', - 'pv1': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv2': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv3': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv4': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'sensor_list': 688 - } - } - } + assert cnf == ConfigDefault defcnf = Config.def_config.get('solarman') assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000} assert True == Config.is_default('solarman') def test_no_file(): - cnf = {} - err = Config.init(TstConfig(cnf), '') + 
Config.init(ConfigReadToml("default_config.toml")) + err = Config.get_error() assert err == "Config.read: [Errno 2] No such file or directory: 'default_config.toml'" cnf = Config.get() assert cnf == {} defcnf = Config.def_config.get('solarman') assert defcnf == None +def test_no_file2(): + Config.init(ConfigReadToml("app/config/default_config.toml")) + assert Config.err == None + ConfigReadToml("_no__file__no_") + err = Config.get_error() + assert err == None + +def test_invalid_filename(): + Config.init(ConfigReadToml("app/config/default_config.toml")) + assert Config.err == None + ConfigReadToml(None) + err = Config.get_error() + assert err == None + def test_read_cnf1(): - cnf = {'solarman' : {'enabled': False}} - err = Config.init(TstConfig(cnf), 'app/config/') + test_buffer.rd = "solarman.enabled = false" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + assert err == None cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, @@ -256,8 +277,13 @@ def test_read_cnf1(): assert False == Config.is_default('solarman') def test_read_cnf2(): - cnf = {'solarman' : {'enabled': 'FALSE'}} - err = Config.init(TstConfig(cnf), 'app/config/') + test_buffer.rd = "solarman.enabled = 'FALSE'" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + assert err 
== None cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, @@ -293,16 +319,26 @@ def test_read_cnf2(): } assert True == Config.is_default('solarman') -def test_read_cnf3(): - cnf = {'solarman' : {'port': 'FALSE'}} - err = Config.init(TstConfig(cnf), 'app/config/') - assert err == 'Config.read: Key \'solarman\' error:\nKey \'port\' error:\nint(\'FALSE\') raised ValueError("invalid literal for int() with base 10: \'FALSE\'")' +def test_read_cnf3(ConfigDefault): + test_buffer.rd = "solarman.port = 'FALSE'" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + + assert err == 'error: Key \'solarman\' error:\nKey \'port\' error:\nint(\'FALSE\') raised ValueError("invalid literal for int() with base 10: \'FALSE\'")' cnf = Config.get() - assert cnf == {} + assert cnf == ConfigDefault def test_read_cnf4(): - cnf = {'solarman' : {'port': 5000}} - err = Config.init(TstConfig(cnf), 'app/config/') + test_buffer.rd = "solarman.port = 5000" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + assert err == None cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 
'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, @@ -339,11 +375,19 @@ def test_read_cnf4(): assert False == Config.is_default('solarman') def test_read_cnf5(): - cnf = {'solarman' : {'port': 1023}} - err = Config.init(TstConfig(cnf), 'app/config/') + test_buffer.rd = "solarman.port = 1023" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() assert err != None def test_read_cnf6(): - cnf = {'solarman' : {'port': 65536}} - err = Config.init(TstConfig(cnf), 'app/config/') + test_buffer.rd = "solarman.port = 65536" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() assert err != None diff --git a/app/tests/test_config_ifc_proxy.py b/app/tests/test_config_ifc_proxy.py deleted file mode 100644 index 02b0ec7..0000000 --- a/app/tests/test_config_ifc_proxy.py +++ /dev/null @@ -1,53 +0,0 @@ -# test_with_pytest.py -import tomllib -from schema import SchemaMissingKeyError -from cnf.config_ifc_proxy import ConfigIfcProxy - -class CnfIfc(ConfigIfcProxy): - def __init__(self): - pass - -def test_no_config(): - cnf_ifc = CnfIfc() - - cnf = cnf_ifc.get_config("") - assert cnf == {} - -def test_get_config(): - cnf_ifc = CnfIfc() - - cnf = cnf_ifc.get_config("app/config/default_config.toml") - assert cnf == { - 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, - 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, - 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 
10000}, - 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, - 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': { - 'allow_all': False, - 'R170000000000001': { - 'node_id': '', - 'pv1': {'manufacturer': 'Risen', - 'type': 'RSM40-8-395M'}, - 'pv2': {'manufacturer': 'Risen', - 'type': 'RSM40-8-395M'}, - 'modbus_polling': False, - 'suggested_area': '' - }, - 'Y170000000000001': { - 'modbus_polling': True, - 'monitor_sn': 2000000000, - 'node_id': '', - 'pv1': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv2': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv3': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv4': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'suggested_area': '' - } - } - } - diff --git a/app/tests/test_config_read_env.py b/app/tests/test_config_read_env.py new file mode 100644 index 0000000..3bf33fc --- /dev/null +++ b/app/tests/test_config_read_env.py @@ -0,0 +1,53 @@ +# test_with_pytest.py +import pytest +import os +from mock import patch +from cnf.config import Config +from cnf.config_read_toml import ConfigReadToml +from cnf.config_read_env import ConfigReadEnv + +def patch_getenv(): + def new_getenv(key: str, defval=None): + """Get an environment variable, return None if it doesn't exist. +The optional second argument can specify an alternate default. 
key, +default and the result are str.""" + if key == 'MQTT_PASSWORD': + return 'passwd' + elif key == 'MQTT_PORT': + return 1234 + elif key == 'MQTT_HOST': + return "" + return defval + + with patch.object(os, 'getenv', new_getenv) as conn: + yield conn + +def test_extend_key(): + cnf_rd = ConfigReadEnv() + + conf = {} + cnf_rd._extend_key(conf, "mqtt.user", "testuser") + assert conf == { + 'mqtt': { + 'user': 'testuser', + }, + } + + conf = {} + cnf_rd._extend_key(conf, "mqtt", "testuser") + assert conf == { + 'mqtt': 'testuser', + } + + conf = {} + cnf_rd._extend_key(conf, "", "testuser") + assert conf == {'': 'testuser'} + +def test_read_env_config(): + Config.init(ConfigReadToml("app/config/default_config.toml")) + assert Config.get('mqtt') == {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None} + for _ in patch_getenv(): + + ConfigReadEnv() + assert Config.get_error() == None + assert Config.get('mqtt') == {'host': 'mqtt', 'port': 1234, 'user': None, 'passwd': 'passwd'} diff --git a/app/tests/test_config_read_json.py b/app/tests/test_config_read_json.py new file mode 100644 index 0000000..0fd7b07 --- /dev/null +++ b/app/tests/test_config_read_json.py @@ -0,0 +1,404 @@ +# test_with_pytest.py +import pytest +from mock import patch +from cnf.config import Config +from cnf.config_read_json import ConfigReadJson +from cnf.config_read_toml import ConfigReadToml + +from test_config import ConfigDefault, ConfigComplete + + +class CnfIfc(ConfigReadJson): + def __init__(self): + pass + + +class FakeBuffer: + rd = str() + wr = str() + + +test_buffer = FakeBuffer + + +class FakeFile(): + def __init__(self): + self.buf = test_buffer + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb): + pass + + +class FakeOptionsFile(FakeFile): + def __init__(self, OpenTextMode): + super().__init__() + self.bin_mode = 'b' in OpenTextMode + + def read(self): + print(f"Fake.read: bmode:{self.bin_mode}") + if self.bin_mode: + return 
bytearray(self.buf.rd.encode('utf-8')).copy() + else: + print(f"Fake.read: str:{self.buf.rd}") + return self.buf.rd + +def patch_open(): + def new_open(file: str, OpenTextMode="r"): + if file == "_no__file__no_": + raise FileNotFoundError + return FakeOptionsFile(OpenTextMode) + + with patch('builtins.open', new_open) as conn: + yield conn + +@pytest.fixture +def ConfigTomlEmpty(): + return { + 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, + 'ha': {'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'proxy', + 'proxy_unique_id': 'P170000000000001'}, + 'solarman': { + 'enabled': True, + 'host': 'iot.talent-monitoring.com', + 'port': 10000, + }, + 'tsun': { + 'enabled': True, + 'host': 'logger.talent-monitoring.com', + 'port': 5005, + }, + 'inverters': { + 'allow_all': False + }, + 'gen3plus': {'at_acl': {'tsun': {'allow': [], 'block': []}, + 'mqtt': {'allow': [], 'block': []}}}, + } + + +def test_no_config(ConfigDefault): + test_buffer.rd = "" # empty buffer, no json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson() + err = Config.get_error() + + assert err == 'error: Expecting value: line 1 column 1 (char 0)' + cnf = Config.get() + assert cnf == ConfigDefault + +def test_no_file(ConfigDefault): + test_buffer.rd = "" # empty buffer, no json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson("_no__file__no_") + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigDefault + +def test_invalid_filename(ConfigDefault): + test_buffer.rd = "" # empty buffer, no json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson(None) + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigDefault + +def test_cnv1(): + """test dotted key converting""" + 
tst = { + "gen3plus.at_acl.mqtt.block": [ + "AT+SUPDATE", + "AT+" + ] +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'gen3plus': { + 'at_acl': { + 'mqtt': { + 'block': [ + 'AT+SUPDATE', + "AT+" + ], + }, + }, + }, + } + +def test_cnv2(): + """test a valid list with serials in inverters""" + tst = { + "inverters": [ + { + "serial": "R170000000000001", + }, + { + "serial": "Y170000000000001", + } + ], +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'inverters': { + 'R170000000000001': {}, + 'Y170000000000001': {} + }, + } + +def test_cnv3(): + """test the combination of a list and a scalar in inverters""" + tst = { + "inverters": [ + { + "serial": "R170000000000001", + }, + { + "serial": "Y170000000000001", + } + ], + "inverters.allow_all": False, +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'inverters': { + 'R170000000000001': {}, + 'Y170000000000001': {}, + 'allow_all': False, + }, + } + +def test_cnv4(): + tst = { + "inverters": [ + { + "serial": "R170000000000001", + "node_id": "PV-Garage/", + "suggested_area": "Garage", + "modbus_polling": False, + "pv1_manufacturer": "man1", + "pv1_type": "type1", + "pv2_manufacturer": "man2", + "pv2_type": "type2", + "sensor_list": 688 + }, + { + "serial": "Y170000000000001", + "monitor_sn": 2000000000, + "node_id": "PV-Garage2/", + "suggested_area": "Garage2", + "modbus_polling": True, + "client_mode_host": "InverterIP", + "client_mode_port": 1234, + "pv1_manufacturer": "man1", + "pv1_type": "type1", + "pv2_manufacturer": "man2", + "pv2_type": "type2", + "pv3_manufacturer": "man3", + "pv3_type": "type3", + "pv4_manufacturer": "man4", + "pv4_type": "type4", + "sensor_list": 688 + } + ], + "tsun.enabled": True, + "solarman.enabled": True, + "inverters.allow_all": False, + "gen3plus.at_acl.tsun.allow": [ + "AT+Z", + "AT+UPURL", + "AT+SUPDATE" + ], + "gen3plus.at_acl.tsun.block": [ + "AT+SUPDATE" + ], + 
"gen3plus.at_acl.mqtt.allow": [ + "AT+" + ], + "gen3plus.at_acl.mqtt.block": [ + "AT+SUPDATE" + ] +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']}, + 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], + 'block': ['AT+SUPDATE']}}}, + 'inverters': {'R170000000000001': {'modbus_polling': False, + 'node_id': 'PV-Garage/', + 'pv1_manufacturer': 'man1', + 'pv1_type': 'type1', + 'pv2_manufacturer': 'man2', + 'pv2_type': 'type2', + 'sensor_list': 688, + 'suggested_area': 'Garage'}, + 'Y170000000000001': {'client_mode_host': 'InverterIP', + 'client_mode_port': 1234, + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'node_id': 'PV-Garage2/', + 'pv1_manufacturer': 'man1', + 'pv1_type': 'type1', + 'pv2_manufacturer': 'man2', + 'pv2_type': 'type2', + 'pv3_manufacturer': 'man3', + 'pv3_type': 'type3', + 'pv4_manufacturer': 'man4', + 'pv4_type': 'type4', + 'sensor_list': 688, + 'suggested_area': 'Garage2'}, + 'allow_all': False}, + 'solarman': {'enabled': True}, + 'tsun': {'enabled': True} + } + +def test_cnv5(): + """test a invalid list with missing serials""" + tst = { + "inverters": [ + { + "node_id": "PV-Garage1/", + }, + { + "serial": "Y170000000000001", + "node_id": "PV-Garage2/", + } + ], +} + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'inverters': { + 'Y170000000000001': {'node_id': 'PV-Garage2/'} + }, + } + +def test_cnv6(): + """test overwritting a value in inverters""" + tst = { + "inverters": [{ + "serial": "Y170000000000001", + "node_id": "PV-Garage2/", + }], + } + tst2 = { + "inverters": [{ + "serial": "Y170000000000001", + "node_id": "PV-Garden/", + }], + } + cnf = ConfigReadJson() + conf = {} + for key, val in tst.items(): + cnf.convert_inv_arr(conf, key, val) + + assert conf == { + 'inverters': { + 'Y170000000000001': {'node_id': 'PV-Garage2/'} + }, + } + + for key, val in tst2.items(): + cnf.convert_inv_arr(conf, 
key, val) + + assert conf == { + 'inverters': { + 'Y170000000000001': {'node_id': 'PV-Garden/'} + }, + } + +def test_empty_config(ConfigDefault): + test_buffer.rd = "{}" # empty json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson() + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigDefault + + +def test_full_config(ConfigComplete): + test_buffer.rd = """ +{ + "inverters": [ + { + "serial": "R170000000000001", + "node_id": "PV-Garage/", + "suggested_area": "Garage", + "modbus_polling": false, + "pv1.manufacturer": "man1", + "pv1.type": "type1", + "pv2.manufacturer": "man2", + "pv2.type": "type2", + "sensor_list": 688 + }, + { + "serial": "Y170000000000001", + "monitor_sn": 2000000000, + "node_id": "PV-Garage2/", + "suggested_area": "Garage2", + "modbus_polling": true, + "client_mode_host": "InverterIP", + "client_mode_port": 1234, + "pv1.manufacturer": "man1", + "pv1.type": "type1", + "pv2.manufacturer": "man2", + "pv2.type": "type2", + "pv3.manufacturer": "man3", + "pv3.type": "type3", + "pv4.manufacturer": "man4", + "pv4.type": "type4", + "sensor_list": 688 + } + ], + "tsun.enabled": true, + "solarman.enabled": true, + "inverters.allow_all": false, + "gen3plus.at_acl.tsun.allow": [ + "AT+Z", + "AT+UPURL", + "AT+SUPDATE" + ], + "gen3plus.at_acl.tsun.block": [ + "AT+SUPDATE" + ], + "gen3plus.at_acl.mqtt.allow": [ + "AT+" + ], + "gen3plus.at_acl.mqtt.block": [ + "AT+SUPDATE" + ] +} +""" + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson() + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigComplete diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 9c2923c..9560767 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -12,6 +12,8 @@ from modbus import Modbus from gen3plus.solarman_v5 import SolarmanV5 from cnf.config import Config 
+NO_MOSQUITTO_TEST = False +'''disable all tests with connections to test.mosquitto.org''' pytest_plugins = ('pytest_asyncio',) @@ -69,8 +71,12 @@ def spy_modbus_cmd_client(): def test_native_client(test_hostname, test_port): """Sanity check: Make sure the paho-mqtt client can connect to the test - MQTT server. + MQTT server. Otherwise the test set NO_MOSQUITTO_TEST to True and disable + all test cases which depends on the test.mosquitto.org server """ + global NO_MOSQUITTO_TEST + if NO_MOSQUITTO_TEST: + pytest.skip('skipping, since Mosquitto is not reliable at the moment') import paho.mqtt.client as mqtt import threading @@ -82,10 +88,62 @@ def test_native_client(test_hostname, test_port): on_connect = threading.Event() c.on_connect = Mock(side_effect=lambda *_: on_connect.set()) c.connect_async(test_hostname, test_port) - assert on_connect.wait(10) + if not on_connect.wait(3): + NO_MOSQUITTO_TEST = True # skip all mosquitto tests + pytest.skip('skipping, since Mosquitto is not reliable at the moment') finally: c.loop_stop() +@pytest.mark.asyncio +async def test_mqtt_connection(config_mqtt_conn): + global NO_MOSQUITTO_TEST + if NO_MOSQUITTO_TEST: + pytest.skip('skipping, since Mosquitto is not reliable at the moment') + + _ = config_mqtt_conn + assert asyncio.get_running_loop() + + on_connect = asyncio.Event() + async def cb(): + on_connect.set() + + try: + m = Mqtt(cb) + assert m.task + assert await asyncio.wait_for(on_connect.wait(), 5) + # await asyncio.sleep(1) + assert 0 == m.ha_restarts + await m.publish('homeassistant/status', 'online') + except TimeoutError: + assert False + finally: + await m.close() + await m.publish('homeassistant/status', 'online') + +@pytest.mark.asyncio +async def test_ha_reconnect(config_mqtt_conn): + global NO_MOSQUITTO_TEST + if NO_MOSQUITTO_TEST: + pytest.skip('skipping, since Mosquitto is not reliable at the moment') + + _ = config_mqtt_conn + on_connect = asyncio.Event() + async def cb(): + on_connect.set() + + try: + m = 
Mqtt(cb) + msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'offline', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + assert not on_connect.is_set() + + msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + assert on_connect.is_set() + + finally: + await m.close() + @pytest.mark.asyncio async def test_mqtt_no_config(config_no_conn): _ = config_no_conn @@ -110,29 +168,6 @@ async def test_mqtt_no_config(config_no_conn): finally: await m.close() -@pytest.mark.asyncio -async def test_mqtt_connection(config_mqtt_conn): - _ = config_mqtt_conn - assert asyncio.get_running_loop() - - on_connect = asyncio.Event() - async def cb(): - on_connect.set() - - try: - m = Mqtt(cb) - assert m.task - assert await asyncio.wait_for(on_connect.wait(), 5) - # await asyncio.sleep(1) - assert 0 == m.ha_restarts - await m.publish('homeassistant/status', 'online') - except TimeoutError: - assert False - finally: - await m.close() - await m.publish('homeassistant/status', 'online') - - @pytest.mark.asyncio async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd): _ = config_mqtt_conn @@ -209,26 +244,6 @@ async def test_msg_ignore_client_conn(config_mqtt_conn, spy_modbus_cmd_client): finally: await m.close() -@pytest.mark.asyncio -async def test_ha_reconnect(config_mqtt_conn): - _ = config_mqtt_conn - on_connect = asyncio.Event() - async def cb(): - on_connect.set() - - try: - m = Mqtt(cb) - msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'offline', qos= 0, retain = False, mid= 0, properties= None) - await m.dispatch_msg(msg) - assert not on_connect.is_set() - - msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None) - await m.dispatch_msg(msg) - assert on_connect.is_set() - - finally: - await m.close() - @pytest.mark.asyncio async def 
test_ignore_unknown_func(config_mqtt_conn): '''don't dispatch for unknwon function names''' diff --git a/app/tests/test_server.py b/app/tests/test_server.py new file mode 100644 index 0000000..367bf5b --- /dev/null +++ b/app/tests/test_server.py @@ -0,0 +1,24 @@ +# test_with_pytest.py +import pytest +import logging +import os +from mock import patch +from server import get_log_level + +def test_get_log_level(): + + with patch.dict(os.environ, {'LOG_LVL': ''}): + log_lvl = get_log_level() + assert log_lvl == logging.INFO + + with patch.dict(os.environ, {'LOG_LVL': 'DEBUG'}): + log_lvl = get_log_level() + assert log_lvl == logging.DEBUG + + with patch.dict(os.environ, {'LOG_LVL': 'WARN'}): + log_lvl = get_log_level() + assert log_lvl == logging.WARNING + + with patch.dict(os.environ, {'LOG_LVL': 'UNKNOWN'}): + log_lvl = get_log_level() + assert log_lvl == logging.INFO diff --git a/ha_addons/ha_addon/config.yaml b/ha_addons/ha_addon/config.yaml index 213f0ee..3da3876 100755 --- a/ha_addons/ha_addon/config.yaml +++ b/ha_addons/ha_addon/config.yaml @@ -39,18 +39,18 @@ schema: # type: str # manufacturer: str # daher diese variante - pv1_manufacturer: str? - pv1_type: str? - pv2_manufacturer: str? - pv2_type: str? - pv3_manufacturer: str? - pv3_type: str? - pv4_manufacturer: str? - pv4_type: str? - pv5_manufacturer: str? - pv5_type: str? - pv6_manufacturer: str? - pv6_type: str? + pv1.manufacturer: str? + pv1.type: str? + pv2.manufacturer: str? + pv2.type: str? + pv3.manufacturer: str? + pv3.type: str? + pv4.manufacturer: str? + pv4.type: str? + pv5.manufacturer: str? + pv5.type: str? + pv6.manufacturer: str? + pv6.type: str? 
tsun.enabled: bool solarman.enabled: bool inverters.allow_all: bool @@ -92,10 +92,10 @@ options: # - string: PV2 # type: SF-M18/144550 # manufacturer: Shinefar - pv1_manufacturer: Shinefar - pv1_type: SF-M18/144550 - pv2_manufacturer: Shinefar - pv2_type: SF-M18/144550 + pv1.manufacturer: Shinefar + pv1.type: SF-M18/144550 + pv2.manufacturer: Shinefar + pv2.type: SF-M18/144550 tsun.enabled: true # set default solarman.enabled: true # set default inverters.allow_all: false # set default diff --git a/ha_addons/ha_addon/rootfs/home/create_config_toml.py b/ha_addons/ha_addon/rootfs/home/create_config_toml.py deleted file mode 100644 index f806ac1..0000000 --- a/ha_addons/ha_addon/rootfs/home/create_config_toml.py +++ /dev/null @@ -1,115 +0,0 @@ -import json -import os - -# Dieses file übernimmt die Add-On Konfiguration und schreibt sie in die -# Konfigurationsdatei des tsun-proxy -# Die Addon Konfiguration wird in der Datei /data/options.json bereitgestellt -# Die Konfiguration wird in der Datei /home/proxy/config/config.toml -# gespeichert - -# Übernehme die Umgebungsvariablen -# alternativ kann auch auf die homeassistant supervisor API zugegriffen werden - - -def create_config(): - data = {} - data['mqtt.host'] = os.getenv('MQTT_HOST', "mqtt") - data['mqtt.port'] = os.getenv('MQTT_PORT', 1883) - data['mqtt.user'] = os.getenv('MQTT_USER', "") - data['mqtt.passwd'] = os.getenv('MQTT_PASSWORD', "") - - # Lese die Add-On Konfiguration aus der Datei /data/options.json - # with open('data/options.json') as json_file: - with open('/data/options.json') as json_file: - try: - options_data = json.load(json_file) - data.update(options_data) - except json.JSONDecodeError: - pass - - # Schreibe die Add-On Konfiguration in die Datei /home/proxy/config/config.toml # noqa: E501 - # with open('./config/config.toml', 'w+') as f: - with open('/home/proxy/config/config.toml', 'w+') as f: - f.write(f""" -mqtt.host = '{data.get('mqtt.host')}' # URL or IP address of the mqtt broker 
-mqtt.port = {data.get('mqtt.port')} -mqtt.user = '{data.get('mqtt.user')}' -mqtt.passwd = '{data.get('mqtt.passwd')}' - - -ha.auto_conf_prefix = '{data.get('ha.auto_conf_prefix', 'homeassistant')}' # MQTT prefix for subscribing for homeassistant status updates -ha.discovery_prefix = '{data.get('ha.discovery_prefix', 'homeassistant')}' # MQTT prefix for discovery topic -ha.entity_prefix = '{data.get('ha.entity_prefix', 'tsun')}' # MQTT topic prefix for publishing inverter values -ha.proxy_node_id = '{data.get('ha.proxy_node_id', 'proxy')}' # MQTT node id, for the proxy_node_id -ha.proxy_unique_id = '{data.get('ha.proxy_unique_id', 'P170000000000001')}' # MQTT unique id, to identify a proxy instance - - -tsun.enabled = {str(data.get('tsun.enabled', True)).lower()} -tsun.host = '{data.get('tsun.host', 'logger.talent-monitoring.com')}' -tsun.port = {data.get('tsun.port', 5005)} - - -solarman.enabled = {str(data.get('solarman.enabled', True)).lower()} -solarman.host = '{data.get('solarman.host', 'iot.talent-monitoring.com')}' -solarman.port = {data.get('solarman.port', 10000)} - - -inverters.allow_all = {str(data.get('inverters.allow_all', False)).lower()} -""") # noqa: E501 - - if 'inverters' in data: - for inverter in data['inverters']: - f.write(f""" -[inverters."{inverter['serial']}"] -node_id = '{inverter['node_id']}' -suggested_area = '{inverter['suggested_area']}' -modbus_polling = {str(inverter['modbus_polling']).lower()} - -# check if inverter has monitor_sn key. if not, skip monitor_sn -{f"monitor_sn = '{inverter['monitor_sn']}'" if 'monitor_sn' in inverter else ''} - - - -# check if inverter has 'pv1_type' and 'pv1_manufacturer' keys. if not, skip pv1 -{f"pv1 = {{type = '{inverter['pv1_type']}', manufacturer = '{inverter['pv1_manufacturer']}'}}" if 'pv1_type' in inverter and 'pv1_manufacturer' in inverter else ''} -# check if inverter has 'pv2_type' and 'pv2_manufacturer' keys. 
if not, skip pv2 -{f"pv2 = {{type = '{inverter['pv2_type']}', manufacturer = '{inverter['pv2_manufacturer']}'}}" if 'pv2_type' in inverter and 'pv2_manufacturer' in inverter else ''} -# check if inverter has 'pv3_type' and 'pv3_manufacturer' keys. if not, skip pv3 -{f"pv3 = {{type = '{inverter['pv3_type']}', manufacturer = '{inverter['pv3_manufacturer']}'}}" if 'pv3_type' in inverter and 'pv3_manufacturer' in inverter else ''} -# check if inverter has 'pv4_type' and 'pv4_manufacturer' keys. if not, skip pv4 -{f"pv4 = {{type = '{inverter['pv4_type']}', manufacturer = '{inverter['pv4_manufacturer']}'}}" if 'pv4_type' in inverter and 'pv4_manufacturer' in inverter else ''} -# check if inverter has 'pv5_type' and 'pv5_manufacturer' keys. if not, skip pv5 -{f"pv5 = {{type = '{inverter['pv5_type']}', manufacturer = '{inverter['pv5_manufacturer']}'}}" if 'pv5_type' in inverter and 'pv5_manufacturer' in inverter else ''} -# check if inverter has 'pv6_type' and 'pv6_manufacturer' keys. if not, skip pv6 -{f"pv6 = {{type = '{inverter['pv6_type']}', manufacturer = '{inverter['pv6_manufacturer']}'}}" if 'pv6_type' in inverter and 'pv6_manufacturer' in inverter else ''} - - -""") # noqa: E501 - - # add filters - f.write(""" -[gen3plus.at_acl] -# filter for received commands from the internet -tsun.allow = [""") - if 'gen3plus.at_acl.tsun.allow' in data: - for rule in data['gen3plus.at_acl.tsun.allow']: - f.write(f"'{rule}',") - f.write("]\ntsun.block = [") - if 'gen3plus.at_acl.tsun.block' in data: - for rule in data['gen3plus.at_acl.tsun.block']: - f.write(f"'{rule}',") - f.write("""] -# filter for received commands from the MQTT broker -mqtt.allow = [""") - if 'gen3plus.at_acl.mqtt.allow' in data: - for rule in data['gen3plus.at_acl.mqtt.allow']: - f.write(f"'{rule}',") - f.write("]\nmqtt.block = [") - if 'gen3plus.at_acl.mqtt.block' in data: - for rule in data['gen3plus.at_acl.mqtt.block']: - f.write(f"'{rule}',") - f.write("]") - - -if __name__ == "__main__": # pragma: no 
cover - create_config() diff --git a/ha_addons/ha_addon/rootfs/run.sh b/ha_addons/ha_addon/rootfs/run.sh index 7183c12..5329d6f 100755 --- a/ha_addons/ha_addon/rootfs/run.sh +++ b/ha_addons/ha_addon/rootfs/run.sh @@ -27,12 +27,9 @@ cd /home || exit mkdir -p proxy/log mkdir -p proxy/config -echo "Create config.toml..." -python3 create_config_toml.py - cd /home/proxy || exit export VERSION=$(cat /proxy-version.txt) echo "Start Proxyserver..." -python3 server.py +python3 server.py --json_config=/data/options.json diff --git a/ha_addons/ha_addon/tests/test_create_config_toml.py b/ha_addons/ha_addon/tests/test_create_config_toml.py deleted file mode 100644 index 3fd715e..0000000 --- a/ha_addons/ha_addon/tests/test_create_config_toml.py +++ /dev/null @@ -1,194 +0,0 @@ -# test_with_pytest.py -import pytest -import tomllib -from mock import patch -from cnf.config import Config - -from home.create_config_toml import create_config -from test_config import ConfigComplete, ConfigMinimum - - -class FakeBuffer: - rd = bytearray() - wr = str() - - -test_buffer = FakeBuffer - - -class FakeFile(): - def __init__(self): - self.buf = test_buffer - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc, tb): - pass - - -class FakeOptionsFile(FakeFile): - def read(self): - return self.buf.rd - - -class FakeConfigFile(FakeFile): - def write(self, data: str): - self.buf.wr += data - - -@pytest.fixture -def patch_open(): - - def new_open(file: str, OpenTextMode="r"): - if file == '/data/options.json': - return FakeOptionsFile() - elif file == '/home/proxy/config/config.toml': - # write_buffer += 'bla1'.encode('utf-8') - return FakeConfigFile() - - raise TimeoutError - - with patch('builtins.open', new_open) as conn: - yield conn - - -@pytest.fixture -def ConfigTomlEmpty(): - return { - 'gen3plus': {'at_acl': {'mqtt': {'allow': [], 'block': []}, - 'tsun': {'allow': [], 'block': []}}}, - 'ha': {'auto_conf_prefix': 'homeassistant', - 'discovery_prefix': 'homeassistant', - 
'entity_prefix': 'tsun', - 'proxy_node_id': 'proxy', - 'proxy_unique_id': 'P170000000000001'}, - 'inverters': { - 'allow_all': False - }, - 'mqtt': {'host': 'mqtt', 'passwd': '', 'port': 1883, 'user': ''}, - 'solarman': { - 'enabled': True, - 'host': 'iot.talent-monitoring.com', - 'port': 10000, - }, - 'tsun': { - 'enabled': True, - 'host': 'logger.talent-monitoring.com', - 'port': 5005, - }, - } - - -def test_no_config(patch_open, ConfigTomlEmpty): - _ = patch_open - test_buffer.wr = "" - test_buffer.rd = "" # empty buffer, no json - create_config() - cnf = tomllib.loads(test_buffer.wr) - assert cnf == ConfigTomlEmpty - - -def test_empty_config(patch_open, ConfigTomlEmpty): - _ = patch_open - test_buffer.wr = "" - test_buffer.rd = "{}" # empty json - create_config() - cnf = tomllib.loads(test_buffer.wr) - assert cnf == ConfigTomlEmpty - - -def test_full_config(patch_open, ConfigComplete): - _ = patch_open - test_buffer.wr = "" - test_buffer.rd = """ -{ - "inverters": [ - { - "serial": "R170000000000001", - "node_id": "PV-Garage", - "suggested_area": "Garage", - "modbus_polling": false, - "pv1_manufacturer": "man1", - "pv1_type": "type1", - "pv2_manufacturer": "man2", - "pv2_type": "type2" - }, - { - "serial": "Y170000000000001", - "monitor_sn": 2000000000, - "node_id": "PV-Garage2", - "suggested_area": "Garage2", - "modbus_polling": true, - "client_mode_host": "InverterIP", - "client_mode_port": 1234, - "pv1_manufacturer": "man1", - "pv1_type": "type1", - "pv2_manufacturer": "man2", - "pv2_type": "type2", - "pv3_manufacturer": "man3", - "pv3_type": "type3", - "pv4_manufacturer": "man4", - "pv4_type": "type4" - } - ], - "tsun.enabled": true, - "solarman.enabled": true, - "inverters.allow_all": false, - "gen3plus.at_acl.tsun.allow": [ - "AT+Z", - "AT+UPURL", - "AT+SUPDATE" - ], - "gen3plus.at_acl.tsun.block": [ - "AT+SUPDATE" - ], - "gen3plus.at_acl.mqtt.allow": [ - "AT+" - ], - "gen3plus.at_acl.mqtt.block": [ - "AT+SUPDATE" - ] -} -""" - create_config() - cnf = 
tomllib.loads(test_buffer.wr) - - validated = Config.conf_schema.validate(cnf) - assert validated == ConfigComplete - - -def test_minimum_config(patch_open, ConfigMinimum): - _ = patch_open - test_buffer.wr = "" - test_buffer.rd = """ -{ - "inverters": [ - { - "serial": "R170000000000001", - "monitor_sn": 0, - "node_id": "", - "suggested_area": "", - "modbus_polling": true, - "client_mode_host": "InverterIP", - "client_mode_port": 1234 - } - ], - "tsun.enabled": true, - "solarman.enabled": true, - "inverters.allow_all": true, - "gen3plus.at_acl.tsun.allow": [ - "AT+Z", - "AT+UPURL", - "AT+SUPDATE" - ], - "gen3plus.at_acl.mqtt.allow": [ - "AT+" - ] -} -""" - create_config() - cnf = tomllib.loads(test_buffer.wr) - - validated = Config.conf_schema.validate(cnf) - assert validated == ConfigMinimum diff --git a/sonar-project.properties b/sonar-project.properties index f015e81..61d8dbd 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -7,13 +7,13 @@ sonar.projectName=tsun-gen3-proxy # Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. -sonar.sources=app/src/,ha_addons/ha_addon/rootfs/home/ +sonar.sources=app/src/ # Encoding of the source code. 
Default is default system encoding #sonar.sourceEncoding=UTF-8 sonar.python.version=3.12 -sonar.tests=system_tests/,app/tests/,ha_addons/ha_addon/tests/ +sonar.tests=system_tests/,app/tests/ sonar.exclusions=**/.vscode/**/* # Name your criteria sonar.issue.ignore.multicriteria=e1,e2 From 3c81d446dd0b0896cb1dc6611104dc406d108222 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 8 Dec 2024 18:57:40 +0100 Subject: [PATCH 32/32] update changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 532e545..e299e75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- make the configuration more flexible, add command line args to control this +- fix the python path so we don't need special import paths for unit tests anymore +- support test coverager in vscode +- upgrade SonarQube action to version 4 +- update github action to Ubuntu 24-04 - add initial support for home assistant add-ons from @mime24 - github action: use ubuntu 24.04 and sonar-scanner-action 4 [#222](https://github.com/s-allius/tsun-gen3-proxy/issues/222) - migrate paho.mqtt CallbackAPIVersion to VERSION2 [#224](https://github.com/s-allius/tsun-gen3-proxy/issues/224)