From 1e610af1dffb76f62715d19d9fb0f5d20fe45265 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sat, 10 Aug 2024 20:41:31 +0200 Subject: [PATCH 01/39] Code Cleanup (#158) * print coverage report * create sonar-project property file * install all py dependencies in one step * code cleanup * reduce cognitive complexity * do not build on *.yml changes --- .github/workflows/python-app.yml | 16 ++----- app/src/gen3/talent.py | 21 +++++---- app/src/gen3plus/solarman_v5.py | 80 +++++++++++++++++--------------- app/src/messages.py | 34 ++++++++------ app/tests/test_solarman.py | 4 +- app/tests/test_talent.py | 2 +- sonar-project.properties | 17 +++++++ 7 files changed, 100 insertions(+), 74 deletions(-) create mode 100644 sonar-project.properties diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 9061986..6ec82e7 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -38,7 +38,7 @@ jobs: timezoneWindows: "Europe/Berlin" - uses: actions/checkout@v4 with: - fetch-depth: 0 # Fetch all history for all tags and branches + fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis - name: Set up Python 3.12 uses: actions/setup-python@v5 with: @@ -46,7 +46,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 pytest pytest-asyncio + pip install flake8 pytest pytest-asyncio pytest-cov coverage if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Lint with flake8 run: | @@ -56,19 +56,13 @@ jobs: flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with pytest run: | - pip install pytest pytest-cov #pytest app --doctest-modules --junitxml=junit/test-results.xml --cov=com --cov-report=xml --cov-report=html python -m pytest app --cov=app/src --cov-report=xml + coverage report - name: Analyze with SonarCloud uses: SonarSource/sonarcloud-github-action@v2.2.0 env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} with: - projectBaseDir: . - args: - -Dsonar.projectKey=s-allius_tsun-gen3-proxy - -Dsonar.organization=s-allius - -Dsonar.python.version=3.12 - -Dsonar.python.coverage.reportPaths=coverage.xml - -Dsonar.tests=system_tests,app/tests - -Dsonar.source=app/src + projectBaseDir: . \ No newline at end of file diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 7cfcc64..90f258c 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -1,6 +1,6 @@ import struct import logging -import pytz +from zoneinfo import ZoneInfo from datetime import datetime from tzlocal import get_localzone @@ -42,6 +42,7 @@ class Control: class Talent(Message): MB_START_TIMEOUT = 40 MB_REGULAR_TIMEOUT = 60 + TXT_UNKNOWN_CTRL = 'Unknown Ctrl' def __init__(self, server_side: bool, id_str=b''): super().__init__(server_side, self.send_modbus_cb, mb_timeout=15) @@ -75,7 +76,7 @@ class Talent(Message): self.node_id = 'G3' # will be overwritten in __set_serial_no self.mb_timer = Timer(self.mb_timout_cb, self.node_id) self.mb_timeout = self.MB_REGULAR_TIMEOUT - self.mb_start_timeout = self.MB_START_TIMEOUT + self.mb_first_timeout = self.MB_START_TIMEOUT self.modbus_polling = False ''' @@ -246,7 +247,7 @@ class Talent(Message): def _utcfromts(self, ts: float): '''converts inverter timestamp into unix time (epoche)''' - dt = datetime.fromtimestamp(ts/1000, pytz.UTC). 
\ + dt = datetime.fromtimestamp(ts/1000, tz=ZoneInfo("UTC")). \ replace(tzinfo=get_localzone()) return dt.timestamp() @@ -354,7 +355,7 @@ class Talent(Message): else: self.forward() else: - logger.warning('Unknown Ctrl') + logger.warning(self.TXT_UNKNOWN_CTRL) self.inc_counter('Unknown_Ctrl') self.forward() @@ -397,7 +398,7 @@ class Talent(Message): f' offset: {self.ts_offset}') return # ignore received response else: - logger.warning('Unknown Ctrl') + logger.warning(self.TXT_UNKNOWN_CTRL) self.inc_counter('Unknown_Ctrl') self.forward() @@ -431,7 +432,7 @@ class Talent(Message): elif self.ctrl.is_resp(): return # ignore received response else: - logger.warning('Unknown Ctrl') + logger.warning(self.TXT_UNKNOWN_CTRL) self.inc_counter('Unknown_Ctrl') self.forward() @@ -444,14 +445,14 @@ class Talent(Message): self.__process_data() self.state = State.up # allow MODBUS cmds if (self.modbus_polling): - self.mb_timer.start(self.mb_start_timeout) + self.mb_timer.start(self.mb_first_timeout) self.db.set_db_def_value(Register.POLLING_INTERVAL, self.mb_timeout) elif self.ctrl.is_resp(): return # ignore received response else: - logger.warning('Unknown Ctrl') + logger.warning(self.TXT_UNKNOWN_CTRL) self.inc_counter('Unknown_Ctrl') self.forward() @@ -471,7 +472,7 @@ class Talent(Message): elif self.ctrl.is_ind(): pass # Ok, nothing to do else: - logger.warning('Unknown Ctrl') + logger.warning(self.TXT_UNKNOWN_CTRL) self.inc_counter('Unknown_Ctrl') self.forward() @@ -519,7 +520,7 @@ class Talent(Message): self.new_data[key] = True self.modbus_elms += 1 # count for unit tests else: - logger.warning('Unknown Ctrl') + logger.warning(self.TXT_UNKNOWN_CTRL) self.inc_counter('Unknown_Ctrl') self.forward() diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 6ea06f0..7feccd5 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -57,6 +57,8 @@ class SolarmanV5(Message): '''regular Modbus polling time in server mode''' MB_CLIENT_DATA_UP = 30 '''Data up time in client mode''' + HDR_FMT = '= \ (self.header_len + self.data_len+2): - log_lvl = self.log_lvl.get(self.control, logging.WARNING) - if callable(log_lvl): - log_lvl = log_lvl() - hex_dump_memory(log_lvl, f'Received from {self.addr}:', - self._recv_buffer, self.header_len + - self.data_len+2) - if self.__trailer_is_ok(self._recv_buffer, self.header_len - + self.data_len + 2): - if self.state == State.init: - self.state = State.received - - self.__set_serial_no(self.snr) - self.__dispatch_msg() + self.__process_complete_received_msg() self.__flush_recv_msg() else: return 0 # wait 0s before sending a response + def __process_complete_received_msg(self): + log_lvl = self.log_lvl.get(self.control, logging.WARNING) + if callable(log_lvl): + log_lvl = log_lvl() + hex_dump_memory(log_lvl, f'Received from {self.addr}:', + self._recv_buffer, self.header_len + + self.data_len+2) + if self.__trailer_is_ok(self._recv_buffer, self.header_len + + self.data_len + 2): + if self.state == State.init: + self.state = State.received + self.__set_serial_no(self.snr) + self.__dispatch_msg() + def forward(self, buffer, buflen) -> None: '''add the actual receive msg to the forwarding queue''' if self.no_forwarding: @@ -500,7 +504,7 @@ class SolarmanV5(Message): def msg_dev_ind(self): data = self._recv_buffer[self.header_len:] - result = struct.unpack_from(' 4: - # logger.info(f'first byte modbus:{data[14]}') - inv_update = False - self.modbus_elms = 0 - - for key, update, _ in self.mb.recv_resp(self.db, data[14:], - 
self.node_id): - self.modbus_elms += 1 - if update: - if key == 'inverter': - inv_update = True - self._set_mqtt_timestamp(key, self._timestamp()) - self.new_data[key] = True - - if inv_update: - self.__build_model_name() + self.__modbus_command_rsp(data) return self.__forward_msg() + def __modbus_command_rsp(self, data): + '''precess MODBUS RTU response''' + valid = data[1] + modbus_msg_len = self.data_len - 14 + # logger.debug(f'modbus_len:{modbus_msg_len} accepted:{valid}') + if valid == 1 and modbus_msg_len > 4: + # logger.info(f'first byte modbus:{data[14]}') + inv_update = False + self.modbus_elms = 0 + for key, update, _ in self.mb.recv_resp(self.db, data[14:], + self.node_id): + self.modbus_elms += 1 + if update: + if key == 'inverter': + inv_update = True + self._set_mqtt_timestamp(key, self._timestamp()) + self.new_data[key] = True + if inv_update: + self.__build_model_name() + def msg_hbeat_ind(self): data = self._recv_buffer[self.header_len:] result = struct.unpack_from('= data_len: + break + line += '%02x ' % abs(data[j]) + return line + + +def __asc_val(n, data, data_len): + line = '' + for j in range(n-16, n): + if j >= data_len: + break + c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.' + line += '%c' % c + return line + + def hex_dump_memory(level, info, data, data_len): n = 0 lines = [] @@ -26,20 +45,9 @@ def hex_dump_memory(level, info, data, data_len): line = ' ' line += '%04x | ' % (i) n += 16 - - for j in range(n-16, n): - if j >= data_len: - break - line += '%02x ' % abs(data[j]) - + line += __hex_val(n, data, data_len) line += ' ' * (3 * 16 + 9 - len(line)) + ' | ' - - for j in range(n-16, n): - if j >= data_len: - break - c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.' - line += '%c' % c - + line += __asc_val(n, data, data_len) lines.append(line) tracer.log(level, '\n'.join(lines)) diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index ac76aae..6a23e93 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -41,7 +41,7 @@ class MemoryStream(SolarmanV5): super().__init__(server_side, client_mode=False) if server_side: self.mb.timeout = 0.4 # overwrite for faster testing - self.mb_start_timeout = 0.5 + self.mb_first_timeout = 0.5 self.mb_timeout = 0.5 self.writer = Writer() self.mqtt = Mqtt() @@ -1692,7 +1692,7 @@ async def test_start_client_mode(config_tsun_inv1): assert m.no_forwarding == False assert m.mb_timer.tim == None assert asyncio.get_running_loop() == m.mb_timer.loop - await m.send_start_cmd(get_sn_int(), '192.168.1.1', m.mb_start_timeout) + await m.send_start_cmd(get_sn_int(), '192.168.1.1', m.mb_first_timeout) assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15') assert m.db.get_db_value(Register.IP_ADDRESS) == '192.168.1.1' assert m.db.get_db_value(Register.POLLING_INTERVAL) == 0.5 diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 941add2..5c9a9d7 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -27,7 +27,7 @@ class MemoryStream(Talent): super().__init__(server_side) if server_side: self.mb.timeout = 0.4 # overwrite for faster testing - self.mb_start_timeout = 0.5 + self.mb_first_timeout = 0.5 self.mb_timeout = 0.5 self.writer = Writer() self.__msg = msg diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000..7eca32a --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,17 @@ 
+sonar.projectKey=s-allius_tsun-gen3-proxy +sonar.organization=s-allius + +# This is the name and version displayed in the SonarCloud UI. +sonar.projectName=tsun-gen3-proxy +#sonar.projectVersion=1.0 + + +# Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. +sonar.sources=app/src + +# Encoding of the source code. Default is default system encoding +#sonar.sourceEncoding=UTF-8 + +sonar.python.version=3.12 +sonar.python.coverage.reportPaths=coverage.xml +sonar.tests=system_tests,app/tests From 33d385db10d47beb2a3a3d7307499fffa877722f Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sat, 10 Aug 2024 22:53:25 +0200 Subject: [PATCH 02/39] optimise versionstring handling (#159) - Reading the version string from the image updates it even if the image is re-pulled without re-deployment --- CHANGELOG.md | 2 ++ app/Dockerfile | 2 +- app/entrypoint.sh | 2 ++ 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 56aacb4..e41d65b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- Reading the version string from the image updates it even if the image is re-pulled without re-deployment + ## [0.10.1] - 2024-08-10 - fix displaying the version string at startup and in HA [#153](https://github.com/s-allius/tsun-gen3-proxy/issues/153) diff --git a/app/Dockerfile b/app/Dockerfile index 68440d1..90e8b0e 100644 --- a/app/Dockerfile +++ b/app/Dockerfile @@ -34,7 +34,6 @@ ARG GID ARG LOG_LVL ARG environment -ENV VERSION=$VERSION ENV SERVICE_NAME=$SERVICE_NAME ENV UID=$UID ENV GID=$GID @@ -63,6 +62,7 @@ RUN python -m pip install --no-cache --no-index /root/wheels/* && \ COPY --chmod=0700 entrypoint.sh /root/entrypoint.sh COPY config . COPY src . +RUN echo ${VERSION} > /proxy-version.txt RUN date > /build-date.txt EXPOSE 5005 8127 10000 diff --git a/app/entrypoint.sh b/app/entrypoint.sh index b6f3d11..092ea51 100644 --- a/app/entrypoint.sh +++ b/app/entrypoint.sh @@ -2,6 +2,8 @@ set -e user="$(id -u)" +export VERSION=$(cat /proxy-version.txt) + echo "######################################################" echo "# prepare: '$SERVICE_NAME' Version:$VERSION" echo "# for running with UserID:$UID, GroupID:$GID" From 65de9469926d202f28b0dca53297625f6773c6dd Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 10 Aug 2024 23:53:35 +0200 Subject: [PATCH 03/39] fix linter warning --- .github/workflows/python-app.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 6ec82e7..c2a0b1d 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -62,7 +62,7 @@ jobs: - name: Analyze with SonarCloud uses: SonarSource/sonarcloud-github-action@v2.2.0 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} with: projectBaseDir: . 
\ No newline at end of file From 117e6a7570eef2651e071212b4e4287b229bf246 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 10 Aug 2024 23:55:19 +0200 Subject: [PATCH 04/39] exclude *.pyi filese --- .vscode/settings.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.vscode/settings.json b/.vscode/settings.json index dd2d0cf..57033c8 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -15,5 +15,8 @@ "sonarlint.connectedMode.project": { "connectionId": "s-allius", "projectKey": "s-allius_tsun-gen3-proxy" + }, + "files.exclude": { + "**/*.pyi": true } } \ No newline at end of file From 22df381da5270d9e5b92dcc31dd729484851d701 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 11 Aug 2024 00:48:19 +0200 Subject: [PATCH 05/39] ignore some rules for tests --- sonar-project.properties | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/sonar-project.properties b/sonar-project.properties index 7eca32a..d2b9b66 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -7,11 +7,22 @@ sonar.projectName=tsun-gen3-proxy # Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows. -sonar.sources=app/src +sonar.sources=app/src/ # Encoding of the source code. Default is default system encoding #sonar.sourceEncoding=UTF-8 sonar.python.version=3.12 sonar.python.coverage.reportPaths=coverage.xml -sonar.tests=system_tests,app/tests +sonar.tests=system_tests/,app/tests/ +sonar.exclusions=**/.vscode/**/* + +# Name your criteria +sonar.issue.ignore.multicriteria=e1,e2 + +# python:S905 : Remove or refactor this statement; it has no side effects +sonar.issue.ignore.multicriteria.e1.ruleKey=python:S905 +sonar.issue.ignore.multicriteria.e1.resourceKey=app/tests/*.py + +sonar.issue.ignore.multicriteria.e2.ruleKey=python:S905 +sonar.issue.ignore.multicriteria.e2.resourceKey=systems_tests/*.py From e34afcb5230c4606e3b456edf48ac330d09af4a1 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sun, 11 Aug 2024 23:22:07 +0200 Subject: [PATCH 06/39] cleanup (#160) --- app/src/mqtt.py | 14 +++---- app/tests/test_infos_g3.py | 84 +++++++++++++++++++------------------- app/tests/test_solarman.py | 7 ++-- app/tests/test_talent.py | 11 ++--- 4 files changed, 59 insertions(+), 57 deletions(-) diff --git a/app/src/mqtt.py b/app/src/mqtt.py index 1ebbd10..7bc9ce0 100644 --- a/app/src/mqtt.py +++ b/app/src/mqtt.py @@ -12,12 +12,12 @@ logger_mqtt = logging.getLogger('mqtt') class Mqtt(metaclass=Singleton): __client = None - __cb_MqttIsUp = None + __cb_mqtt_is_up = None - def __init__(self, cb_MqttIsUp): + def __init__(self, cb_mqtt_is_up): logger_mqtt.debug('MQTT: __init__') - if cb_MqttIsUp: - self.__cb_MqttIsUp = cb_MqttIsUp + if cb_mqtt_is_up: + self.__cb_mqtt_is_up = cb_mqtt_is_up loop = asyncio.get_event_loop() self.task = loop.create_task(self.__loop()) self.ha_restarts = 0 @@ -71,8 +71,8 @@ class Mqtt(metaclass=Singleton): async with self.__client: logger_mqtt.info('MQTT broker connection established') - if self.__cb_MqttIsUp: - await self.__cb_MqttIsUp() + if self.__cb_mqtt_is_up: + await self.__cb_mqtt_is_up() # async with self.__client.messages() as messages: await self.__client.subscribe(ha_status_topic) @@ -89,7 +89,7 @@ class Mqtt(metaclass=Singleton): f' {status}') if status == 'online': self.ha_restarts += 1 - await self.__cb_MqttIsUp() + await self.__cb_mqtt_is_up() if message.topic.matches(mb_rated_topic): await self.modbus_cmd(message, diff --git 
a/app/tests/test_infos_g3.py b/app/tests/test_infos_g3.py index d335db8..e811d90 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -4,7 +4,7 @@ from app.src.infos import Register, ClrAtMidnight from app.src.gen3.infos_g3 import InfosG3 @pytest.fixture -def ContrDataSeq(): # Get Time Request message +def contr_data_seq(): # Get Time Request message msg = b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f' msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54' msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00' @@ -14,7 +14,7 @@ def ContrDataSeq(): # Get Time Request message return msg @pytest.fixture -def Contr2DataSeq(): # Get Time Request message +def contr2_data_seq(): # Get Time Request message msg = b'\x00\x00\x00\x39\x00\x09\x2b\xa8\x54\x10\x52' msg += b'\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x32\x30\x00' msg += b'\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f\x6e\x00\x09\x2f\x90\x54' @@ -94,19 +94,19 @@ def Contr2DataSeq(): # Get Time Request message return msg @pytest.fixture -def InvDataSeq(): # Data indication from the controller +def inv_data_seq(): # Data indication from the controller msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28' msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43' return msg @pytest.fixture -def InvalidDataSeq(): # Data indication from the controller +def invalid_data_seq(): # Data indication from the controller msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x64\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28' msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43' return msg @pytest.fixture -def InvDataSeq2(): # Data indication from the controller +def inv_data_seq2(): # Data indication from the controller msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00' msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00' msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53' @@ -141,7 +141,7 @@ def InvDataSeq2(): # Data indication from the controller return msg @pytest.fixture -def InvDataNew(): # Data indication from DSP V5.0.17 +def inv_data_new(): # Data indication from DSP V5.0.17 msg = b'\x00\x00\x00\xa3\x00\x00\x00\x00\x53\x00\x00' msg += b'\x00\x00\x00\x80\x53\x00\x00\x00\x00\x01\x04\x53\x00\x00\x00\x00' msg += b'\x01\x90\x41\x00\x00\x01\x91\x53\x00\x00\x00\x00\x01\x90\x53\x00' @@ -217,7 +217,7 @@ def InvDataNew(): # Data indication from DSP V5.0.17 return msg @pytest.fixture -def InvDataSeq2_Zero(): # Data 
indication from the controller +def inv_data_seq2_zero(): # Data indication from the controller msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00' msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00' msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53' @@ -252,37 +252,37 @@ def InvDataSeq2_Zero(): # Data indication from the controller return msg -def test_parse_control(ContrDataSeq): +def test_parse_control(contr_data_seq): i = InfosG3() - for key, result in i.parse (ContrDataSeq): - pass + for key, result in i.parse (contr_data_seq): + pass # side effect in calling i.parse() assert json.dumps(i.db) == json.dumps( {"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 100, "Power_On_Time": 29, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300}}) -def test_parse_control2(Contr2DataSeq): +def test_parse_control2(contr2_data_seq): i = InfosG3() - for key, result in i.parse (Contr2DataSeq): - pass + for key, result in i.parse (contr2_data_seq): + pass # side effect in calling i.parse() assert json.dumps(i.db) == json.dumps( {"collector": {"Collector_Fw_Version": "RSW_400_V1.00.20", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 16, "Power_On_Time": 334, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300}}) -def test_parse_inverter(InvDataSeq): +def test_parse_inverter(inv_data_seq): i = InfosG3() - for key, result in i.parse (InvDataSeq): - pass + for key, result in i.parse (inv_data_seq): + pass # side effect in calling i.parse() assert json.dumps(i.db) == json.dumps( {"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"}}) -def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq): +def test_parse_cont_and_invert(contr_data_seq, inv_data_seq): i = InfosG3() - for key, result in i.parse (ContrDataSeq): - pass + for key, result in i.parse (contr_data_seq): + pass # side effect in calling i.parse() - for key, result in i.parse (InvDataSeq): - pass + for key, result in i.parse (inv_data_seq): + pass # side effect in calling i.parse() assert json.dumps(i.db) == json.dumps( { @@ -290,7 +290,7 @@ def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq): "inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"}}) -def test_build_ha_conf1(ContrDataSeq): +def test_build_ha_conf1(contr_data_seq): i = InfosG3() i.static_init() # initialize counter @@ -346,14 +346,14 @@ def test_build_ha_conf1(ContrDataSeq): assert tests==5 -def test_build_ha_conf2(ContrDataSeq, InvDataSeq, InvDataSeq2): +def test_build_ha_conf2(contr_data_seq, inv_data_seq, inv_data_seq2): i = InfosG3() - for key, result in i.parse (ContrDataSeq): - pass - for key, result in i.parse (InvDataSeq): - pass - for key, 
result in i.parse (InvDataSeq2): - pass + for key, result in i.parse (contr_data_seq): + pass # side effect in calling i.parse() + for key, result in i.parse (inv_data_seq): + pass # side effect in calling i.parse() + for key, result in i.parse (inv_data_seq2): + pass # side effect in calling i.parse() tests = 0 for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'): @@ -384,10 +384,10 @@ def test_build_ha_conf2(ContrDataSeq, InvDataSeq, InvDataSeq2): tests +=1 assert tests==5 -def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero): +def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero): i = InfosG3() tests = 0 - for key, update in i.parse (InvDataSeq2): + for key, update in i.parse (inv_data_seq2): if key == 'total' or key == 'inverter' or key == 'env': assert update == True tests +=1 @@ -396,7 +396,7 @@ def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero): assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}}) assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23}) tests = 0 - for key, update in i.parse (InvDataSeq2): + for key, update in i.parse (inv_data_seq2): if key == 'total': assert update == False tests +=1 @@ -411,7 +411,7 @@ def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero): assert json.dumps(i.db['inverter']) == json.dumps({"Rated_Power": 600, "No_Inputs": 2}) tests = 0 - for key, update in i.parse (InvDataSeq2_Zero): + for key, update in i.parse (inv_data_seq2_zero): if key == 'total': assert update == False tests +=1 @@ -424,10 +424,10 @@ def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero): assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}}) assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0}) -def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero): +def test_must_incr_total2(inv_data_seq2, inv_data_seq2_zero): i = InfosG3() tests = 0 - for key, update in i.parse (InvDataSeq2_Zero): + for key, update in i.parse (inv_data_seq2_zero): if key == 'total': assert update == False tests +=1 @@ -441,7 +441,7 @@ def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero): assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0}) tests = 0 - for key, update in i.parse (InvDataSeq2_Zero): + for key, update in i.parse (inv_data_seq2_zero): if key == 'total': assert update == False tests +=1 @@ -455,7 +455,7 @@ def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero): assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0}) tests = 0 - for key, update in i.parse (InvDataSeq2): + for key, update in i.parse (inv_data_seq2): if key == 'total': assert update == True tests +=1 @@ -467,10 +467,10 @@ def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero): assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, 
"Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}}) -def test_new_data_types(InvDataNew): +def test_new_data_types(inv_data_new): i = InfosG3() tests = 0 - for key, update in i.parse (InvDataNew): + for key, update in i.parse (inv_data_new): if key == 'events': tests +=1 elif key == 'inverter': @@ -487,7 +487,7 @@ def test_new_data_types(InvDataNew): assert json.dumps(i.db['input']) == json.dumps({"pv1": {}}) assert json.dumps(i.db['events']) == json.dumps({"401_": 0, "404_": 0, "405_": 0, "408_": 0, "409_No_Utility": 0, "406_": 0, "416_": 0}) -def test_invalid_data_type(InvalidDataSeq): +def test_invalid_data_type(invalid_data_seq): i = InfosG3() i.static_init() # initialize counter @@ -495,8 +495,8 @@ def test_invalid_data_type(InvalidDataSeq): assert val == 0 - for key, result in i.parse (InvalidDataSeq): - pass + for key, result in i.parse (invalid_data_seq): + pass # side effect in calling i.parse() assert json.dumps(i.db) == json.dumps({"inverter": {"Product_Name": "Microinv"}}) val = i.dev_value(Register.INVALID_DATA_TYPE) # check invalid data type counter diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index 6a23e93..7ead651 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -3,6 +3,7 @@ import struct import time import asyncio import logging +from math import isclose from app.src.gen3plus.solarman_v5 import SolarmanV5 from app.src.config import Config from app.src.infos import Infos, Register @@ -1665,7 +1666,7 @@ async def test_modbus_polling(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp m._send_buffer = bytearray(0) # clear send buffer for next test assert m.state == State.up - assert m.mb_timeout == 0.5 + assert isclose(m.mb_timeout, 0.5) assert next(m.mb_timer.exp_count) == 0 await asyncio.sleep(0.5) @@ -1695,14 +1696,14 @@ async def test_start_client_mode(config_tsun_inv1): await m.send_start_cmd(get_sn_int(), '192.168.1.1', m.mb_first_timeout) assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15') assert m.db.get_db_value(Register.IP_ADDRESS) == '192.168.1.1' - assert m.db.get_db_value(Register.POLLING_INTERVAL) == 0.5 + assert isclose(m.db.get_db_value(Register.POLLING_INTERVAL), 0.5) assert m.db.get_db_value(Register.HEARTBEAT_INTERVAL) == 120 assert m.state == State.up assert m.no_forwarding == True assert m._send_buffer==b'' - assert m.mb_timeout == 0.5 + assert isclose(m.mb_timeout, 0.5) assert next(m.mb_timer.exp_count) == 0 await asyncio.sleep(0.5) diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 5c9a9d7..cf70cd1 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -1,5 +1,6 @@ # test_with_pytest.py import pytest, logging, asyncio +from math import isclose from app.src.gen3.talent import Talent, Control from app.src.config import Config from app.src.infos import Infos, Register @@ -1023,7 +1024,7 @@ def test_msg_inv_ind3(config_tsun_inv1, msg_inverter_ind_0w, msg_inverter_ack): assert m._forward_buffer==msg_inverter_ind_0w assert m._send_buffer==msg_inverter_ack assert m.db.get_db_value(Register.INVERTER_STATUS) == None - assert m.db.db['grid']['Output_Power'] == 0.5 + assert isclose(m.db.db['grid']['Output_Power'], 0.5) 
m.close() assert m.db.get_db_value(Register.INVERTER_STATUS) == 0 @@ -1206,15 +1207,15 @@ def test_timestamp_cnv(): m = MemoryStream(b'') ts = 1722645998453 # Saturday, 3. August 2024 00:46:38.453 (GMT+2:00) utc =1722638798.453 # GMT: Friday, 2. August 2024 22:46:38.453 - assert utc == m._utcfromts(ts) + assert isclose(utc, m._utcfromts(ts)) ts = 1691246944000 # Saturday, 5. August 2023 14:49:04 (GMT+2:00) utc =1691239744.0 # GMT: Saturday, 5. August 2023 12:49:04 - assert utc == m._utcfromts(ts) + assert isclose(utc, m._utcfromts(ts)) ts = 1704152544000 # Monday, 1. January 2024 23:42:24 (GMT+1:00) utc =1704148944.0 # GMT: Monday, 1. January 2024 22:42:24 - assert utc == m._utcfromts(ts) + assert isclose(utc, m._utcfromts(ts)) m.close() @@ -1581,7 +1582,7 @@ async def test_modbus_polling(config_tsun_inv1, msg_inverter_ind): assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m._send_buffer = bytearray(0) # clear send buffer for next test - assert m.mb_timeout == 0.5 + assert isclose(m.mb_timeout, 0.5) assert next(m.mb_timer.exp_count) == 0 await asyncio.sleep(0.5) From 7a9b23d06835387a8fe670903852c0a7d971ef42 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Tue, 13 Aug 2024 21:11:56 +0200 Subject: [PATCH 07/39] Sonar qube 3 (#163) fix SonarQube warnings in modbus.py --- app/src/modbus.py | 122 ++++++++++++++++++++++----------------- app/tests/test_modbus.py | 6 +- 2 files changed, 73 insertions(+), 55 deletions(-) diff --git a/app/src/modbus.py b/app/src/modbus.py index f7dbc27..9a0c918 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -39,7 +39,7 @@ class Modbus(): '''Modbus function code: Write Single Register''' __crc_tab = [] - map = { + mb_reg_mapping = { 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 @@ -139,7 +139,7 @@ class Modbus(): if self.que.qsize() == 1: self.__send_next_from_que() - def recv_req(self, buf: bytearray, + def recv_req(self, buf: bytes, rsp_handler: Callable[[None], None] = None) -> bool: """Add the received Modbus RTU request to the tx queue @@ -164,7 +164,7 @@ class Modbus(): return True - def recv_resp(self, info_db, buf: bytearray, node_id: str) -> \ + def recv_resp(self, info_db, buf: bytes, node_id: str) -> \ Generator[tuple[str, bool, int | float | str], None, None]: """Generator which check and parse a received MODBUS response. 
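For reference, a minimal, self-contained sketch of the Modbus RTU response framing and the CRC-16 check that recv_resp() and __check_crc() work on. The helper name crc16_modbus and the frame bytes are illustrative only; the property that an intact frame with its CRC appended hashes to 0 is what __check_crc() relies on (it returns 0 == self.__calc_crc(msg)).

import struct

def crc16_modbus(frame: bytes) -> int:
    """Standard Modbus CRC-16: init 0xFFFF, reflected polynomial 0xA001, no final xor."""
    crc = 0xFFFF
    for byte in frame:
        crc ^= byte
        for _ in range(8):
            lsb = crc & 1
            crc >>= 1
            if lsb:
                crc ^= 0xA001
    return crc

# Illustrative 'Read Holding Registers' response: addr=1, fcode=3, byte count=4,
# two 16-bit registers (0x0258 = 600, 0x0002 = 2); the CRC is appended low byte first.
payload = bytes([0x01, 0x03, 0x04, 0x02, 0x58, 0x00, 0x02])
frame = payload + struct.pack('<H', crc16_modbus(payload))

# Running the CRC over the whole frame (payload + CRC) yields 0 for an intact message.
assert crc16_modbus(frame) == 0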
@@ -183,58 +183,18 @@ class Modbus(): # logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}') self.node_id = node_id - if not self.req_pend: - self.err = 5 - return - if not self.__check_crc(buf): - logger.error(f'[{node_id}] Modbus resp: CRC error') - self.err = 1 - return - if buf[0] != self.last_addr: - logger.info(f'[{node_id}] Modbus resp: Wrong addr {buf[0]}') - self.err = 2 - return fcode = buf[1] - if fcode != self.last_fcode: - logger.info(f'[{node_id}] Modbus: Wrong fcode {fcode}' - f' != {self.last_fcode}') - self.err = 3 + data_available = self.last_addr == self.INV_ADDR and \ + (fcode == 3 or fcode == 4) + + if self.__resp_error_check(buf, data_available): return - if self.last_addr == self.INV_ADDR and \ - (fcode == 3 or fcode == 4): + + if data_available: elmlen = buf[2] >> 1 - if elmlen != self.last_len: - logger.info(f'[{node_id}] Modbus: len error {elmlen}' - f' != {self.last_len}') - self.err = 4 - return first_reg = self.last_reg # save last_reg before sending next pdu self.__stop_timer() # stop timer and send next pdu - - for i in range(0, elmlen): - addr = first_reg+i - if addr in self.map: - row = self.map[addr] - info_id = row['reg'] - fmt = row['fmt'] - val = struct.unpack_from(fmt, buf, 3+2*i) - result = val[0] - - if 'eval' in row: - result = eval(row['eval']) - if 'ratio' in row: - result = round(result * row['ratio'], 2) - - keys, level, unit, must_incr = info_db._key_obj(info_id) - - if keys: - name, update = info_db.update_db(keys, must_incr, - result) - yield keys[0], update, result - if update: - info_db.tracer.log(level, - f'[{node_id}] MODBUS: {name}' - f' : {result}{unit}') + yield from self.__process_data(info_db, buf, first_reg, elmlen) else: self.__stop_timer() @@ -243,6 +203,64 @@ class Modbus(): self.rsp_handler() self.__send_next_from_que() + def __resp_error_check(self, buf: bytes, data_available: bool) -> bool: + '''Check the MODBUS response for errors, returns True if one accure''' + if not self.req_pend: + self.err = 5 + return True + if not self.__check_crc(buf): + logger.error(f'[{self.node_id}] Modbus resp: CRC error') + self.err = 1 + return True + if buf[0] != self.last_addr: + logger.info(f'[{self.node_id}] Modbus resp: Wrong addr {buf[0]}') + self.err = 2 + return True + fcode = buf[1] + if fcode != self.last_fcode: + logger.info(f'[{self.node_id}] Modbus: Wrong fcode {fcode}' + f' != {self.last_fcode}') + self.err = 3 + return True + if data_available: + elmlen = buf[2] >> 1 + if elmlen != self.last_len: + logger.info(f'[{self.node_id}] Modbus: len error {elmlen}' + f' != {self.last_len}') + self.err = 4 + return True + + return False + + def __get_value(self, buf: bytes, idx: int, row: dict): + '''get a value from the received buffer''' + val = struct.unpack_from(row['fmt'], buf, idx) + result = val[0] + + if 'eval' in row: + result = eval(row['eval']) + if 'ratio' in row: + result = round(result * row['ratio'], 2) + return result + + def __process_data(self, info_db, buf: bytes, first_reg, elmlen): + '''Generator over received registers, updates the db''' + for i in range(0, elmlen): + addr = first_reg+i + if addr in self.mb_reg_mapping: + row = self.mb_reg_mapping[addr] + info_id = row['reg'] + keys, level, unit, must_incr = info_db._key_obj(info_id) + if keys: + result = self.__get_value(buf, 3+2*i, row) + name, update = info_db.update_db(keys, must_incr, + result) + yield keys[0], update, result + if update: + info_db.tracer.log(level, + f'[{self.node_id}] MODBUS: {name}' + f' : {result}{unit}') + ''' MODBUS 
response timer ''' @@ -302,11 +320,11 @@ class Modbus(): ''' Helper function for CRC-16 handling ''' - def __check_crc(self, msg: bytearray) -> bool: + def __check_crc(self, msg: bytes) -> bool: '''Check CRC-16 and returns True if valid''' return 0 == self.__calc_crc(msg) - def __calc_crc(self, buffer: bytearray) -> int: + def __calc_crc(self, buffer: bytes) -> int: '''Build CRC-16 for buffer and returns it''' crc = CRC_INIT diff --git a/app/tests/test_modbus.py b/app/tests/test_modbus.py index 970a161..d0e321e 100644 --- a/app/tests/test_modbus.py +++ b/app/tests/test_modbus.py @@ -366,8 +366,8 @@ async def test_timeout(): def test_recv_unknown_data(): '''Receive a response with an unknwon register''' mb = ModbusTestHelper() - assert 0x9000 not in mb.map - mb.map[0x9000] = {'reg': Register.TEST_REG1, 'fmt': '!H', 'ratio': 1} + assert 0x9000 not in mb.mb_reg_mapping + mb.mb_reg_mapping[0x9000] = {'reg': Register.TEST_REG1, 'fmt': '!H', 'ratio': 1} mb.build_msg(1,3,0x9000,2) @@ -379,7 +379,7 @@ def test_recv_unknown_data(): assert 0 == call assert not mb.req_pend - del mb.map[0x9000] + del mb.mb_reg_mapping[0x9000] def test_close(): '''Check queue handling for build_msg() calls''' From 5a39370cc357e73aad089763d878ff257013f8a3 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Tue, 13 Aug 2024 22:22:45 +0200 Subject: [PATCH 08/39] Sonar qube 3 (#164) * fix SonarQube warnings --- app/src/gen3/infos_g3.py | 27 +++++++++++++-------------- app/src/gen3plus/infos_g3p.py | 23 ++++++++++++++--------- 2 files changed, 27 insertions(+), 23 deletions(-) diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index f20183a..3594f9d 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -139,7 +139,6 @@ class InfosG3(Infos): i = elms # abort the loop elif data_type == 0x41: # 'A' -> Nop ?? 
- # result = struct.unpack_from('!l', buf, ind)[0] ind += 0 i += 1 continue @@ -171,17 +170,17 @@ class InfosG3(Infos): " not supported") return - keys, level, unit, must_incr = self._key_obj(info_id) - - if keys: - name, update = self.update_db(keys, must_incr, result) - yield keys[0], update - else: - update = False - name = str(f'info-id.0x{addr:x}') - - if update: - self.tracer.log(level, f'[{node_id}] GEN3: {name} :' - f' {result}{unit}') - + yield from self.__store_result(addr, result, info_id, node_id) i += 1 + + def __store_result(self, addr, result, info_id, node_id): + keys, level, unit, must_incr = self._key_obj(info_id) + if keys: + name, update = self.update_db(keys, must_incr, result) + yield keys[0], update + else: + update = False + name = str(f'info-id.0x{addr:x}') + if update: + self.tracer.log(level, f'[{node_id}] GEN3: {name} :' + f' {result}{unit}') diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index 77655fd..277b8c7 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -118,15 +118,7 @@ class InfosG3P(Infos): if not isinstance(row, dict): continue info_id = row['reg'] - fmt = row['fmt'] - res = struct.unpack_from(fmt, buf, addr) - result = res[0] - if isinstance(result, (bytearray, bytes)): - result = result.decode().split('\x00')[0] - if 'eval' in row: - result = eval(row['eval']) - if 'ratio' in row: - result = round(result * row['ratio'], 2) + result = self.__get_value(buf, addr, row) keys, level, unit, must_incr = self._key_obj(info_id) @@ -140,3 +132,16 @@ class InfosG3P(Infos): if update: self.tracer.log(level, f'[{node_id}] GEN3PLUS: {name}' f' : {result}{unit}') + + def __get_value(self, buf, idx, row): + '''Get a value from buf and interpret as in row''' + fmt = row['fmt'] + res = struct.unpack_from(fmt, buf, idx) + result = res[0] + if isinstance(result, (bytearray, bytes)): + result = result.decode().split('\x00')[0] + if 'eval' in row: + result = eval(row['eval']) + if 'ratio' in row: + result = round(result * row['ratio'], 2) + return result From 54de2aecfea88e6760481fca5112b945edf373ca Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Fri, 16 Aug 2024 21:07:08 +0200 Subject: [PATCH 09/39] Sonar qube 3 (#165) * cleanup * Add support for TSUN Titan inverter Fixes #161 * fix SonarQube warnings * fix error * rename field "config" * SonarQube reads flake8 output * don't stop on flake8 errors * flake8 scan only app/src for SonarQube * update flake8 run * ignore flake8 C901 * cleanup * fix linter warnings * ignore changed *.yml files * read sensor list solarman data packets * catch 'No route to' error and log only in debug mode * fix unit tests * add sensor_list configuration * adapt unit tests * fix SonarQube warnings --- .github/workflows/python-app.yml | 11 ++- app/src/config.py | 77 ++++++++++--------- app/src/gen3plus/infos_g3p.py | 4 +- app/src/gen3plus/solarman_v5.py | 37 +++++---- app/src/infos.py | 127 +++++++++++++++++-------------- app/src/modbus_tcp.py | 8 +- app/tests/test_config.py | 22 +++--- app/tests/test_infos_g3p.py | 4 +- app/tests/test_solarman.py | 14 +++- app/tests/test_talent.py | 12 +-- sonar-project.properties | 2 - 11 files changed, 179 insertions(+), 139 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index c2a0b1d..deb3530 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -53,10 +53,9 @@ jobs: # stop the build if there are Python syntax errors or 
undefined names flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + flake8 --exit-zero --ignore=C901,E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505 --format=pylint --output-file=output_flake.txt --exclude=*.pyc app/src/ - name: Test with pytest run: | - #pytest app --doctest-modules --junitxml=junit/test-results.xml --cov=com --cov-report=xml --cov-report=html python -m pytest app --cov=app/src --cov-report=xml coverage report - name: Analyze with SonarCloud @@ -65,4 +64,10 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} with: - projectBaseDir: . \ No newline at end of file + projectBaseDir: . + args: + -Dsonar.projectKey=s-allius_tsun-gen3-proxy + -Dsonar.python.coverage.reportPaths=coverage.xml + -Dsonar.python.flake8.reportPaths=output_flake.txt + # -Dsonar.docker.hadolint.reportPaths= + \ No newline at end of file diff --git a/app/src/config.py b/app/src/config.py index 6f16049..02138e7 100644 --- a/app/src/config.py +++ b/app/src/config.py @@ -12,81 +12,84 @@ class Config(): Read config.toml file and sanitize it with read(). Get named parts of the config with get()''' - config = {} + act_config = {} def_config = {} conf_schema = Schema({ 'tsun': { 'enabled': Use(bool), - 'host': Use(str), - 'port': And(Use(int), lambda n: 1024 <= n <= 65535) - }, + 'host': Use(str), + 'port': And(Use(int), lambda n: 1024 <= n <= 65535) + }, 'solarman': { 'enabled': Use(bool), - 'host': Use(str), - 'port': And(Use(int), lambda n: 1024 <= n <= 65535) - }, + 'host': Use(str), + 'port': And(Use(int), lambda n: 1024 <= n <= 65535) + }, 'mqtt': { - 'host': Use(str), - 'port': And(Use(int), lambda n: 1024 <= n <= 65535), - 'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)), - 'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None)) - }, + 'host': Use(str), + 'port': And(Use(int), lambda n: 1024 <= n <= 65535), + 'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)), + 'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None)) + }, 'ha': { 'auto_conf_prefix': Use(str), 'discovery_prefix': Use(str), - 'entity_prefix': Use(str), - 'proxy_node_id': Use(str), - 'proxy_unique_id': Use(str) - }, + 'entity_prefix': Use(str), + 'proxy_node_id': Use(str), + 'proxy_unique_id': Use(str) + }, 'gen3plus': { 'at_acl': { Or('mqtt', 'tsun'): { 'allow': [str], Optional('block', default=[]): [str] - } } - }, + } + }, 'inverters': { 'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): { Optional('monitor_sn', default=0): Use(int), Optional('node_id', default=""): And(Use(str), Use(lambda s: s + '/' - if len(s) > 0 and - s[-1] != '/' else s)), + if len(s) > 0 + and s[-1] != '/' + else s)), Optional('client_mode'): { 'host': Use(str), Optional('port', default=8899): And(Use(int), lambda n: 1024 <= n <= 65535) - }, + }, Optional('modbus_polling', default=True): Use(bool), - Optional('suggested_area', default=""): Use(str), + Optional('suggested_area', default=""): Use(str), + Optional('sensor_list', default=0x2b0): Use(int), Optional('pv1'): { Optional('type'): Use(str), Optional('manufacturer'): Use(str), - }, + }, Optional('pv2'): { Optional('type'): Use(str), Optional('manufacturer'): Use(str), - }, + }, Optional('pv3'): { Optional('type'): Use(str), Optional('manufacturer'): Use(str), - }, + }, Optional('pv4'): { Optional('type'): Use(str), 
Optional('manufacturer'): Use(str), - }, + }, Optional('pv5'): { Optional('type'): Use(str), Optional('manufacturer'): Use(str), - }, + }, Optional('pv6'): { Optional('type'): Use(str), Optional('manufacturer'): Use(str), - } - }} - }, ignore_extra_keys=True - ) + } + } + } + }, ignore_extra_keys=True + ) @classmethod def class_init(cls) -> None | str: # pragma: no cover @@ -146,17 +149,17 @@ class Config(): config[key] |= usr_config[key] try: - cls.config = cls.conf_schema.validate(config) + cls.act_config = cls.conf_schema.validate(config) except Exception as error: err = f'Config.read: {error}' logging.error(err) - # logging.debug(f'Readed config: "{cls.config}" ') + # logging.debug(f'Readed config: "{cls.act_config}" ') except Exception as error: err = f'Config.read: {error}' logger.error(err) - cls.config = {} + cls.act_config = {} return err @@ -166,12 +169,12 @@ class Config(): None it returns the complete config dict''' if member: - return cls.config.get(member, {}) + return cls.act_config.get(member, {}) else: - return cls.config + return cls.act_config @classmethod def is_default(cls, member: str) -> bool: '''Check if the member is the default value''' - return cls.config.get(member) == cls.def_config.get(member) + return cls.act_config.get(member) == cls.def_config.get(member) diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index 277b8c7..2d6a2fc 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -20,9 +20,11 @@ class RegisterMap: 0x4102001c: {'reg': Register.SIGNAL_STRENGTH, 'fmt': '>12)}.{(result>>8)&0xf}.{(result>>4)&0xf}{result&0xf}'"}, # noqa: E501 diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 7feccd5..0cbdfab 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -61,7 +61,7 @@ class SolarmanV5(Message): '''format string for packing of the header''' def __init__(self, server_side: bool, client_mode: bool): - super().__init__(server_side, self.send_modbus_cb, mb_timeout=5) + super().__init__(server_side, self.send_modbus_cb, mb_timeout=8) self.header_len = 11 # overwrite construcor in class Message self.control = 0 @@ -138,6 +138,7 @@ class SolarmanV5(Message): self.mb_first_timeout = self.MB_START_TIMEOUT '''timer value for next Modbus polling request''' self.modbus_polling = False + self.sensor_list = 0x0000 ''' Our puplic methods @@ -186,12 +187,19 @@ class SolarmanV5(Message): self.db.set_db_def_value(Register.POLLING_INTERVAL, self.mb_timeout) + def __set_config_parms(self, inv: dict): + '''init connection with params from the configuration''' + self.node_id = inv['node_id'] + self.sug_area = inv['suggested_area'] + self.modbus_polling = inv['modbus_polling'] + self.sensor_list = inv['sensor_list'] + def __set_serial_no(self, snr: int): + '''check the serial number and configure the inverter connection''' serial_no = str(snr) if self.unique_id == serial_no: logger.debug(f'SerialNo: {serial_no}') else: - found = False inverters = Config.get('inverters') # logger.debug(f'Inverters: {inverters}') @@ -199,14 +207,11 @@ class SolarmanV5(Message): # logger.debug(f'key: {key} -> {inv}') if (type(inv) is dict and 'monitor_sn' in inv and inv['monitor_sn'] == snr): - found = True - self.node_id = inv['node_id'] - self.sug_area = inv['suggested_area'] - self.modbus_polling = inv['modbus_polling'] - logger.debug(f'SerialNo {serial_no} allowed! 
area:{self.sug_area}') # noqa: E501 + self.__set_config_parms(inv) self.db.set_pv_module_details(inv) - - if not found: + logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501 + break + else: self.node_id = '' self.sug_area = '' if 'allow_all' not in inverters or not inverters['allow_all']: @@ -214,7 +219,7 @@ class SolarmanV5(Message): self.unique_id = None logger.warning(f'ignore message from unknow inverter! (SerialNo: {serial_no})') # noqa: E501 return - logger.debug(f'SerialNo {serial_no} not known but accepted!') + logger.warning(f'SerialNo {serial_no} not known but accepted!') self.unique_id = serial_no @@ -405,7 +410,7 @@ class SolarmanV5(Message): return self.__build_header(0x4510) self._send_buffer += struct.pack(' dict: - return cls.config + return cls.act_config def test_empty_config(): @@ -30,7 +30,7 @@ def test_default_config(): validated = Config.conf_schema.validate(cnf) except Exception: assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': False, 'monitor_sn': 0, 'suggested_area': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}} + assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': False, 'monitor_sn': 0, 'suggested_area': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': '', 'sensor_list': 688}}} def test_full_config(): cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, @@ -40,13 +40,13 @@ def test_full_config(): 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, - 'R170000000000001': {'modbus_polling': True, 'node_id': '', 'suggested_area': '', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}}, - 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}} + 'R170000000000001': {'modbus_polling': True, 'node_id': '', 'sensor_list': 0, 'suggested_area': '', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': 
{'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}}, + 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'sensor_list': 0x1511, 'suggested_area': ''}}} try: validated = Config.conf_schema.validate(cnf) except Exception: assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'pv1': {'manufacturer': 'man1','type': 'type1'},'pv2': {'manufacturer': 'man2','type': 'type2'},'pv3': {'manufacturer': 'man3','type': 'type3'}, 'suggested_area': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}} + assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'pv1': {'manufacturer': 'man1','type': 'type1'},'pv2': {'manufacturer': 'man2','type': 'type2'},'pv3': {'manufacturer': 'man3','type': 'type3'}, 'suggested_area': '', 'sensor_list': 0}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': '', 'sensor_list': 5393}}} def test_mininum_config(): cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, @@ -63,7 +63,7 @@ def test_mininum_config(): validated = Config.conf_schema.validate(cnf) except Exception: assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': ''}}} + assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 
'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': '', 'sensor_list': 688}}} def test_read_empty(): cnf = {} @@ -71,7 +71,7 @@ def test_read_empty(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} defcnf = TstConfig.def_config.get('solarman') assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000} @@ -93,7 +93,7 @@ def test_read_cnf1(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 
1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} cnf = TstConfig.get('solarman') assert cnf == {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000} defcnf = TstConfig.def_config.get('solarman') @@ -106,7 +106,7 @@ def test_read_cnf2(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} assert True == TstConfig.is_default('solarman') def test_read_cnf3(): @@ -123,7 +123,7 @@ def test_read_cnf4(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': 
[]}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} assert False == TstConfig.is_default('solarman') def test_read_cnf5(): diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index 549c8d3..21ef570 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -70,7 +70,7 @@ def test_parse_4110(device_data: bytes): pass # side effect is calling generator i.parse() assert json.dumps(i.db) == json.dumps({ - 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": "192.168.80.49"}, + 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": "192.168.80.49", "Sensor_List": "02b0"}, 'collector': {"Chip_Model": "LSW5BLE_17_02B0_1.05", "Collector_Fw_Version": "V1.1.00.0B"}, }) @@ -82,7 +82,7 @@ def test_parse_4210(inverter_data: bytes): pass # side effect is calling generator i.parse() assert json.dumps(i.db) == json.dumps({ - "controller": {"Power_On_Time": 2051}, + "controller": {"Sensor_List": "02b0", "Power_On_Time": 2051}, "inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "Max_Designed_Power": 2000}, "env": {"Inverter_Status": 1, "Inverter_Temp": 14}, "grid": {"Voltage": 224.8, "Current": 0.73, "Frequency": 50.05, "Output_Power": 165.8}, diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index 7ead651..66d17f0 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -633,15 +633,15 @@ def msg_unknown_cmd_rsp(): # 0x1510 @pytest.fixture def config_tsun_allow_all(): - Config.config = {'solarman':{'enabled': True}, 'inverters':{'allow_all':True}} + Config.act_config = {'solarman':{'enabled': True}, 'inverters':{'allow_all':True}} @pytest.fixture def config_no_tsun_inv1(): - Config.config = {'solarman':{'enabled': False},'inverters':{'Y170000000000001':{'monitor_sn': 2070233889, 'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof'}}} + Config.act_config = {'solarman':{'enabled': False},'inverters':{'Y170000000000001':{'monitor_sn': 2070233889, 'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof', 'sensor_list': 688}}} @pytest.fixture def config_tsun_inv1(): - Config.config = {'solarman':{'enabled': True},'inverters':{'Y170000000000001':{'monitor_sn': 2070233889, 'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof'}}} + Config.act_config = {'solarman':{'enabled': True},'inverters':{'Y170000000000001':{'monitor_sn': 2070233889, 'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof', 'sensor_list': 688}}} def test_read_message(device_ind_msg): m = MemoryStream(device_ind_msg, (0,)) @@ -843,7 +843,7 @@ def test_read_two_messages(config_tsun_allow_all, device_ind_msg, device_rsp_msg config_tsun_allow_all m = MemoryStream(device_ind_msg, (0,)) 
m.append_msg(inverter_ind_msg) - + assert 0 == m.sensor_list m._init_new_client_conn() m.read() # read complete msg, and dispatch msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 @@ -858,6 +858,8 @@ def test_read_two_messages(config_tsun_allow_all, device_ind_msg, device_rsp_msg assert m.msg_recvd[1]['control']==0x4210 assert m.msg_recvd[1]['seq']=='02:02' assert m.msg_recvd[1]['data_len']==0x199 + assert '02b0' == m.db.get_db_value(Register.SENSOR_LIST, None) + assert 0x02b0 == m.sensor_list assert m._forward_buffer==device_ind_msg+inverter_ind_msg assert m._send_buffer==device_rsp_msg+inverter_rsp_msg @@ -1078,6 +1080,7 @@ def test_sync_end_rsp(config_tsun_inv1, sync_end_rsp_msg): def test_build_modell_600(config_tsun_allow_all, inverter_ind_msg): config_tsun_allow_all m = MemoryStream(inverter_ind_msg, (0,)) + assert 0 == m.sensor_list assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0) assert None == m.db.get_db_value(Register.RATED_POWER, None) assert None == m.db.get_db_value(Register.INVERTER_TEMP, None) @@ -1085,6 +1088,8 @@ def test_build_modell_600(config_tsun_allow_all, inverter_ind_msg): assert 2000 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0) assert 600 == m.db.get_db_value(Register.RATED_POWER, 0) assert 'TSOL-MS2000(600)' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0) + assert '02b0' == m.db.get_db_value(Register.SENSOR_LIST, None) + assert 0 == m.sensor_list # must not been set by an inverter data ind m._send_buffer = bytearray(0) # clear send buffer for next test m._init_new_client_conn() @@ -1420,6 +1425,7 @@ def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd, msg_modbus_cmd_fwd): config_tsun_inv1 m = MemoryStream(b'') m.snr = get_sn_int() + m.sensor_list = 0x2b0 m.state = State.up c = m.createClientStream(msg_modbus_cmd) diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index cf70cd1..5aadf8d 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -98,12 +98,12 @@ class MemoryStream(Talent): @pytest.fixture def msg_contact_info(): # Contact Info message - Config.config = {'tsun':{'enabled': True}} + Config.act_config = {'tsun':{'enabled': True}} return b'\x00\x00\x00\x2c\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub\x40123456' @pytest.fixture def msg_contact_info_long_id(): # Contact Info message with longer ID - Config.config = {'tsun':{'enabled': True}} + Config.act_config = {'tsun':{'enabled': True}} return b'\x00\x00\x00\x2d\x11R1700000000000011\x91\x00\x08solarhub\x0fsolarhub\x40123456' @pytest.fixture @@ -353,19 +353,19 @@ def msg_unknown(): # Get Time Request message @pytest.fixture def config_tsun_allow_all(): - Config.config = {'tsun':{'enabled': True}, 'inverters':{'allow_all':True}} + Config.act_config = {'tsun':{'enabled': True}, 'inverters':{'allow_all':True}} @pytest.fixture def config_no_tsun_inv1(): - Config.config = {'tsun':{'enabled': False},'inverters':{'R170000000000001':{'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof'}}} + Config.act_config = {'tsun':{'enabled': False},'inverters':{'R170000000000001':{'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof'}}} @pytest.fixture def config_tsun_inv1(): - Config.config = {'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof'}}} + Config.act_config = {'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof'}}} @pytest.fixture def config_no_modbus_poll(): - Config.config = 
{'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1', 'modbus_polling': False, 'suggested_area':'roof'}}} + Config.act_config = {'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1', 'modbus_polling': False, 'suggested_area':'roof'}}} @pytest.fixture def msg_ota_req(): # Over the air update request from tsun cloud diff --git a/sonar-project.properties b/sonar-project.properties index d2b9b66..61d8dbd 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -13,10 +13,8 @@ sonar.sources=app/src/ #sonar.sourceEncoding=UTF-8 sonar.python.version=3.12 -sonar.python.coverage.reportPaths=coverage.xml sonar.tests=system_tests/,app/tests/ sonar.exclusions=**/.vscode/**/* - # Name your criteria sonar.issue.ignore.multicriteria=e1,e2 From 27045cac6e64466a207ddf5e95af58707230527b Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Fri, 23 Aug 2024 00:26:01 +0200 Subject: [PATCH 10/39] Sonar qube 3 (#166) * add unittests for mqtt.py * add mock * move test requirements into a file * fix unit tests * fix formating * initial version * fix SonarQube warning --- .github/workflows/python-app.yml | 11 +- app/requirements-test.txt | 6 + app/src/gen3/talent.py | 2 +- app/src/mqtt.py | 157 +++++++++---------- app/src/singleton.py | 11 +- app/tests/test_mqtt.py | 250 +++++++++++++++++++++++++++++++ app/tests/test_singleton.py | 18 +++ app/tests/test_solarman.py | 79 +++++----- app/tests/test_talent.py | 76 +++++----- requirements-test.txt | 1 + 10 files changed, 451 insertions(+), 160 deletions(-) create mode 100644 app/requirements-test.txt create mode 100644 app/tests/test_mqtt.py create mode 100644 app/tests/test_singleton.py create mode 100644 requirements-test.txt diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index deb3530..a9b5fcb 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -36,6 +36,15 @@ jobs: timezoneLinux: "Europe/Berlin" timezoneMacos: "Europe/Berlin" timezoneWindows: "Europe/Berlin" + # - name: Start Mosquitto + # uses: namoshek/mosquitto-github-action@v1 + # with: + # version: '1.6' + # ports: '1883:1883 8883:8883' + # certificates: ${{ github.workspace }}/.ci/tls-certificates + # config: ${{ github.workspace }}/.ci/mosquitto.conf + # password-file: ${{ github.workspace}}/.ci/mosquitto.passwd + # container-name: 'mqtt' - uses: actions/checkout@v4 with: fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis @@ -46,7 +55,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 pytest pytest-asyncio pytest-cov coverage + if [ -f requirements-test.txt ]; then pip install -r requirements-test.txt; fi if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Lint with flake8 run: | diff --git a/app/requirements-test.txt b/app/requirements-test.txt new file mode 100644 index 0000000..d07ed29 --- /dev/null +++ b/app/requirements-test.txt @@ -0,0 +1,6 @@ + flake8 + pytest + pytest-asyncio + pytest-cov + mock + coverage \ No newline at end of file diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 90f258c..611c9fc 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -177,7 +177,7 @@ class Talent(Message): return self.__build_header(0x70, 0x77) - self._send_buffer += b'\x00\x01\xa3\x28' # fixme + self._send_buffer += b'\x00\x01\xa3\x28' # magic ? 
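The `found`-flag removals in this series rely on Python's `for`/`else`: the `break` added in `__set_serial_no` above (and again in `each_inverter` further down) skips the `else` block, which only runs when the loop finishes without a match. A minimal sketch of the idiom; the names are illustrative and not taken from the proxy code:

```python
def lookup(serials: list[str], wanted: str) -> str:
    """Illustrative only: the for/else idiom that replaces a `found` flag."""
    for serial in serials:
        if serial == wanted:
            result = 'known inverter'
            break                     # skips the else branch below
    else:                             # runs only if the loop never hit break
        result = 'unknown inverter'
    return result

assert lookup(['R17_A', 'Y17_B'], 'Y17_B') == 'known inverter'
assert lookup(['R17_A'], 'Y17_B') == 'unknown inverter'
```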
self._send_buffer += struct.pack('!B', len(modbus_pdu)) self._send_buffer += modbus_pdu self.__finish_send_msg() diff --git a/app/src/mqtt.py b/app/src/mqtt.py index 7bc9ce0..a51f039 100644 --- a/app/src/mqtt.py +++ b/app/src/mqtt.py @@ -2,10 +2,16 @@ import asyncio import logging import aiomqtt import traceback -from modbus import Modbus -from messages import Message -from config import Config -from singleton import Singleton +if __name__ == "app.src.mqtt": + from app.src.modbus import Modbus + from app.src.messages import Message + from app.src.config import Config + from app.src.singleton import Singleton +else: # pragma: no cover + from modbus import Modbus + from messages import Message + from config import Config + from singleton import Singleton logger_mqtt = logging.getLogger('mqtt') @@ -22,6 +28,14 @@ class Mqtt(metaclass=Singleton): self.task = loop.create_task(self.__loop()) self.ha_restarts = 0 + ha = Config.get('ha') + self.ha_status_topic = f"{ha['auto_conf_prefix']}/status" + self.mb_rated_topic = f"{ha['entity_prefix']}/+/rated_load" + self.mb_out_coeff_topic = f"{ha['entity_prefix']}/+/out_coeff" + self.mb_reads_topic = f"{ha['entity_prefix']}/+/modbus_read_regs" + self.mb_inputs_topic = f"{ha['entity_prefix']}/+/modbus_read_inputs" + self.mb_at_cmd_topic = f"{ha['entity_prefix']}/+/at_cmd" + @property def ha_restarts(self): return self._ha_restarts @@ -49,7 +63,6 @@ class Mqtt(metaclass=Singleton): async def __loop(self) -> None: mqtt = Config.get('mqtt') - ha = Config.get('ha') logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:' f'{mqtt["port"]} ' f'user:{mqtt["user"]}') @@ -59,12 +72,6 @@ class Mqtt(metaclass=Singleton): password=mqtt['passwd']) interval = 5 # Seconds - ha_status_topic = f"{ha['auto_conf_prefix']}/status" - mb_rated_topic = "tsun/+/rated_load" # fixme - mb_out_coeff_topic = "tsun/+/out_coeff" # fixme - mb_reads_topic = "tsun/+/modbus_read_regs" # fixme - mb_inputs_topic = "tsun/+/modbus_read_inputs" # fixme - mb_at_cmd_topic = "tsun/+/at_cmd" # fixme while True: try: @@ -74,51 +81,15 @@ class Mqtt(metaclass=Singleton): if self.__cb_mqtt_is_up: await self.__cb_mqtt_is_up() - # async with self.__client.messages() as messages: - await self.__client.subscribe(ha_status_topic) - await self.__client.subscribe(mb_rated_topic) - await self.__client.subscribe(mb_out_coeff_topic) - await self.__client.subscribe(mb_reads_topic) - await self.__client.subscribe(mb_inputs_topic) - await self.__client.subscribe(mb_at_cmd_topic) + await self.__client.subscribe(self.ha_status_topic) + await self.__client.subscribe(self.mb_rated_topic) + await self.__client.subscribe(self.mb_out_coeff_topic) + await self.__client.subscribe(self.mb_reads_topic) + await self.__client.subscribe(self.mb_inputs_topic) + await self.__client.subscribe(self.mb_at_cmd_topic) async for message in self.__client.messages: - if message.topic.matches(ha_status_topic): - status = message.payload.decode("UTF-8") - logger_mqtt.info('Home-Assistant Status:' - f' {status}') - if status == 'online': - self.ha_restarts += 1 - await self.__cb_mqtt_is_up() - - if message.topic.matches(mb_rated_topic): - await self.modbus_cmd(message, - Modbus.WRITE_SINGLE_REG, - 1, 0x2008) - - if message.topic.matches(mb_out_coeff_topic): - payload = message.payload.decode("UTF-8") - val = round(float(payload) * 1024/100) - - if val < 0 or val > 1024: - logger_mqtt.error('out_coeff: value must be in' - 'the range 0..100,' - f' got: {payload}') - else: - await self.modbus_cmd(message, - Modbus.WRITE_SINGLE_REG, - 0, 
0x202c, val) - - if message.topic.matches(mb_reads_topic): - await self.modbus_cmd(message, - Modbus.READ_REGS, 2) - - if message.topic.matches(mb_inputs_topic): - await self.modbus_cmd(message, - Modbus.READ_INPUTS, 2) - - if message.topic.matches(mb_at_cmd_topic): - await self.at_cmd(message) + await self.dispatch_msg(message) except aiomqtt.MqttError: if Config.is_default('mqtt'): @@ -142,46 +113,76 @@ class Mqtt(metaclass=Singleton): f"Exception:\n" f"{traceback.format_exc()}") + async def dispatch_msg(self, message): + if message.topic.matches(self.ha_status_topic): + status = message.payload.decode("UTF-8") + logger_mqtt.info('Home-Assistant Status:' + f' {status}') + if status == 'online': + self.ha_restarts += 1 + await self.__cb_mqtt_is_up() + + if message.topic.matches(self.mb_rated_topic): + await self.modbus_cmd(message, + Modbus.WRITE_SINGLE_REG, + 1, 0x2008) + + if message.topic.matches(self.mb_out_coeff_topic): + payload = message.payload.decode("UTF-8") + try: + val = round(float(payload) * 1024/100) + if val < 0 or val > 1024: + logger_mqtt.error('out_coeff: value must be in' + 'the range 0..100,' + f' got: {payload}') + else: + await self.modbus_cmd(message, + Modbus.WRITE_SINGLE_REG, + 0, 0x202c, val) + except Exception: + pass + + if message.topic.matches(self.mb_reads_topic): + await self.modbus_cmd(message, + Modbus.READ_REGS, 2) + + if message.topic.matches(self.mb_inputs_topic): + await self.modbus_cmd(message, + Modbus.READ_INPUTS, 2) + + if message.topic.matches(self.mb_at_cmd_topic): + await self.at_cmd(message) + def each_inverter(self, message, func_name: str): topic = str(message.topic) node_id = topic.split('/')[1] + '/' - found = False for m in Message: if m.server_side and (m.node_id == node_id): - found = True logger_mqtt.debug(f'Found: {node_id}') fnc = getattr(m, func_name, None) if callable(fnc): yield fnc else: logger_mqtt.warning(f'Cmd not supported by: {node_id}') + break - if not found: + else: logger_mqtt.warning(f'Node_id: {node_id} not found') async def modbus_cmd(self, message, func, params=0, addr=0, val=0): - topic = str(message.topic) - node_id = topic.split('/')[1] + '/' - # refactor into a loop over a table payload = message.payload.decode("UTF-8") - logger_mqtt.info(f'MODBUS via MQTT: {topic} = {payload}') - for m in Message: - if m.server_side and (m.node_id == node_id): - logger_mqtt.debug(f'Found: {node_id}') - fnc = getattr(m, "send_modbus_cmd", None) - res = payload.split(',') - if params > 0 and params != len(res): - logger_mqtt.error(f'Parameter expected: {params}, ' - f'got: {len(res)}') - return - - if callable(fnc): - if params == 1: - val = int(payload) - elif params == 2: - addr = int(res[0], base=16) - val = int(res[1]) # lenght - await fnc(func, addr, val, logging.INFO) + for fnc in self.each_inverter(message, "send_modbus_cmd"): + res = payload.split(',') + if params > 0 and params != len(res): + logger_mqtt.error(f'Parameter expected: {params}, ' + f'got: {len(res)}') + return + if params == 1: + val = int(payload) + elif params == 2: + addr = int(res[0], base=16) + val = int(res[1]) # lenght + await fnc(func, addr, val, logging.INFO) async def at_cmd(self, message): payload = message.payload.decode("UTF-8") diff --git a/app/src/singleton.py b/app/src/singleton.py index 48778b9..8222146 100644 --- a/app/src/singleton.py +++ b/app/src/singleton.py @@ -1,9 +1,14 @@ +from weakref import WeakValueDictionary + + class Singleton(type): - _instances = {} + _instances = WeakValueDictionary() def __call__(cls, *args, **kwargs): # 
logger_mqtt.debug('singleton: __call__') if cls not in cls._instances: - cls._instances[cls] = super(Singleton, - cls).__call__(*args, **kwargs) + instance = super(Singleton, + cls).__call__(*args, **kwargs) + cls._instances[cls] = instance + return cls._instances[cls] diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py new file mode 100644 index 0000000..7dea973 --- /dev/null +++ b/app/tests/test_mqtt.py @@ -0,0 +1,250 @@ +# test_with_pytest.py +import pytest +import asyncio +import aiomqtt +import logging + +from mock import patch, Mock +from app.src.mqtt import Mqtt +from app.src.modbus import Modbus +from app.src.gen3plus.solarman_v5 import SolarmanV5 +from app.src.config import Config + + +pytest_plugins = ('pytest_asyncio',) + + + +@pytest.fixture(scope="module") +def test_port(): + return 1883 + +@pytest.fixture(scope="module") +def test_hostname(): + # if getenv("GITHUB_ACTIONS") == "true": + # return 'mqtt' + # else: + return 'test.mosquitto.org' + +@pytest.fixture +def config_mqtt_conn(test_hostname, test_port): + Config.act_config = {'mqtt':{'host': test_hostname, 'port': test_port, 'user': '', 'passwd': ''}, + 'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'} + } + +@pytest.fixture +def config_no_conn(test_port): + Config.act_config = {'mqtt':{'host': "", 'port': test_port, 'user': '', 'passwd': ''}, + 'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'} + } + +@pytest.fixture +def spy_at_cmd(): + conn = SolarmanV5(server_side=True, client_mode= False) + conn.node_id = 'inv_2/' + with patch.object(conn, 'send_at_cmd', wraps=conn.send_at_cmd) as wrapped_conn: + yield wrapped_conn + conn.close() + +@pytest.fixture +def spy_modbus_cmd(): + conn = SolarmanV5(server_side=True, client_mode= False) + conn.node_id = 'inv_1/' + with patch.object(conn, 'send_modbus_cmd', wraps=conn.send_modbus_cmd) as wrapped_conn: + yield wrapped_conn + conn.close() + +@pytest.fixture +def spy_modbus_cmd_client(): + conn = SolarmanV5(server_side=False, client_mode= False) + conn.node_id = 'inv_1/' + with patch.object(conn, 'send_modbus_cmd', wraps=conn.send_modbus_cmd) as wrapped_conn: + yield wrapped_conn + conn.close() + +def test_native_client(test_hostname, test_port): + """Sanity check: Make sure the paho-mqtt client can connect to the test + MQTT server. 
+ """ + + import paho.mqtt.client as mqtt + import threading + + c = mqtt.Client() + c.loop_start() + try: + # Just make sure the client connects successfully + on_connect = threading.Event() + c.on_connect = Mock(side_effect=lambda *_: on_connect.set()) + c.connect_async(test_hostname, test_port) + assert on_connect.wait(5) + finally: + c.loop_stop() + +@pytest.mark.asyncio +async def test_mqtt_no_config(config_no_conn): + _ = config_no_conn + assert asyncio.get_running_loop() + + on_connect = asyncio.Event() + async def cb(): + on_connect.set() + + try: + m = Mqtt(cb) + assert m.task + await asyncio.sleep(1) + assert not on_connect.is_set() + try: + await m.publish('homeassistant/status', 'online') + assert False + except Exception: + pass + except TimeoutError: + assert False + finally: + await m.close() + +@pytest.mark.asyncio +async def test_mqtt_connection(config_mqtt_conn): + _ = config_mqtt_conn + assert asyncio.get_running_loop() + + on_connect = asyncio.Event() + async def cb(): + on_connect.set() + + try: + m = Mqtt(cb) + assert m.task + assert await asyncio.wait_for(on_connect.wait(), 5) + # await asyncio.sleep(1) + assert 0 == m.ha_restarts + await m.publish('homeassistant/status', 'online') + except TimeoutError: + assert False + finally: + await m.close() + await m.publish('homeassistant/status', 'online') + + +@pytest.mark.asyncio +async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd): + _ = config_mqtt_conn + spy = spy_modbus_cmd + try: + m = Mqtt(None) + msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x2008, 2, logging.INFO) + + spy.reset_mock() + msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'100', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 1024, logging.INFO) + + spy.reset_mock() + msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'50', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 512, logging.INFO) + + spy.reset_mock() + msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_awaited_once_with(Modbus.READ_REGS, 0x3000, 10, logging.INFO) + + spy.reset_mock() + msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_awaited_once_with(Modbus.READ_INPUTS, 0x3000, 10, logging.INFO) + + finally: + await m.close() + +@pytest.mark.asyncio +async def test_msg_dispatch_err(config_mqtt_conn, spy_modbus_cmd): + _ = config_mqtt_conn + spy = spy_modbus_cmd + try: + m = Mqtt(None) + # test out of range param + msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'-1', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_not_called() + + # test unknown node_id + spy.reset_mock() + msg = aiomqtt.Message(topic= 'tsun/inv_2/out_coeff', payload= b'2', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_not_called() + + # test invalid fload param + spy.reset_mock() + msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'2, 3', qos= 0, retain = False, mid= 0, properties= None) 
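The `out_coeff` assertions above follow from the scaling done in `Mqtt.dispatch_msg`: the MQTT payload is a percentage that is mapped onto the inverter's 0..1024 coefficient range before the `WRITE_SINGLE_REG` to register 0x202c is issued. A short sketch of that arithmetic; the helper name is illustrative and not part of `mqtt.py`:

```python
def out_coeff_to_reg(payload: str):
    """Illustrative only: mirrors the percent -> register scaling in dispatch_msg."""
    val = round(float(payload) * 1024 / 100)   # 0..100 %  ->  0..1024
    if val < 0 or val > 1024:
        return None                            # dispatch_msg logs an error instead
    return val

assert out_coeff_to_reg('100') == 1024         # value written to register 0x202c
assert out_coeff_to_reg('50') == 512
assert out_coeff_to_reg('-1') is None          # rejected, no Modbus command sent
```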
+ await m.dispatch_msg(msg) + spy.assert_not_called() + + spy.reset_mock() + msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10, 7', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_not_called() + finally: + await m.close() + +@pytest.mark.asyncio +async def test_msg_ignore_client_conn(config_mqtt_conn, spy_modbus_cmd_client): + '''don't call function if connnection is not in server mode''' + _ = config_mqtt_conn + spy = spy_modbus_cmd_client + try: + m = Mqtt(None) + msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_not_called() + finally: + await m.close() + +@pytest.mark.asyncio +async def test_ha_reconnect(config_mqtt_conn): + _ = config_mqtt_conn + on_connect = asyncio.Event() + async def cb(): + on_connect.set() + + try: + m = Mqtt(cb) + msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'offline', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + assert not on_connect.is_set() + + msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + assert on_connect.is_set() + + finally: + await m.close() + +@pytest.mark.asyncio +async def test_ignore_unknown_func(config_mqtt_conn): + '''don't dispatch for unknwon function names''' + _ = config_mqtt_conn + try: + m = Mqtt(None) + msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None) + for _ in m.each_inverter(msg, 'unkown_fnc'): + assert False + finally: + await m.close() + +@pytest.mark.asyncio +async def test_at_cmd_dispatch(config_mqtt_conn, spy_at_cmd): + _ = config_mqtt_conn + spy = spy_at_cmd + try: + m = Mqtt(None) + msg = aiomqtt.Message(topic= 'tsun/inv_2/at_cmd', payload= b'AT+', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + spy.assert_awaited_once_with('AT+') + + finally: + await m.close() diff --git a/app/tests/test_singleton.py b/app/tests/test_singleton.py new file mode 100644 index 0000000..2ea82eb --- /dev/null +++ b/app/tests/test_singleton.py @@ -0,0 +1,18 @@ +# test_with_pytest.py +import pytest +from app.src.singleton import Singleton + +class Test(metaclass=Singleton): + def __init__(self): + pass # is a dummy test class + +def test_singleton_metaclass(): + a = Test() + assert 1 == len(Singleton._instances) + b = Test() + assert 1 == len(Singleton._instances) + assert a is b + del a + assert 1 == len(Singleton._instances) + del b + assert 0 == len(Singleton._instances) diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index 66d17f0..c9227bd 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -8,7 +8,7 @@ from app.src.gen3plus.solarman_v5 import SolarmanV5 from app.src.config import Config from app.src.infos import Infos, Register from app.src.modbus import Modbus -from app.src.messages import State +from app.src.messages import State, Message pytest_plugins = ('pytest_asyncio',) @@ -773,7 +773,7 @@ def test_invalid_checksum(invalid_checksum, device_ind_msg): m.close() def test_read_message_twice(config_no_tsun_inv1, device_ind_msg, device_rsp_msg): - config_no_tsun_inv1 + _ = config_no_tsun_inv1 m = MemoryStream(device_ind_msg, (0,)) m.append_msg(device_ind_msg) m.read() # read complete msg, and dispatch msg @@ -815,7 +815,7 @@ def 
test_read_message_in_chunks(device_ind_msg): m.close() def test_read_message_in_chunks2(config_tsun_inv1, device_ind_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(device_ind_msg, (4,10,0)) m.read() # read 4 bytes, header incomplere assert not m.header_valid @@ -840,7 +840,7 @@ def test_read_message_in_chunks2(config_tsun_inv1, device_ind_msg): m.close() def test_read_two_messages(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(device_ind_msg, (0,)) m.append_msg(inverter_ind_msg) assert 0 == m.sensor_list @@ -869,7 +869,7 @@ def test_read_two_messages(config_tsun_allow_all, device_ind_msg, device_rsp_msg m.close() def test_read_two_messages2(config_tsun_allow_all, inverter_ind_msg, inverter_ind_msg_81, inverter_rsp_msg, inverter_rsp_msg_81): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(inverter_ind_msg, (0,)) m.append_msg(inverter_ind_msg_81) m.read() # read complete msg, and dispatch msg @@ -895,7 +895,7 @@ def test_read_two_messages2(config_tsun_allow_all, inverter_ind_msg, inverter_in m.close() def test_unkown_message(config_tsun_inv1, unknown_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(unknown_msg, (0,)) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -913,7 +913,7 @@ def test_unkown_message(config_tsun_inv1, unknown_msg): m.close() def test_device_rsp(config_tsun_inv1, device_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(device_rsp_msg, (0,), False) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -931,7 +931,7 @@ def test_device_rsp(config_tsun_inv1, device_rsp_msg): m.close() def test_inverter_rsp(config_tsun_inv1, inverter_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(inverter_rsp_msg, (0,), False) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -949,7 +949,7 @@ def test_inverter_rsp(config_tsun_inv1, inverter_rsp_msg): m.close() def test_heartbeat_ind(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(heartbeat_ind_msg, (0,)) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -966,7 +966,7 @@ def test_heartbeat_ind(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg): m.close() def test_heartbeat_ind2(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(heartbeat_ind_msg, (0,)) m.no_forwarding = True m.read() # read complete msg, and dispatch msg @@ -984,7 +984,7 @@ def test_heartbeat_ind2(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg): m.close() def test_heartbeat_rsp(config_tsun_inv1, heartbeat_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(heartbeat_rsp_msg, (0,), False) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -1002,7 +1002,7 @@ def test_heartbeat_rsp(config_tsun_inv1, heartbeat_rsp_msg): m.close() def test_sync_start_ind(config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg, sync_start_fwd_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = 
MemoryStream(sync_start_ind_msg, (0,)) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -1025,7 +1025,7 @@ def test_sync_start_ind(config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg m.close() def test_sync_start_rsp(config_tsun_inv1, sync_start_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(sync_start_rsp_msg, (0,), False) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -1043,7 +1043,7 @@ def test_sync_start_rsp(config_tsun_inv1, sync_start_rsp_msg): m.close() def test_sync_end_ind(config_tsun_inv1, sync_end_ind_msg, sync_end_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(sync_end_ind_msg, (0,)) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -1060,7 +1060,7 @@ def test_sync_end_ind(config_tsun_inv1, sync_end_ind_msg, sync_end_rsp_msg): m.close() def test_sync_end_rsp(config_tsun_inv1, sync_end_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(sync_end_rsp_msg, (0,), False) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -1078,7 +1078,7 @@ def test_sync_end_rsp(config_tsun_inv1, sync_end_rsp_msg): m.close() def test_build_modell_600(config_tsun_allow_all, inverter_ind_msg): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(inverter_ind_msg, (0,)) assert 0 == m.sensor_list assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0) @@ -1097,7 +1097,7 @@ def test_build_modell_600(config_tsun_allow_all, inverter_ind_msg): m.close() def test_build_modell_1600(config_tsun_allow_all, inverter_ind_msg1600): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(inverter_ind_msg1600, (0,)) assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0) assert None == m.db.get_db_value(Register.RATED_POWER, None) @@ -1109,7 +1109,7 @@ def test_build_modell_1600(config_tsun_allow_all, inverter_ind_msg1600): m.close() def test_build_modell_1800(config_tsun_allow_all, inverter_ind_msg1800): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(inverter_ind_msg1800, (0,)) assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0) assert None == m.db.get_db_value(Register.RATED_POWER, None) @@ -1121,7 +1121,7 @@ def test_build_modell_1800(config_tsun_allow_all, inverter_ind_msg1800): m.close() def test_build_modell_2000(config_tsun_allow_all, inverter_ind_msg2000): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(inverter_ind_msg2000, (0,)) assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0) assert None == m.db.get_db_value(Register.RATED_POWER, None) @@ -1133,7 +1133,7 @@ def test_build_modell_2000(config_tsun_allow_all, inverter_ind_msg2000): m.close() def test_build_modell_800(config_tsun_allow_all, inverter_ind_msg800): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(inverter_ind_msg800, (0,)) assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0) assert None == m.db.get_db_value(Register.RATED_POWER, None) @@ -1145,7 +1145,7 @@ def test_build_modell_800(config_tsun_allow_all, inverter_ind_msg800): m.close() def test_build_logger_modell(config_tsun_allow_all, device_ind_msg): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(device_ind_msg, (0,)) assert 0 == 
m.db.get_db_value(Register.COLLECTOR_FW_VERSION, 0) assert 'IGEN TECH' == m.db.get_db_value(Register.CHIP_TYPE, None) @@ -1156,6 +1156,7 @@ def test_build_logger_modell(config_tsun_allow_all, device_ind_msg): m.close() def test_msg_iterator(): + Message._registry.clear() m1 = SolarmanV5(server_side=True, client_mode=False) m2 = SolarmanV5(server_side=True, client_mode=False) m3 = SolarmanV5(server_side=True, client_mode=False) @@ -1195,7 +1196,7 @@ def test_proxy_counter(): @pytest.mark.asyncio async def test_msg_build_modbus_req(config_tsun_inv1, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, msg_modbus_cmd): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(device_ind_msg, (0,), True) m.read() assert m.control == 0x4110 @@ -1241,7 +1242,7 @@ async def test_msg_build_modbus_req(config_tsun_inv1, device_ind_msg, device_rsp @pytest.mark.asyncio async def test_at_cmd(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, at_command_ind_msg, at_command_rsp_msg): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(device_ind_msg, (0,), True) m.read() # read device ind assert m.control == 0x4110 @@ -1298,7 +1299,7 @@ async def test_at_cmd(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inv @pytest.mark.asyncio async def test_at_cmd_blocked(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, at_command_ind_msg): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(device_ind_msg, (0,), True) m.read() assert m.control == 0x4110 @@ -1336,7 +1337,7 @@ async def test_at_cmd_blocked(config_tsun_allow_all, device_ind_msg, device_rsp_ m.close() def test_at_cmd_ind(config_tsun_inv1, at_command_ind_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(at_command_ind_msg, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['AT_Command'] = 0 @@ -1360,7 +1361,7 @@ def test_at_cmd_ind(config_tsun_inv1, at_command_ind_msg): m.close() def test_at_cmd_ind_block(config_tsun_inv1, at_command_ind_msg_block): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(at_command_ind_msg_block, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['AT_Command'] = 0 @@ -1384,7 +1385,7 @@ def test_at_cmd_ind_block(config_tsun_inv1, at_command_ind_msg_block): m.close() def test_msg_at_command_rsp1(config_tsun_inv1, at_command_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(at_command_rsp_msg) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Modbus_Command'] = 0 @@ -1403,7 +1404,7 @@ def test_msg_at_command_rsp1(config_tsun_inv1, at_command_rsp_msg): m.close() def test_msg_at_command_rsp2(config_tsun_inv1, at_command_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(at_command_rsp_msg) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Modbus_Command'] = 0 @@ -1422,7 +1423,7 @@ def test_msg_at_command_rsp2(config_tsun_inv1, at_command_rsp_msg): m.close() def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd, msg_modbus_cmd_fwd): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(b'') m.snr = get_sn_int() m.sensor_list = 0x2b0 @@ -1450,7 +1451,7 @@ def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd, msg_modbus_cmd_fwd): m.close() def test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd_crc_err): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(b'') m.snr = get_sn_int() m.state = State.up @@ -1477,7 +1478,7 @@ def 
test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd_crc_err): m.close() def test_msg_unknown_cmd_req(config_tsun_inv1, msg_unknown_cmd): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_unknown_cmd, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['AT_Command'] = 0 @@ -1500,7 +1501,7 @@ def test_msg_unknown_cmd_req(config_tsun_inv1, msg_unknown_cmd): def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp): '''Modbus response without a valid Modbus request must be dropped''' - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_rsp) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Modbus_Command'] = 0 @@ -1519,7 +1520,7 @@ def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp): def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp): '''Modbus response with a valid Modbus request must be forwarded''' - config_tsun_inv1 # setup config structure + _ = config_tsun_inv1 # setup config structure m = MemoryStream(msg_modbus_rsp) m.mb.rsp_handler = m._SolarmanV5__forward_msg @@ -1557,7 +1558,7 @@ def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp): def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp): '''Modbus response with a valid Modbus request must be forwarded''' - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_rsp) m.mb.rsp_handler = m._SolarmanV5__forward_msg @@ -1593,7 +1594,7 @@ def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp): m.close() def test_msg_unknown_rsp(config_tsun_inv1, msg_unknown_cmd_rsp): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_unknown_cmd_rsp) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Modbus_Command'] = 0 @@ -1611,7 +1612,7 @@ def test_msg_unknown_rsp(config_tsun_inv1, msg_unknown_cmd_rsp): m.close() def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_invalid): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_invalid, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Modbus_Command'] = 0 @@ -1625,7 +1626,7 @@ def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_invalid): m.close() def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp): - config_tsun_inv1 + _ = config_tsun_inv1 # receive more bytes than expected (7 bytes from the next msg) m = MemoryStream(msg_modbus_rsp+b'\x00\x00\x00\x45\x10\x52\x31', (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -1651,7 +1652,7 @@ def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp): @pytest.mark.asyncio async def test_modbus_polling(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg): - config_tsun_inv1 + _ = config_tsun_inv1 assert asyncio.get_running_loop() m = MemoryStream(heartbeat_ind_msg, (0,)) assert asyncio.get_running_loop() == m.mb_timer.loop @@ -1692,7 +1693,7 @@ async def test_modbus_polling(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp @pytest.mark.asyncio async def test_start_client_mode(config_tsun_inv1): - config_tsun_inv1 + _ = config_tsun_inv1 assert asyncio.get_running_loop() m = MemoryStream(b'') assert m.state == State.init diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 5aadf8d..62df532 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -541,7 +541,7 @@ def test_read_message(msg_contact_info): m.close() def test_read_message_twice(config_no_tsun_inv1, msg_inverter_ind): - config_no_tsun_inv1 + _ = config_no_tsun_inv1 m = MemoryStream(msg_inverter_ind, (0,)) m.append_msg(msg_inverter_ind) m.read() # read complete msg, and dispatch msg @@ 
-622,7 +622,7 @@ def test_read_message_in_chunks2(msg_contact_info): m.close() def test_read_two_messages(config_tsun_allow_all, msg2_contact_info,msg_contact_rsp,msg_contact_rsp2): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(msg2_contact_info, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -652,7 +652,7 @@ def test_read_two_messages(config_tsun_allow_all, msg2_contact_info,msg_contact_ m.close() def test_msg_contact_resp(config_tsun_inv1, msg_contact_rsp): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_contact_rsp, (0,), False) m.await_conn_resp_cnt = 1 m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -672,7 +672,7 @@ def test_msg_contact_resp(config_tsun_inv1, msg_contact_rsp): m.close() def test_msg_contact_resp_2(config_tsun_inv1, msg_contact_rsp): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_contact_rsp, (0,), False) m.await_conn_resp_cnt = 0 m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -692,7 +692,7 @@ def test_msg_contact_resp_2(config_tsun_inv1, msg_contact_rsp): m.close() def test_msg_contact_resp_3(config_tsun_inv1, msg_contact_rsp): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_contact_rsp, (0,), True) m.await_conn_resp_cnt = 0 m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -712,7 +712,7 @@ def test_msg_contact_resp_3(config_tsun_inv1, msg_contact_rsp): m.close() def test_msg_contact_invalid(config_tsun_inv1, msg_contact_invalid): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_contact_invalid, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -730,7 +730,7 @@ def test_msg_contact_invalid(config_tsun_inv1, msg_contact_invalid): m.close() def test_msg_get_time(config_tsun_inv1, msg_get_time): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_get_time, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -749,7 +749,7 @@ def test_msg_get_time(config_tsun_inv1, msg_get_time): m.close() def test_msg_get_time_autark(config_no_tsun_inv1, msg_get_time): - config_no_tsun_inv1 + _ = config_no_tsun_inv1 m = MemoryStream(msg_get_time, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -768,7 +768,7 @@ def test_msg_get_time_autark(config_no_tsun_inv1, msg_get_time): m.close() def test_msg_time_resp(config_tsun_inv1, msg_time_rsp): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_time_rsp, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -787,7 +787,7 @@ def test_msg_time_resp(config_tsun_inv1, msg_time_rsp): m.close() def test_msg_time_resp_autark(config_no_tsun_inv1, msg_time_rsp): - config_no_tsun_inv1 + _ = config_no_tsun_inv1 m = MemoryStream(msg_time_rsp, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -806,7 +806,7 @@ def test_msg_time_resp_autark(config_no_tsun_inv1, msg_time_rsp): m.close() def test_msg_time_inv_resp(config_tsun_inv1, msg_time_rsp_inv): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_time_rsp_inv, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -825,7 +825,7 @@ def test_msg_time_inv_resp(config_tsun_inv1, msg_time_rsp_inv): m.close() def test_msg_time_invalid(config_tsun_inv1, msg_time_invalid): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_time_invalid, (0,), False) 
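The recurring `config_tsun_inv1` → `_ = config_tsun_inv1` edits in these test diffs address the same SonarQube finding: a bare fixture name on its own line is an expression statement with no effect. The fixture is already applied through the test parameter, so assigning it to `_` presumably keeps the reference explicit without tripping the rule; a minimal sketch under that assumption:

```python
import pytest

@pytest.fixture
def config_tsun_inv1():
    return {'tsun': {'enabled': True}}   # simplified stand-in for the real fixture

def test_old_style(config_tsun_inv1):
    config_tsun_inv1                     # no-op statement, flagged by SonarQube
    assert True

def test_new_style(config_tsun_inv1):
    _ = config_tsun_inv1                 # same effect, but no lint finding
    assert True
```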
m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -844,7 +844,7 @@ def test_msg_time_invalid(config_tsun_inv1, msg_time_invalid): m.close() def test_msg_time_invalid_autark(config_no_tsun_inv1, msg_time_invalid): - config_no_tsun_inv1 + _ = config_no_tsun_inv1 m = MemoryStream(msg_time_invalid, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -863,7 +863,7 @@ def test_msg_time_invalid_autark(config_no_tsun_inv1, msg_time_invalid): m.close() def test_msg_cntrl_ind(config_tsun_inv1, msg_controller_ind, msg_controller_ind_ts_offs, msg_controller_ack): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_controller_ind, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -886,7 +886,7 @@ def test_msg_cntrl_ind(config_tsun_inv1, msg_controller_ind, msg_controller_ind_ m.close() def test_msg_cntrl_ack(config_tsun_inv1, msg_controller_ack): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_controller_ack, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -904,7 +904,7 @@ def test_msg_cntrl_ack(config_tsun_inv1, msg_controller_ack): m.close() def test_msg_cntrl_invalid(config_tsun_inv1, msg_controller_invalid): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_controller_invalid, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -927,7 +927,7 @@ def test_msg_cntrl_invalid(config_tsun_inv1, msg_controller_invalid): m.close() def test_msg_inv_ind(config_tsun_inv1, msg_inverter_ind, msg_inverter_ind_ts_offs, msg_inverter_ack): - config_tsun_inv1 + _ = config_tsun_inv1 tracer.setLevel(logging.DEBUG) m = MemoryStream(msg_inverter_ind, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -951,7 +951,7 @@ def test_msg_inv_ind(config_tsun_inv1, msg_inverter_ind, msg_inverter_ind_ts_off m.close() def test_msg_inv_ind1(config_tsun_inv1, msg_inverter_ind2, msg_inverter_ind_ts_offs, msg_inverter_ack): - config_tsun_inv1 + _ = config_tsun_inv1 tracer.setLevel(logging.DEBUG) m = MemoryStream(msg_inverter_ind2, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -975,7 +975,7 @@ def test_msg_inv_ind1(config_tsun_inv1, msg_inverter_ind2, msg_inverter_ind_ts_o m.close() def test_msg_inv_ind2(config_tsun_inv1, msg_inverter_ind_new, msg_inverter_ind_ts_offs, msg_inverter_ack): - config_tsun_inv1 + _ = config_tsun_inv1 tracer.setLevel(logging.DEBUG) m = MemoryStream(msg_inverter_ind_new, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -1003,7 +1003,7 @@ def test_msg_inv_ind2(config_tsun_inv1, msg_inverter_ind_new, msg_inverter_ind_t def test_msg_inv_ind3(config_tsun_inv1, msg_inverter_ind_0w, msg_inverter_ack): '''test that after close the invert_status will be resetted if the grid power is <2W''' - config_tsun_inv1 + _ = config_tsun_inv1 tracer.setLevel(logging.DEBUG) m = MemoryStream(msg_inverter_ind_0w, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -1030,7 +1030,7 @@ def test_msg_inv_ind3(config_tsun_inv1, msg_inverter_ind_0w, msg_inverter_ack): def test_msg_inv_ack(config_tsun_inv1, msg_inverter_ack): - config_tsun_inv1 + _ = config_tsun_inv1 tracer.setLevel(logging.ERROR) m = MemoryStream(msg_inverter_ack, (0,), False) @@ -1050,7 +1050,7 @@ def test_msg_inv_ack(config_tsun_inv1, msg_inverter_ack): m.close() def test_msg_inv_invalid(config_tsun_inv1, msg_inverter_invalid): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_inverter_invalid, 
(0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg @@ -1073,7 +1073,7 @@ def test_msg_inv_invalid(config_tsun_inv1, msg_inverter_invalid): m.close() def test_msg_ota_req(config_tsun_inv1, msg_ota_req): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_ota_req, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['OTA_Start_Msg'] = 0 @@ -1098,7 +1098,7 @@ def test_msg_ota_req(config_tsun_inv1, msg_ota_req): m.close() def test_msg_ota_ack(config_tsun_inv1, msg_ota_ack): - config_tsun_inv1 + _ = config_tsun_inv1 tracer.setLevel(logging.ERROR) m = MemoryStream(msg_ota_ack, (0,), False) @@ -1125,7 +1125,7 @@ def test_msg_ota_ack(config_tsun_inv1, msg_ota_ack): m.close() def test_msg_ota_invalid(config_tsun_inv1, msg_ota_invalid): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_ota_invalid, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['OTA_Start_Msg'] = 0 @@ -1268,7 +1268,7 @@ def test_proxy_counter(): m.close() def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(b'') m.id_str = b"R170000000000001" m.state = State.up @@ -1299,7 +1299,7 @@ def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd): m.close() def test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(b'') m.id_str = b"R170000000000001" @@ -1329,7 +1329,7 @@ def test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd): m.close() def test_msg_modbus_req3(config_tsun_inv1, msg_modbus_cmd_crc_err): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(b'') m.id_str = b"R170000000000001" c = m.createClientStream(msg_modbus_cmd_crc_err) @@ -1358,7 +1358,7 @@ def test_msg_modbus_req3(config_tsun_inv1, msg_modbus_cmd_crc_err): def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp): '''Modbus response without a valid Modbus request must be dropped''' - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_rsp) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Modbus_Command'] = 0 @@ -1379,7 +1379,7 @@ def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp): def test_msg_modbus_cloud_rsp(config_tsun_inv1, msg_modbus_rsp): '''Modbus response from TSUN without a valid Modbus request must be dropped''' - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_rsp, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Unknown_Msg'] = 0 @@ -1402,7 +1402,7 @@ def test_msg_modbus_cloud_rsp(config_tsun_inv1, msg_modbus_rsp): def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp20): '''Modbus response with a valid Modbus request must be forwarded''' - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_rsp20) m.append_msg(msg_modbus_rsp20) @@ -1432,7 +1432,7 @@ def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp20): def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp21): '''Modbus response with a valid Modbus request must be forwarded''' - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_rsp21) m.append_msg(msg_modbus_rsp21) @@ -1461,7 +1461,7 @@ def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp21): m.close() def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_inv): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_modbus_inv, (0,), False) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.db.stat['proxy']['Modbus_Command'] = 0 @@ -1481,7 +1481,7 @@ def 
test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_inv): m.close() def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp20): - config_tsun_inv1 + _ = config_tsun_inv1 # receive more bytes than expected (7 bytes from the next msg) m = MemoryStream(msg_modbus_rsp20+b'\x00\x00\x00\x45\x10\x52\x31', (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 @@ -1513,7 +1513,7 @@ def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp20): @pytest.mark.asyncio async def test_msg_build_modbus_req(config_tsun_inv1, msg_modbus_cmd): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(b'', (0,), True) m.id_str = b"R170000000000001" await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG) @@ -1539,7 +1539,7 @@ async def test_msg_build_modbus_req(config_tsun_inv1, msg_modbus_cmd): m.close() def test_modbus_no_polling(config_no_modbus_poll, msg_get_time): - config_no_modbus_poll + _ = config_no_modbus_poll m = MemoryStream(msg_get_time, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.modbus_polling = False @@ -1560,7 +1560,7 @@ def test_modbus_no_polling(config_no_modbus_poll, msg_get_time): @pytest.mark.asyncio async def test_modbus_polling(config_tsun_inv1, msg_inverter_ind): - config_tsun_inv1 + _ = config_tsun_inv1 assert asyncio.get_running_loop() m = MemoryStream(msg_inverter_ind, (0,)) @@ -1600,7 +1600,7 @@ async def test_modbus_polling(config_tsun_inv1, msg_inverter_ind): m.close() def test_broken_recv_buf(config_tsun_allow_all, broken_recv_buf): - config_tsun_allow_all + _ = config_tsun_allow_all m = MemoryStream(broken_recv_buf, (0,)) m.db.stat['proxy']['Unknown_Ctrl'] = 0 assert m.db.stat['proxy']['Invalid_Data_Type'] == 0 diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 0000000..a6797c0 --- /dev/null +++ b/requirements-test.txt @@ -0,0 +1 @@ +-r ./app/requirements-test.txt \ No newline at end of file From f9c1b83ccd4214369336a45bd8a4260c1f4488e6 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sat, 24 Aug 2024 22:21:55 +0200 Subject: [PATCH 11/39] Sonar qube 4 (#169) * add unit test for inverter.py * fix SonarQube warning --- app/src/gen3plus/solarman_v5.py | 2 +- app/src/inverter.py | 15 ++++-- app/tests/test_inverter.py | 90 +++++++++++++++++++++++++++++++++ app/tests/test_mqtt.py | 6 ++- app/tests/test_singleton.py | 1 + app/tests/test_talent.py | 2 +- system_tests/test_tcp_socket.py | 16 +++--- 7 files changed, 116 insertions(+), 16 deletions(-) create mode 100644 app/tests/test_inverter.py diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 0cbdfab..94089f3 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -172,7 +172,7 @@ class SolarmanV5(Message): self.db.set_db_def_value(Register.POLLING_INTERVAL, self.mb_timeout) self.db.set_db_def_value(Register.HEARTBEAT_INTERVAL, - 120) # fixme + 120) self.new_data['controller'] = True self.state = State.up diff --git a/app/src/inverter.py b/app/src/inverter.py index cf20c96..7f3914f 100644 --- a/app/src/inverter.py +++ b/app/src/inverter.py @@ -1,11 +1,15 @@ import asyncio import logging import json -from config import Config -from mqtt import Mqtt -from infos import Infos +if __name__ == "app.src.inverter": + from app.src.config import Config + from app.src.mqtt import Mqtt + from app.src.infos import Infos +else: # pragma: no cover + from config import Config + from mqtt import Mqtt + from infos import Infos -# logger = logging.getLogger('conn') logger_mqtt = 
logging.getLogger('mqtt') @@ -24,6 +28,7 @@ class Inverter(): cls.proxy_unique_id = ha['proxy_unique_id'] # call Mqtt singleton to establisch the connection to the mqtt broker + print('call Mqtt.init') cls.mqtt = Mqtt(cls._cb_mqtt_is_up) # register all counters which should be reset at midnight. @@ -72,7 +77,7 @@ class Inverter(): Infos.new_stat_data[key] = False @classmethod - def class_close(cls, loop) -> None: + def class_close(cls, loop) -> None: # pragma: no cover logging.debug('Inverter.class_close') logging.info('Close MQTT Task') loop.run_until_complete(cls.mqtt.close()) diff --git a/app/tests/test_inverter.py b/app/tests/test_inverter.py new file mode 100644 index 0000000..40b23bf --- /dev/null +++ b/app/tests/test_inverter.py @@ -0,0 +1,90 @@ +# test_with_pytest.py +import pytest +import asyncio +import aiomqtt +import logging + +from mock import patch, Mock +from app.src.singleton import Singleton +from app.src.inverter import Inverter +from app.src.mqtt import Mqtt +from app.src.gen3plus.solarman_v5 import SolarmanV5 +from app.src.config import Config + + +pytest_plugins = ('pytest_asyncio',) + + +@pytest.fixture(scope="module", autouse=True) +def module_init(): + def new_init(cls, cb_mqtt_is_up): + cb_mqtt_is_up() + + Singleton._instances.clear() + with patch.object(Mqtt, '__init__', new_init): + yield + +@pytest.fixture(scope="module") +def test_port(): + return 1883 + +@pytest.fixture(scope="module") +def test_hostname(): + # if getenv("GITHUB_ACTIONS") == "true": + # return 'mqtt' + # else: + return 'test.mosquitto.org' + +@pytest.fixture +def config_conn(test_hostname, test_port): + Config.act_config = { + 'mqtt':{ + 'host': test_hostname, + 'port': test_port, + 'user': '', + 'passwd': '' + }, + 'ha':{ + 'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'test_1', + 'proxy_unique_id': '' + }, + 'inverters': { + 'allow_all': True, + "R170000000000001":{ + 'node_id': 'inv_1' + } + } + } + +@pytest.mark.asyncio +async def test_inverter_cb(config_conn): + _ = config_conn + + with patch.object(Inverter, '_cb_mqtt_is_up', wraps=Inverter._cb_mqtt_is_up) as spy: + print('call Inverter.class_init') + Inverter.class_init() + assert 'homeassistant/' == Inverter.discovery_prfx + assert 'tsun/' == Inverter.entity_prfx + assert 'test_1/' == Inverter.proxy_node_id + spy.assert_called_once() + +@pytest.mark.asyncio +async def test_mqtt_is_up(config_conn): + _ = config_conn + + with patch.object(Mqtt, 'publish') as spy: + Inverter.class_init() + await Inverter._cb_mqtt_is_up() + spy.assert_called() + +@pytest.mark.asyncio +async def test_mqtt_proxy_statt_invalid(config_conn): + _ = config_conn + + with patch.object(Mqtt, 'publish') as spy: + Inverter.class_init() + await Inverter._async_publ_mqtt_proxy_stat('InValId_kEy') + spy.assert_not_called() diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 7dea973..1d7c5dd 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -5,6 +5,7 @@ import aiomqtt import logging from mock import patch, Mock +from app.src.singleton import Singleton from app.src.mqtt import Mqtt from app.src.modbus import Modbus from app.src.gen3plus.solarman_v5 import SolarmanV5 @@ -13,7 +14,10 @@ from app.src.config import Config pytest_plugins = ('pytest_asyncio',) - +@pytest.fixture(scope="module", autouse=True) +def module_init(): + Singleton._instances.clear() + yield @pytest.fixture(scope="module") def test_port(): diff --git a/app/tests/test_singleton.py 
b/app/tests/test_singleton.py index 2ea82eb..d645e08 100644 --- a/app/tests/test_singleton.py +++ b/app/tests/test_singleton.py @@ -7,6 +7,7 @@ class Test(metaclass=Singleton): pass # is a dummy test class def test_singleton_metaclass(): + Singleton._instances.clear() a = Test() assert 1 == len(Singleton._instances) b = Test() diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 62df532..13f462e 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -1150,7 +1150,7 @@ def test_msg_ota_invalid(config_tsun_inv1, msg_ota_invalid): m.close() def test_msg_unknown(config_tsun_inv1, msg_unknown): - config_tsun_inv1 + _ = config_tsun_inv1 m = MemoryStream(msg_unknown, (0,), False) m.db.stat['proxy']['Unknown_Msg'] = 0 m.read() # read complete msg, and dispatch msg diff --git a/system_tests/test_tcp_socket.py b/system_tests/test_tcp_socket.py index 00ec70f..795184d 100644 --- a/system_tests/test_tcp_socket.py +++ b/system_tests/test_tcp_socket.py @@ -138,9 +138,9 @@ def tempClientConnection(): def test_open_close(): try: - for s in tempClientConnection(): + for _ in tempClientConnection(): pass - except: + except Exception: assert False def test_send_contact_info1(ClientConnection, MsgContactInfo, MsgContactResp): @@ -199,14 +199,14 @@ def test_send_ctrl_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, Msg s = ClientConnection try: s.sendall(MsgTimeStampReq) - data = s.recv(1024) + _ = s.recv(1024) except TimeoutError: pass # time.sleep(2.5) # assert data == MsgTimeStampResp try: s.sendall(MsgContollerInd) - data = s.recv(1024) + _ = s.recv(1024) except TimeoutError: pass @@ -214,16 +214,16 @@ def test_send_inv_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgI s = ClientConnection try: s.sendall(MsgTimeStampReq) - data = s.recv(1024) + _ = s.recv(1024) except TimeoutError: pass # time.sleep(32.5) # assert data == MsgTimeStampResp try: s.sendall(MsgInvData) - data = s.recv(1024) + _ = s.recv(1024) s.sendall(MsgInverterInd) - data = s.recv(1024) + _ = s.recv(1024) except TimeoutError: pass @@ -231,6 +231,6 @@ def test_ota_req(ClientConnection, MsgOtaUpdateReq): s = ClientConnection try: s.sendall(MsgOtaUpdateReq) - data = s.recv(1024) + _ = s.recv(1024) except TimeoutError: pass From 6d9a446bfe561dfb172e7a2917e88d11d2e2773a Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sat, 24 Aug 2024 23:03:02 +0200 Subject: [PATCH 12/39] Sonar qube 5 (#170) * fix SonarLints warnings --- app/tests/test_infos_g3.py | 23 ++++----- app/tests/test_infos_g3p.py | 14 +++-- system_tests/test_tcp_socket.py | 90 ++++++++++++++++----------------- 3 files changed, 65 insertions(+), 62 deletions(-) diff --git a/app/tests/test_infos_g3.py b/app/tests/test_infos_g3.py index e811d90..37a8076 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -325,7 +325,11 @@ def test_build_ha_conf1(contr_data_seq): assert tests==4 +def test_build_ha_conf2(contr_data_seq): + i = InfosG3() + i.static_init() # initialize counter + tests = 0 for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'): if id == 'out_power_123': @@ -344,9 +348,9 @@ def test_build_ha_conf1(contr_data_seq): assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": 
"Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}}) tests +=1 - assert tests==5 + assert tests==1 -def test_build_ha_conf2(contr_data_seq, inv_data_seq, inv_data_seq2): +def test_build_ha_conf3(contr_data_seq, inv_data_seq, inv_data_seq2): i = InfosG3() for key, result in i.parse (contr_data_seq): pass # side effect in calling i.parse() @@ -397,12 +401,9 @@ def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero): assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23}) tests = 0 for key, update in i.parse (inv_data_seq2): - if key == 'total': + if key == 'total' or key == 'env': assert update == False tests +=1 - elif key == 'env': - assert update == False - tests +=1 assert tests==3 assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36}) @@ -442,12 +443,9 @@ def test_must_incr_total2(inv_data_seq2, inv_data_seq2_zero): tests = 0 for key, update in i.parse (inv_data_seq2_zero): - if key == 'total': + if key == 'total' or key == 'env': assert update == False tests +=1 - elif key == 'env': - assert update == False - tests +=1 assert tests==3 assert json.dumps(i.db['total']) == json.dumps({}) @@ -456,12 +454,9 @@ def test_must_incr_total2(inv_data_seq2, inv_data_seq2_zero): tests = 0 for key, update in i.parse (inv_data_seq2): - if key == 'total': + if key == 'total' or key == 'env': assert update == True tests +=1 - elif key == 'env': - assert update == True - tests +=1 assert tests==3 assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36}) diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index 21ef570..be27aed 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -139,7 +139,11 @@ def test_build_ha_conf1(): assert tests==7 +def test_build_ha_conf2(): + i = InfosG3P(client_mode=False) + i.static_init() # initialize counter + tests = 0 for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'): if id == 'out_power_123': @@ -161,9 +165,9 @@ def test_build_ha_conf1(): assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}}) tests +=1 - assert tests==8 + assert tests==1 -def test_build_ha_conf2(): +def test_build_ha_conf3(): i = InfosG3P(client_mode=True) i.static_init() # initialize counter @@ -209,7 +213,11 @@ def test_build_ha_conf2(): assert tests==7 +def test_build_ha_conf4(): + i = InfosG3P(client_mode=True) + i.static_init() # initialize counter + tests = 0 for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'): if id == 'out_power_123': @@ -231,7 +239,7 @@ def test_build_ha_conf2(): assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}}) tests +=1 - assert tests==8 + assert tests==1 def test_exception_and_eval(inverter_data: bytes): diff --git 
a/system_tests/test_tcp_socket.py b/system_tests/test_tcp_socket.py index 795184d..e663577 100644 --- a/system_tests/test_tcp_socket.py +++ b/system_tests/test_tcp_socket.py @@ -13,31 +13,31 @@ def get_invalid_sn(): @pytest.fixture -def MsgContactInfo(): # Contact Info message +def msg_contact_info(): # Contact Info message return b'\x00\x00\x00\x2c\x10'+get_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456' @pytest.fixture -def MsgContactResp(): # Contact Response message +def msg_contact_resp(): # Contact Response message return b'\x00\x00\x00\x14\x10'+get_sn()+b'\x91\x00\x01' @pytest.fixture -def MsgContactInfo2(): # Contact Info message +def msg_contact_info2(): # Contact Info message return b'\x00\x00\x00\x2c\x10'+get_invalid_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456' @pytest.fixture -def MsgContactResp2(): # Contact Response message +def msg_contact_resp2(): # Contact Response message return b'\x00\x00\x00\x14\x10'+get_invalid_sn()+b'\x91\x00\x01' @pytest.fixture -def MsgTimeStampReq(): # Get Time Request message +def msg_timestamp_req(): # Get Time Request message return b'\x00\x00\x00\x13\x10'+get_sn()+b'\x91\x22' @pytest.fixture -def MsgTimeStampResp(): # Get Time Resonse message +def msg_timestamp_resp(): # Get Time Resonse message return b'\x00\x00\x00\x1b\x10'+get_sn()+b'\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80' @pytest.fixture -def MsgContollerInd(): # Data indication from the controller +def msg_controller_ind(): # Data indication from the controller msg = b'\x00\x00\x01\x2f\x10'+ get_sn() + b'\x91\x71\x0e\x10\x00\x00\x10'+get_sn() msg += b'\x01\x00\x00\x01\x89\xc6\x63\x55\x50' msg += b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f' @@ -49,7 +49,7 @@ def MsgContollerInd(): # Data indication from the controller return msg @pytest.fixture -def MsgInvData(): # Data indication from the controller +def msg_inv_data(): # Data indication from the controller msg = b'\x00\x00\x00\x8b\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no() msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08' msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28' @@ -57,7 +57,7 @@ def MsgInvData(): # Data indication from the controller return msg @pytest.fixture -def MsgInverterInd(): # Data indication from the inverter +def msg_inverter_ind(): # Data indication from the inverter msg = b'\x00\x00\x05\x02\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no() msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08' msg += b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00' @@ -94,7 +94,7 @@ def MsgInverterInd(): # Data indication from the inverter return msg @pytest.fixture -def MsgOtaUpdateReq(): # Over the air update request from talent cloud +def msg_ota_update_req(): # Over the air update request from talent cloud msg = b'\x00\x00\x01\x16\x10'+ get_sn() + b'\x70\x13\x01\x02\x76\x35' msg += b'\x70\x68\x74\x74\x70' msg += b'\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f' @@ -117,7 +117,7 @@ def MsgOtaUpdateReq(): # Over the air update request from talent cloud @pytest.fixture(scope="session") -def ClientConnection(): +def client_connection(): host = 'logger.talent-monitoring.com' port = 5005 with socket.socket(socket.AF_INET, 
socket.SOCK_STREAM) as s: @@ -127,7 +127,7 @@ def ClientConnection(): time.sleep(2.5) s.close() -def tempClientConnection(): +def tempclient_connection(): host = 'logger.talent-monitoring.com' port = 5005 with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: @@ -138,25 +138,25 @@ def tempClientConnection(): def test_open_close(): try: - for _ in tempClientConnection(): - pass + for _ in tempclient_connection(): + pass # test side effect of generator except Exception: assert False -def test_send_contact_info1(ClientConnection, MsgContactInfo, MsgContactResp): - s = ClientConnection +def test_send_contact_info1(client_connection, msg_contact_info, msg_contact_resp): + s = client_connection try: - s.sendall(MsgContactInfo) + s.sendall(msg_contact_info) data = s.recv(1024) except TimeoutError: pass - assert data == MsgContactResp + assert data == msg_contact_resp -def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, MsgContactResp): - s = ClientConnection +def test_send_contact_info2(client_connection, msg_contact_info2, msg_contact_info, msg_contact_resp): + s = client_connection try: - s.sendall(MsgContactInfo2) + s.sendall(msg_contact_info2) data = s.recv(1024) except TimeoutError: pass @@ -164,73 +164,73 @@ def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, M assert False try: - s.sendall(MsgContactInfo) + s.sendall(msg_contact_info) data = s.recv(1024) except TimeoutError: pass - assert data == MsgContactResp + assert data == msg_contact_resp -def test_send_contact_info3(ClientConnection, MsgContactInfo, MsgContactResp, MsgTimeStampReq): - s = ClientConnection +def test_send_contact_info3(client_connection, msg_contact_info, msg_contact_resp, msg_timestamp_req): + s = client_connection try: - s.sendall(MsgContactInfo) + s.sendall(msg_contact_info) data = s.recv(1024) except TimeoutError: pass - assert data == MsgContactResp + assert data == msg_contact_resp try: - s.sendall(MsgTimeStampReq) + s.sendall(msg_timestamp_req) data = s.recv(1024) except TimeoutError: pass -def test_send_contact_resp(ClientConnection, MsgContactResp): - s = ClientConnection +def test_send_contact_resp(client_connection, msg_contact_resp): + s = client_connection try: - s.sendall(MsgContactResp) + s.sendall(msg_contact_resp) data = s.recv(1024) except TimeoutError: pass else: assert data == b'' -def test_send_ctrl_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgContollerInd): - s = ClientConnection +def test_send_ctrl_data(client_connection, msg_timestamp_req, msg_timestamp_resp, msg_controller_ind): + s = client_connection try: - s.sendall(MsgTimeStampReq) + s.sendall(msg_timestamp_req) _ = s.recv(1024) except TimeoutError: pass # time.sleep(2.5) - # assert data == MsgTimeStampResp + # assert data == msg_timestamp_resp try: - s.sendall(MsgContollerInd) + s.sendall(msg_controller_ind) _ = s.recv(1024) except TimeoutError: pass -def test_send_inv_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgInvData, MsgInverterInd): - s = ClientConnection +def test_send_inv_data(client_connection, msg_timestamp_req, msg_timestamp_resp, msg_inv_data, msg_inverter_ind): + s = client_connection try: - s.sendall(MsgTimeStampReq) + s.sendall(msg_timestamp_req) _ = s.recv(1024) except TimeoutError: pass # time.sleep(32.5) - # assert data == MsgTimeStampResp + # assert data == msg_timestamp_resp try: - s.sendall(MsgInvData) + s.sendall(msg_inv_data) _ = s.recv(1024) - s.sendall(MsgInverterInd) + s.sendall(msg_inverter_ind) _ = s.recv(1024) except 
TimeoutError: pass -def test_ota_req(ClientConnection, MsgOtaUpdateReq): - s = ClientConnection +def test_ota_req(client_connection, msg_ota_update_req): + s = client_connection try: - s.sendall(MsgOtaUpdateReq) + s.sendall(msg_ota_update_req) _ = s.recv(1024) except TimeoutError: pass From 7dc2595d7161dac40185aea604ce409f916115a3 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 25 Aug 2024 12:02:27 +0200 Subject: [PATCH 13/39] use random IP adresses for unit tests --- app/tests/test_infos_g3p.py | 27 +++++++++++++++++++++------ app/tests/test_solarman.py | 13 ++++++++++--- 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index be27aed..f7aef4d 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -1,18 +1,33 @@ # test_with_pytest.py -import pytest, json, math +import pytest, json, math, random from app.src.infos import Register from app.src.gen3plus.infos_g3p import InfosG3P from app.src.gen3plus.infos_g3p import RegisterMap +@pytest.fixture(scope="session") +def str_test_ip(): + ip = ".".join(str(random.randint(1, 254)) for _ in range(4)) + print(f'random_ip: {ip}') + return ip + +@pytest.fixture(scope="session") +def bytes_test_ip(str_test_ip): + ip = bytes(str.encode(str_test_ip)) + l = len(ip) + if l < 16: + ip = ip + bytearray(16-l) + print(f'random_ip: {ip}') + return ip + @pytest.fixture -def device_data(): # 0x4110 ftype: 0x02 +def device_data(bytes_test_ip): # 0x4110 ftype: 0x02 msg = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xba\xd2\x00\x00' msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53' msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e' msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' - msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54\x31\x39\x32\x2e' - msg += b'\x31\x36\x38\x2e\x38\x30\x2e\x34\x39\x00\x00\x00\x0f\x00\x01\xb0' + msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54' + bytes_test_ip + msg += b'\x0f\x00\x01\xb0' msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00' msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00' @@ -63,14 +78,14 @@ def test_default_db(): "collector": {"Chip_Type": "IGEN TECH"}, }) -def test_parse_4110(device_data: bytes): +def test_parse_4110(str_test_ip, device_data: bytes): i = InfosG3P(client_mode=False) i.db.clear() for key, update in i.parse (device_data, 0x41, 2): pass # side effect is calling generator i.parse() assert json.dumps(i.db) == json.dumps({ - 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": "192.168.80.49", "Sensor_List": "02b0"}, + 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": str_test_ip, "Sensor_List": "02b0"}, 'collector': {"Chip_Model": "LSW5BLE_17_02B0_1.05", "Collector_Fw_Version": "V1.1.00.0B"}, }) diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index c9227bd..521ef9b 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -3,6 +3,7 @@ import struct import time import asyncio import logging +import random from math import isclose from app.src.gen3plus.solarman_v5 import SolarmanV5 from app.src.config import Config @@ -148,6 +149,12 @@ def incorrect_checksum(buf): checksum = (sum(buf[1:])+1) & 0xff 
return checksum.to_bytes(length=1) +@pytest.fixture(scope="session") +def str_test_ip(): + ip = ".".join(str(random.randint(1, 254)) for _ in range(4)) + print(f'random_ip: {ip}') + return ip + @pytest.fixture def device_ind_msg(): # 0x4110 msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xba\xd2\x00\x00' @@ -1692,7 +1699,7 @@ async def test_modbus_polling(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp m.close() @pytest.mark.asyncio -async def test_start_client_mode(config_tsun_inv1): +async def test_start_client_mode(config_tsun_inv1, str_test_ip): _ = config_tsun_inv1 assert asyncio.get_running_loop() m = MemoryStream(b'') @@ -1700,9 +1707,9 @@ async def test_start_client_mode(config_tsun_inv1): assert m.no_forwarding == False assert m.mb_timer.tim == None assert asyncio.get_running_loop() == m.mb_timer.loop - await m.send_start_cmd(get_sn_int(), '192.168.1.1', m.mb_first_timeout) + await m.send_start_cmd(get_sn_int(), str_test_ip, m.mb_first_timeout) assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15') - assert m.db.get_db_value(Register.IP_ADDRESS) == '192.168.1.1' + assert m.db.get_db_value(Register.IP_ADDRESS) == str_test_ip assert isclose(m.db.get_db_value(Register.POLLING_INTERVAL), 0.5) assert m.db.get_db_value(Register.HEARTBEAT_INTERVAL) == 120 From 1985557bcef6a25bb0228366ff5d4fefe5be8c8f Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sun, 25 Aug 2024 23:05:25 +0200 Subject: [PATCH 14/39] Docker: The description ist missing (#171) Fixes #167 --- app/Dockerfile | 12 +----- app/build.sh | 39 +++++-------------- app/docker-bake.hcl | 94 +++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 105 insertions(+), 40 deletions(-) create mode 100644 app/docker-bake.hcl diff --git a/app/Dockerfile b/app/Dockerfile index 90e8b0e..300c1e0 100644 --- a/app/Dockerfile +++ b/app/Dockerfile @@ -62,18 +62,10 @@ RUN python -m pip install --no-cache --no-index /root/wheels/* && \ COPY --chmod=0700 entrypoint.sh /root/entrypoint.sh COPY config . COPY src . -RUN echo ${VERSION} > /proxy-version.txt -RUN date > /build-date.txt +RUN echo ${VERSION} > /proxy-version.txt \ + && date > /build-date.txt EXPOSE 5005 8127 10000 # command to run on container start ENTRYPOINT ["/root/entrypoint.sh"] CMD [ "python3", "./server.py" ] - - -LABEL org.opencontainers.image.title="TSUN Gen3 Proxy" -LABEL org.opencontainers.image.authors="Stefan Allius" -LABEL org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy -LABEL org.opencontainers.image.description='This proxy enables a reliable connection between TSUN third generation inverters (eg. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations.' -LABEL org.opencontainers.image.licenses="BSD-3-Clause" -LABEL org.opencontainers.image.vendor="Stefan Allius" diff --git a/app/build.sh b/app/build.sh index 7d66c91..ac1c23c 100755 --- a/app/build.sh +++ b/app/build.sh @@ -17,6 +17,7 @@ VERSION="${VERSION:1}" arr=(${VERSION//./ }) MAJOR=${arr[0]} IMAGE=tsun-gen3-proxy + GREEN='\033[0;32m' BLUE='\033[0;34m' NC='\033[0m' @@ -26,44 +27,22 @@ IMAGE=docker.io/sallius/${IMAGE} VERSION=${VERSION}+$1 elif [[ $1 == rc ]] || [[ $1 == rel ]] || [[ $1 == preview ]] ;then IMAGE=ghcr.io/s-allius/${IMAGE} +echo 'login to ghcr.io' +echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin else echo argument missing! 
echo try: $0 '[debug|dev|preview|rc|rel]' exit 1 fi -if [[ $1 == debug ]] ;then -BUILD_ENV="dev" -else -BUILD_ENV="production" -fi - -BUILD_CMD="buildx build --push --build-arg VERSION=${VERSION} --build-arg environment=${BUILD_ENV} --attest type=provenance,mode=max --attest type=sbom,generator=docker/scout-sbom-indexer:latest" -ARCH="--platform linux/amd64,linux/arm64,linux/arm/v7" -LABELS="--label org.opencontainers.image.created=${BUILD_DATE} --label org.opencontainers.image.version=${VERSION} --label org.opencontainers.image.revision=${BRANCH}" +export IMAGE +export VERSION +export BUILD_DATE +export BRANCH +export MAJOR echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE -if [[ $1 == debug ]];then -docker ${BUILD_CMD} ${ARCH} ${LABELS} --build-arg "LOG_LVL=DEBUG" -t ${IMAGE}:debug app - -elif [[ $1 == dev ]];then -docker ${BUILD_CMD} ${ARCH} ${LABELS} -t ${IMAGE}:dev app - -elif [[ $1 == preview ]];then -echo 'login to ghcr.io' -echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin -docker ${BUILD_CMD} ${ARCH} ${LABELS} -t ${IMAGE}:preview -t ${IMAGE}:${VERSION} app - -elif [[ $1 == rc ]];then -echo 'login to ghcr.io' -echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin -docker ${BUILD_CMD} ${ARCH} ${LABELS} -t ${IMAGE}:rc -t ${IMAGE}:${VERSION} app - -elif [[ $1 == rel ]];then -echo 'login to ghcr.io' -echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin -docker ${BUILD_CMD} ${ARCH} ${LABELS} --no-cache -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app -fi +docker buildx bake -f app/docker-bake.hcl $1 echo -e "${BLUE} => checking docker-compose.yaml file${NC}" docker-compose config -q diff --git a/app/docker-bake.hcl b/app/docker-bake.hcl new file mode 100644 index 0000000..dbd90de --- /dev/null +++ b/app/docker-bake.hcl @@ -0,0 +1,94 @@ +variable "IMAGE" { + default = "tsun-gen3-proxy" +} +variable "VERSION" { + default = "0.0.0" +} +variable "MAJOR" { + default = "0" +} +variable "BUILD_DATE" { + default = "dev" +} +variable "BRANCH" { + default = "" +} +variable "DESCRIPTION" { + default = "This proxy enables a reliable connection between TSUN third generation inverters (eg. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations." 
+} + +target "_common" { + context = "app" + dockerfile = "Dockerfile" + args = { + VERSION = "${VERSION}" + environment = "production" + } + attest = [ + "type =provenance,mode=max", + "type =sbom,generator=docker/scout-sbom-indexer:latest" + ] + annotations = [ + "index:org.opencontainers.image.title=TSUN Gen3 Proxy", + "index:org.opencontainers.image.authors=Stefan Allius", + "index:org.opencontainers.image.created=${BUILD_DATE}", + "index:org.opencontainers.image.version=${VERSION}", + "index:org.opencontainers.image.revision=${BRANCH}", + "index:org.opencontainers.image.description=${DESCRIPTION}", + "index:org.opencontainers.image.licenses=BSD-3-Clause", + "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy" + ] + labels = { + "org.opencontainers.image.title" = "TSUN Gen3 Proxy" + "org.opencontainers.image.authors" = "Stefan Allius" + "org.opencontainers.image.created" = "${BUILD_DATE}" + "org.opencontainers.image.version" = "${VERSION}" + "org.opencontainers.image.revision" = "${BRANCH}" + "org.opencontainers.image.description" = "${DESCRIPTION}" + "org.opencontainers.image.licenses" = "BSD-3-Clause" + "org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy" +} + output = [ + "type=image,push=true" + ] + + no-cache = false + platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"] +} + +target "_debug" { + args = { + LOG_LVL = "DEBUG" + environment = "dev" + } +} +target "_prod" { + args = { + } +} +target "debug" { + inherits = ["_common", "_debug"] + tags = ["${IMAGE}:debug"] +} + +target "dev" { + inherits = ["_common"] + tags = ["${IMAGE}:dev"] +} + +target "preview" { + inherits = ["_common", "_prod"] + tags = ["${IMAGE}:dev", "${IMAGE}:${VERSION}"] +} + +target "rc" { + inherits = ["_common", "_prod"] + tags = ["${IMAGE}:rc", "${IMAGE}:${VERSION}"] +} + +target "rel" { + inherits = ["_common", "_prod"] + + tags = ["${IMAGE}:latest", "${IMAGE}:${MAJOR}", "${IMAGE}:${VERSION}"] + no-cache = true +} From d44726c0f3db007dadd4b9756e5f231a2c615261 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sun, 25 Aug 2024 23:28:35 +0200 Subject: [PATCH 15/39] S allius/issue167 (#172) * cleanup --- app/docker-bake.hcl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/docker-bake.hcl b/app/docker-bake.hcl index dbd90de..b6d58d9 100644 --- a/app/docker-bake.hcl +++ b/app/docker-bake.hcl @@ -47,7 +47,7 @@ target "_common" { "org.opencontainers.image.description" = "${DESCRIPTION}" "org.opencontainers.image.licenses" = "BSD-3-Clause" "org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy" -} + } output = [ "type=image,push=true" ] @@ -88,7 +88,6 @@ target "rc" { target "rel" { inherits = ["_common", "_prod"] - tags = ["${IMAGE}:latest", "${IMAGE}:${MAJOR}", "${IMAGE}:${VERSION}"] no-cache = true } From 7f41365815fed4ecc58ea045b990e3d4a66df6f4 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Mon, 26 Aug 2024 23:37:24 +0200 Subject: [PATCH 16/39] Sonar qube 6 (#174) * test class ModbusConn --- app/src/async_stream.py | 7 ++- app/src/gen3plus/connection_g3p.py | 9 +++- app/src/gen3plus/inverter_g3p.py | 15 ++++-- app/src/inverter.py | 1 - app/src/modbus_tcp.py | 8 ++- app/tests/test_modbus_tcp.py | 78 ++++++++++++++++++++++++++++++ 6 files changed, 108 insertions(+), 10 deletions(-) create mode 100644 app/tests/test_modbus_tcp.py diff --git a/app/src/async_stream.py b/app/src/async_stream.py index 
f6b58b5..5892ba2 100644 --- a/app/src/async_stream.py +++ b/app/src/async_stream.py @@ -3,10 +3,15 @@ import logging import traceback import time from asyncio import StreamReader, StreamWriter -from messages import hex_dump_memory, State from typing import Self from itertools import count +if __name__ == "app.src.async_stream": + from app.src.messages import hex_dump_memory, State +else: # pragma: no cover + from messages import hex_dump_memory, State + + import gc logger = logging.getLogger('conn') diff --git a/app/src/gen3plus/connection_g3p.py b/app/src/gen3plus/connection_g3p.py index 6591110..dc4eca1 100644 --- a/app/src/gen3plus/connection_g3p.py +++ b/app/src/gen3plus/connection_g3p.py @@ -1,7 +1,12 @@ import logging from asyncio import StreamReader, StreamWriter -from async_stream import AsyncStream -from gen3plus.solarman_v5 import SolarmanV5 + +if __name__ == "app.src.gen3plus.connection_g3p": + from app.src.async_stream import AsyncStream + from app.src.gen3plus.solarman_v5 import SolarmanV5 +else: # pragma: no cover + from async_stream import AsyncStream + from gen3plus.solarman_v5 import SolarmanV5 logger = logging.getLogger('conn') diff --git a/app/src/gen3plus/inverter_g3p.py b/app/src/gen3plus/inverter_g3p.py index 6c1d6b5..d9bf0f2 100644 --- a/app/src/gen3plus/inverter_g3p.py +++ b/app/src/gen3plus/inverter_g3p.py @@ -3,11 +3,18 @@ import traceback import json import asyncio from asyncio import StreamReader, StreamWriter -from config import Config -from inverter import Inverter -from gen3plus.connection_g3p import ConnectionG3P from aiomqtt import MqttCodeError -from infos import Infos + +if __name__ == "app.src.gen3plus.inverter_g3p": + from app.src.config import Config + from app.src.inverter import Inverter + from app.src.gen3plus.connection_g3p import ConnectionG3P + from app.src.infos import Infos +else: # pragma: no cover + from config import Config + from inverter import Inverter + from gen3plus.connection_g3p import ConnectionG3P + from infos import Infos logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/inverter.py b/app/src/inverter.py index 7f3914f..996fa0f 100644 --- a/app/src/inverter.py +++ b/app/src/inverter.py @@ -28,7 +28,6 @@ class Inverter(): cls.proxy_unique_id = ha['proxy_unique_id'] # call Mqtt singleton to establisch the connection to the mqtt broker - print('call Mqtt.init') cls.mqtt = Mqtt(cls._cb_mqtt_is_up) # register all counters which should be reset at midnight. 
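
The import changes in this patch series repeat one idiom: each module checks its own __name__ so its dependencies resolve both when pytest imports it as part of the app.src package and when the proxy imports it flat from app/src. A minimal sketch of that idiom follows; the module name and the accessor function are illustrative stand-ins, not part of any patch:

    # some_module.py -- hypothetical module living under app/src
    if __name__ == "app.src.some_module":
        # pytest imports the file as part of the app.src package
        from app.src.config import Config
    else:  # pragma: no cover
        # at runtime the proxy imports it flat from app/src
        from config import Config

    def mqtt_host() -> str:
        # illustrative only: assumes Config.get('mqtt') returns the mqtt
        # section of the active configuration, as the unit tests set it up
        return Config.get('mqtt')['host']

The '# pragma: no cover' marker excludes the flat-import branch, which the unit tests never execute, so it does not drag down the reported coverage.
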
diff --git a/app/src/modbus_tcp.py b/app/src/modbus_tcp.py index 5116bc8..a06e38f 100644 --- a/app/src/modbus_tcp.py +++ b/app/src/modbus_tcp.py @@ -1,9 +1,13 @@ import logging import traceback import asyncio -from config import Config -from gen3plus.inverter_g3p import InverterG3P +if __name__ == "app.src.modbus_tcp": + from app.src.config import Config + from app.src.gen3plus.inverter_g3p import InverterG3P +else: # pragma: no cover + from config import Config + from gen3plus.inverter_g3p import InverterG3P logger = logging.getLogger('conn') diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py new file mode 100644 index 0000000..7792894 --- /dev/null +++ b/app/tests/test_modbus_tcp.py @@ -0,0 +1,78 @@ +# test_with_pytest.py +import pytest +import asyncio + +from mock import patch +from app.src.singleton import Singleton +from app.src.config import Config +from app.src.infos import Infos +from app.src.modbus_tcp import ModbusConn + + +pytest_plugins = ('pytest_asyncio',) + +# initialize the proxy statistics +Infos.static_init() + +@pytest.fixture(scope="module", autouse=True) +def module_init(): + Singleton._instances.clear() + yield + +@pytest.fixture(scope="module") +def test_port(): + return 1883 + +@pytest.fixture(scope="module") +def test_hostname(): + # if getenv("GITHUB_ACTIONS") == "true": + # return 'mqtt' + # else: + return 'test.mosquitto.org' + +@pytest.fixture +def config_mqtt_conn(test_hostname, test_port): + Config.act_config = {'mqtt':{'host': test_hostname, 'port': test_port, 'user': '', 'passwd': ''}, + 'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'} + } + +@pytest.fixture +def config_no_conn(test_port): + Config.act_config = {'mqtt':{'host': "", 'port': test_port, 'user': '', 'passwd': ''}, + 'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'} + } + +class FakeReader(): + pass + + +class FakeWriter(): + pass + + +@pytest.fixture +def patch_open(): + async def new_conn(conn): + await asyncio.sleep(0.01) + return FakeReader(), FakeWriter() + + def new_open(host: str, port: int): + return new_conn(None) + + with patch.object(asyncio, 'open_connection', new_open) as conn: + yield conn + + +@pytest.mark.asyncio +async def test_modbus_conn(patch_open): + _ = patch_open + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + + async with ModbusConn('test.local', 1234) as stream: + assert stream.node_id == 'G3P' + assert stream.addr == ('test.local', 1234) + assert type(stream.reader) is FakeReader + assert type(stream.writer) is FakeWriter + assert Infos.stat['proxy']['Inverter_Cnt'] == 1 + + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 From d2b88ab838cb3c5d13c74551e2eaef9b8e1e9943 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Thu, 29 Aug 2024 23:47:30 +0200 Subject: [PATCH 17/39] Sonar qube 3 (#178) * add more unit tests --- app/src/modbus_tcp.py | 6 +- app/tests/test_modbus_tcp.py | 186 ++++++++++++++++++++++++++++++++--- 2 files changed, 179 insertions(+), 13 deletions(-) diff --git a/app/src/modbus_tcp.py b/app/src/modbus_tcp.py index a06e38f..429710b 100644 --- a/app/src/modbus_tcp.py +++ b/app/src/modbus_tcp.py @@ -38,7 +38,9 @@ class ModbusConn(): class ModbusTcp(): - def __init__(self, loop) -> None: + def __init__(self, loop, tim_restart=10) -> None: + self.tim_restart = tim_restart + inverters = Config.get('inverters') # logging.info(f'Inverters: {inverters}') @@ -79,4 +81,4 @@ class 
ModbusTcp(): f"ModbusTcpCreate: Exception for {(host, port)}:\n" f"{traceback.format_exc()}") - await asyncio.sleep(10) + await asyncio.sleep(self.tim_restart) diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index 7792894..c2ed8a2 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -3,10 +3,14 @@ import pytest import asyncio from mock import patch +from enum import Enum from app.src.singleton import Singleton from app.src.config import Config from app.src.infos import Infos -from app.src.modbus_tcp import ModbusConn +from app.src.mqtt import Mqtt +from app.src.messages import Message, State +from app.src.inverter import Inverter +from app.src.modbus_tcp import ModbusConn, ModbusTcp pytest_plugins = ('pytest_asyncio',) @@ -31,23 +35,76 @@ def test_hostname(): return 'test.mosquitto.org' @pytest.fixture -def config_mqtt_conn(test_hostname, test_port): - Config.act_config = {'mqtt':{'host': test_hostname, 'port': test_port, 'user': '', 'passwd': ''}, - 'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'} +def config_conn(test_hostname, test_port): + Config.act_config = { + 'mqtt':{ + 'host': test_hostname, + 'port': test_port, + 'user': '', + 'passwd': '' + }, + 'ha':{ + 'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'test_1', + 'proxy_unique_id': '' + }, + 'inverters':{ + 'allow_all': True, + "R170000000000001":{ + 'node_id': 'inv_1' + }, + "Y170000000000001":{ + 'node_id': 'inv_2', + 'monitor_sn': 2000000000, + 'modbus_polling': True, + 'suggested_area': "", + 'sensor_list': 0x2b0, + 'client_mode':{ + 'host': '192.168.0.1', + 'port': 8899 + } + } } + } -@pytest.fixture -def config_no_conn(test_port): - Config.act_config = {'mqtt':{'host': "", 'port': test_port, 'user': '', 'passwd': ''}, - 'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'} - } + +class TestType(Enum): + RD_TEST_0_BYTES = 1 + RD_TEST_TIMEOUT = 2 + + +test = TestType.RD_TEST_0_BYTES class FakeReader(): - pass + def __init__(self): + self.on_recv = asyncio.Event() + async def read(self, max_len: int): + await self.on_recv.wait() + if test == TestType.RD_TEST_0_BYTES: + return b'' + elif test == TestType.RD_TEST_TIMEOUT: + raise TimeoutError + def feed_eof(self): + return class FakeWriter(): - pass + def write(self, buf: bytes): + return + def get_extra_info(self, sel: str): + if sel == 'peername': + return 'remote.intern' + elif sel == 'sockname': + return 'sock:1234' + assert False + def is_closing(self): + return False + def close(self): + return + async def wait_closed(self): + return @pytest.fixture @@ -57,11 +114,19 @@ def patch_open(): return FakeReader(), FakeWriter() def new_open(host: str, port: int): + global test + if test == TestType.RD_TEST_TIMEOUT: + raise TimeoutError return new_conn(None) with patch.object(asyncio, 'open_connection', new_open) as conn: yield conn +@pytest.fixture +def patch_no_mqtt(): + with patch.object(Mqtt, 'publish') as conn: + yield conn + @pytest.mark.asyncio async def test_modbus_conn(patch_open): @@ -76,3 +141,102 @@ async def test_modbus_conn(patch_open): assert Infos.stat['proxy']['Inverter_Cnt'] == 1 assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + +@pytest.mark.asyncio +async def test_modbus_no_cnf(): + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + loop = asyncio.get_event_loop() + ModbusTcp(loop) + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + 
+@pytest.mark.asyncio +async def test_modbus_cnf1(config_conn, patch_open): + _ = config_conn + _ = patch_open + global test + assert asyncio.get_running_loop() + Inverter.class_init() + test = TestType.RD_TEST_TIMEOUT + + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + loop = asyncio.get_event_loop() + ModbusTcp(loop) + await asyncio.sleep(0.1) + for m in Message: + if (m.node_id == 'inv_2'): + assert False + + await asyncio.sleep(0.1) + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + +@pytest.mark.asyncio +async def test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open): + _ = config_conn + _ = patch_open + _ = patch_no_mqtt + global test + assert asyncio.get_running_loop() + Inverter.class_init() + test = TestType.RD_TEST_0_BYTES + + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + ModbusTcp(asyncio.get_event_loop()) + await asyncio.sleep(0.1) + test = 0 + for m in Message: + if (m.node_id == 'inv_2'): + test += 1 + assert Infos.stat['proxy']['Inverter_Cnt'] == 1 + m.shutdown_started = True + m.reader.on_recv.set() + del m + + assert 1 == test + await asyncio.sleep(0.1) + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + # check that the connection is released + for m in Message: + if (m.node_id == 'inv_2'): + assert False + + +@pytest.mark.asyncio +async def test_modbus_cnf3(config_conn, patch_no_mqtt, patch_open): + _ = config_conn + _ = patch_open + _ = patch_no_mqtt + global test + assert asyncio.get_running_loop() + Inverter.class_init() + test = TestType.RD_TEST_0_BYTES + + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + ModbusTcp(asyncio.get_event_loop(), tim_restart= 0.1) + await asyncio.sleep(0.1) + test = 0 + for m in Message: + if (m.node_id == 'inv_2'): + assert Infos.stat['proxy']['Inverter_Cnt'] == 1 + m.shutdown_started = False + m.reader.on_recv.set() + test += 1 + await asyncio.sleep(0.1) + assert m.state == State.closed + + assert 1 == test + await asyncio.sleep(0.1) + assert Infos.stat['proxy']['Inverter_Cnt'] == 1 + # check that the connection is released + for m in Message: + if (m.node_id == 'inv_2'): + test += 1 + m.shutdown_started = True + m.reader.on_recv.set() + del m + + assert 3 == test + await asyncio.sleep(0.1) + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + for m in Message: + if (m.node_id == 'inv_2'): + assert False From 215dcd98e6288316fa705561ec608db4870c0342 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 3 Sep 2024 17:22:34 +0200 Subject: [PATCH 18/39] GEN3: don't crash on overwritten msg in the receive buffer --- CHANGELOG.md | 1 + app/src/async_stream.py | 1 + app/src/gen3/talent.py | 7 ++ app/tests/test_talent.py | 181 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 190 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e41d65b..3db2d75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- GEN3: don't crash on overwritten msg in the receive buffer - Reading the version string from the image updates it even if the image is re-pulled without re-deployment ## [0.10.1] - 2024-08-10 diff --git a/app/src/async_stream.py b/app/src/async_stream.py index 5892ba2..ae7e584 100644 --- a/app/src/async_stream.py +++ b/app/src/async_stream.py @@ -147,6 +147,7 @@ class AsyncStream(): logger.error( f"Exception for {self.addr}:\n" f"{traceback.format_exc()}") + await asyncio.sleep(0) # be cooperative to other task async def async_write(self, headline: str = 'Transmit to ') -> None: """Async write handler to transmit the 
send_buffer""" diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 611c9fc..015efd9 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -294,6 +294,13 @@ class Talent(Message): result = struct.unpack_from('!lB', buf, 0) msg_len = result[0] # len of complete message id_len = result[1] # len of variable id string + if id_len > 17: + logger.warning(f'len of ID string must == 16 but is {id_len}') + self.inc_counter('Invalid_Msg_Format') + + # erase broken recv buffer + self._recv_buffer = bytearray() + return hdr_len = 5+id_len+2 diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 13f462e..16bb8d8 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -526,6 +526,166 @@ def broken_recv_buf(): # There are two message in the buffer, but the second has msg += b'\x08\x00\x00\x00\x00\x31' return msg +@pytest.fixture +def multiple_recv_buf(): # There are three message in the buffer, but the second has overwritten the first partly + msg = b'\x00\x00\x05\x02\x10R170000000000001\x91\x04\x01\x90\x00\x01\x10R170000000000001' + msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08' + msg += b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01' + msg += b'\x00\x00\x00\xc8\x53\x00\x00\x00\x00\x01\x2c\x53\x00\x02\x00\x00' # | ....S.....,S.... + msg += b'\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00\x00\x00' # | ..I........S.... + msg += b'\x13\x10\x52\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30' # | ..R17E7307021D00 + msg += b'\x36\x41\x91\x22\x00\x00\x03\xbf\x10\x52\x31\x37\x45\x37\x33\x30' # | 6A.".....R17E730 + msg += b'\x37\x30\x32\x31\x44\x30\x30\x36\x41\x91\x71\x0e\x10\x00\x00\x10' # | 7021D006A.q..... + msg += b'\x52\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41' # | R17E7307021D006A + msg += b'\x01\x00\x00\x01\x91\xa3\xfe\xaf\x98\x00\x00\x00\x35\x00\x09\x2b' # | ............5..+ + msg += b'\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30' # | .T.RSW_400_V1.00 + msg += b'\x2e\x31\x37\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f\x6e\x00' # | .17..'.T.Raymon. + msg += b'\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31' # | ./.T.RSW-1-10001 + msg += b'\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f' # | ..Z.T.t.raymonio + msg += b'\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54\x1c\x6c\x6f\x67\x67\x65' # | t.com..Z.T.logge + msg += b'\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72' # | r.talent-monitor + msg += b'\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x2f\x00\x54\x10\xff\xff\xff' # | ing.com../.T.... + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x32' # | ...............2 + msg += b'\xe8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | .T.............. + msg += b'\xff\xff\xff\x00\x0d\x36\xd0\x54\x10\xff\xff\xff\xff\xff\xff\xff' # | .....6.T........ + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x3a\xb8\x54\x10\xff' # | ...........:.T.. + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00' # | ................ + msg += b'\x0d\x3e\xa0\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | .>.T............ + msg += b'\xff\xff\xff\xff\xff\x00\x0d\x42\x88\x54\x10\xff\xff\xff\xff\xff' # | .......B.T...... + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x46\x70\x54' # | .............FpT + msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | ................ + msg += b'\xff\x00\x0d\x4a\x58\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | ...JXT.......... 
+ msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x4e\x40\x54\x10\xff\xff\xff' # | .........N@T.... + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x52' # | ...............R + msg += b'\x28\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | (T.............. + msg += b'\xff\xff\xff\x00\x0d\x56\x10\x54\x10\xff\xff\xff\xff\xff\xff\xff' # | .....V.T........ + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x59\xf8\x54\x10\xff' # | ...........Y.T.. + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00' # | ................ + msg += b'\x0d\x5d\xe0\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | .].T............ + msg += b'\xff\xff\xff\xff\xff\x00\x0d\x61\xc8\x54\x10\xff\xff\xff\xff\xff' # | .......a.T...... + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x65\xb0\x54' # | .............e.T + msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | ................ + msg += b'\xff\x00\x0d\x69\x98\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | ...i.T.......... + msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x6d\x80\x54\x10\xff\xff\xff' # | .........m.T.... + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x71' # | ...............q + msg += b'\x68\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | hT.............. + msg += b'\xff\xff\xff\x00\x0d\x75\x50\x54\x10\xff\xff\xff\xff\xff\xff\xff' # | .....uPT........ + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x79\x38\x54\x10\xff' # | ...........y8T.. + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00' # | ................ + msg += b'\x0d\x7d\x20\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | .} T............ + msg += b'\xff\xff\xff\xff\xff\x00\x0d\x81\x08\x54\x10\xff\xff\xff\xff\xff' # | .........T...... + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x84\xf0\x54' # | ...............T + msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | ................ + msg += b'\xff\x00\x0d\x88\xd8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | .....T.......... + msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x8c\xc0\x54\x10\xff\xff\xff' # | ...........T.... + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x90' # | ................ + msg += b'\xa8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff' # | .T.............. + msg += b'\xff\xff\xff\x00\x0d\x94\x90\x54\x10\xff\xff\xff\xff\xff\xff\xff' # | .......T........ + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x98\x78\x54\x10\xff' # | ............xT.. + msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00' # | ................ + msg += b'\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00\x00\x00' # | .. I......5.I... + msg += b'\x28\x00\x0c\x96\xa8\x49\x00\x00\x01\x69\x00\x0c\x7f\x38\x49\x00' # | (....I...i...8I. + msg += b'\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50' # | ......8I.......P + msg += b'\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49\x00\x00\x00\x00\x00\x0c' # | I...,..c.I...... + msg += b'\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01' # | g.I......PXI.... + msg += b'\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00' # | ..^pI......^.I.. 
+ msg += b'\x13\x8d\x00\x09\x5b\x50\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49' # | ....[PI........I + msg += b'\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50' # | ........I......P + msg += b'\x59\x49\x00\x00\x00\x3e\x00\x0d\x1f\x60\x49\x00\x00\x00\x00\x00' # | YI...>...`I..... + msg += b'\x0d\x23\x48\x49\xff\xff\xff\xff\x00\x0d\x27\x30\x49\xff\xff\xff' # | .#HI......'0I... + msg += b'\xff\x00\x0d\x2b\x18\x4c\x00\x00\x00\x00\xff\xff\xff\xff\x00\x0c' # | ...+.L.......... + msg += b'\xa2\x60\x49\x00\x00\x00\x00\x00\x00\x05\x02\x10\x52\x31\x37\x45' # | .`I.........R17E + msg += b'\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x91\x04\x01\x90' # | 7307021D006A.... + msg += b'\x00\x01\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30' # | ...T17E7307021D0 + msg += b'\x30\x36\x41\x01\x00\x00\x01\x91\xa3\xfe\xb3\x80\x00\x00\x00\xa3' # | 06A............. + msg += b'\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x00\x00\x00' # | ...dS......S.... + msg += b'\x01\x2c\x53\x00\x02\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00' # | .,S......I...... + msg += b'\x01\x91\x53\x00\x00\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93' # | ..S......S...... + msg += b'\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00' # | S......S......S. + msg += b'\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00\x00\x00' # | .....S......S... + msg += b'\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01' # | ...S......S..... + msg += b'\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53' # | .S......S......S + msg += b'\x00\x00\x00\x00\x01\x9d\x53\x00\x00\x00\x00\x01\x9e\x53\x00\x00' # | ......S......S.. + msg += b'\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00' # | ....S......S.... + msg += b'\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00' # | ..I........S.... + msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8' # | ..S......S...... + msg += b'\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00' # | S......S......S. + msg += b'\x00\x00\x00\x01\xfb\x53\x00\x00\x00\x00\x01\xfc\x53\x00\x00\x00' # | .....S......S... + msg += b'\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01' # | ...S......S..... + msg += b'\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53' # | .S......S......S + msg += b'\x00\x00\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00' # | ......S......S.. + msg += b'\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00' # | ....S.....XI.... + msg += b'\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a\x53\x00\x00\x00\x00' # | ...YS.....ZS.... + msg += b'\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d' # | .[S.....\S.....] + msg += b'\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00' # | S.....^S....._S. + msg += b'\x00\x00\x00\x02\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00' # | ....`S.....aS... + msg += b'\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02' # | ..bS.....cS..... + msg += b'\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00\x02\x66\x53' # | dS.....eS.....fS + msg += b'\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00' # | .....gS.....hS.. + msg += b'\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00' # | ....I........S.. + msg += b'\x00\x00\x02\xbe\x53\x00\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00' # | ....S......S.... + msg += b'\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2' # | ..S......S...... 
+ msg += b'\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53\x00' # | S......S......S. + msg += b'\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00' # | .....S......S... + msg += b'\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02' # | ...S......S..... + msg += b'\xc9\x53\x00\x00\x00\x00\x02\xca\x53\x00\x00\x00\x00\x02\xcb\x53' # | .S......S......S + msg += b'\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00' # | ......S..... S.. + msg += b'\x00\x00\x03\x84\x53\x51\x09\x00\x00\x03\xe8\x46\x43\x65\x99\x9a' # | ....SQ.....FCe.. + msg += b'\x00\x00\x04\x4c\x46\x3e\xd7\x0a\x3d\x00\x00\x04\xb0\x46\x42\x48' # | ...LF>..=....FBH + msg += b'\x28\xf6\x00\x00\x05\x14\x53\x00\x1f\x00\x00\x05\x78\x53\x00\x00' # | (.....S.....xS.. + msg += b'\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06\x40\x46\x42\xc1\x33\x33' # | ....S.X...@FB.33 + msg += b'\x00\x00\x06\xa4\x46\x3f\x33\x33\x33\x00\x00\x07\x08\x46\x00\x00' # | ....F?333....F.. + msg += b'\x00\x00\x00\x00\x07\x6c\x46\x00\x00\x00\x00\x00\x00\x07\xd0\x46' # | .....lF........F + msg += b'\x42\x05\x99\x9a\x00\x00\x08\x34\x46\x40\x41\xeb\x85\x00\x00\x08' # | B......4F@A..... + msg += b'\x98\x46\x42\xcb\x66\x66\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00' # | .FB.ff....F..... + msg += b'\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4\x46\x00\x00\x00' # | ..`F........F... + msg += b'\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00' # | ....(F........F. + msg += b'\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54' # | .......F.......T + msg += b'\x46\x3f\x19\x99\x9a\x00\x00\x0b\xb8\x46\x43\xf3\x95\xc3\x00\x00' # | F?.......FC..... + msg += b'\x0c\x1c\x46\x00\x00\x00\x00\x00\x00\x0c\x80\x46\x43\x04\x4a\x3d' # | ..F........FC.J= + msg += b'\x00\x00\x0c\xe4\x46\x3f\x23\xd7\x0a\x00\x00\x0d\x48\x46\x43\xbf' # | ....F?#.....HFC. + msg += b'\x9e\xb8\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46' # | ......F........F + msg += b'\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e' # | .......tF....... + msg += b'\xd8\x46\x00\x00\x00\x00\x00\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f' # | .F.......=q..* + msg += b'\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a' # | 0S....*.FC7....* + msg += b'\xf8\x46\x42\xce\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b' # | .FB.....+\S....+ + msg += b'\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c' # | .S....,$FC....., + msg += b'\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06\x00\x00\x2d' # | .FC.....,.S....- + msg += b'\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e' # | PS....-.FC}..... + msg += b'\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00' # | .FB=.....|FB=... + msg += b'\x00\x2e\xe0\x53\x00\x03\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f' # | ...S..../DS..../ + msg += b'\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00' # | .FBM....0.FBM... + msg += b'\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00\x31' # | .0pS....0.S....1 + msg += b'\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32' # | 8FB.....1.S....2 + msg += b'\x00\x53\x01\x61\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53' # | .S.a..2dS....2.S + msg += b'\x13\x9c\x00\x00\x33\x2c\x53\x0f\xa0\x00\x00\x33\x90\x53\x00\x4f' # | ....3,S....3.S.O + msg += b'\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00' # | ..3.S.f..4XS.... + msg += b'\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x09\xc4\x00\x00\x35\x84' # | 4.S....5 S....5. 
+ msg += b'\x53\x07\xc6\x00\x00\x35\xe8\x53\x13\x8c\x00\x00\x36\x4c\x53\x12' # | S....5.S....6LS. + msg += b'\x94\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00' # | ...8.S....8.S... + msg += b'\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83\x53\x00\x00\x00\x00\x00' # | .8.S....8.S..... + msg += b'\x8b\x10\x52\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30' # | ..R17E7307021D00 + msg += b'\x36\x41\x91\x04\x01\x90\x00\x01\x10\x54\x31\x37\x45\x37\x33\x30' # | 6A.......T17E730 + msg += b'\x37\x30\x32\x31\x44\x30\x30\x36\x41\x01\x00\x00\x01\x91\xa3\xfe' # | 7021D006A....... + msg += b'\xb3\x80\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72' # | ..........T.Micr + msg += b'\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00' # | oinv....T.TSUN.. + msg += b'\x00\x1e\x54\x07\x56\x35\x2e\x31\x2e\x30\x39\x00\x00\x00\x28\x54' # | ..T.V5.1.09...(T + msg += b'\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36' # | .T17E7307021D006 + msg += b'\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30' # | A...2T.TSOL-MS60 + msg += b'\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43' # | 0... Date: Tue, 3 Sep 2024 17:23:09 +0200 Subject: [PATCH 19/39] improve test coverage und reduce test delays --- app/src/gen3/connection_g3.py | 4 +-- app/src/gen3plus/connection_g3p.py | 4 +-- app/tests/test_modbus_tcp.py | 46 ++++++++++++------------------ app/tests/test_mqtt.py | 2 +- 4 files changed, 24 insertions(+), 32 deletions(-) diff --git a/app/src/gen3/connection_g3.py b/app/src/gen3/connection_g3.py index 66683fa..5aea231 100644 --- a/app/src/gen3/connection_g3.py +++ b/app/src/gen3/connection_g3.py @@ -25,10 +25,10 @@ class ConnectionG3(AsyncStream, Talent): # logger.info(f'AsyncStream refs: {gc.get_referrers(self)}') async def async_create_remote(self) -> None: - pass # virtual interface + pass # virtual interface # pragma: no cover async def async_publ_mqtt(self) -> None: - pass # virtual interface + pass # virtual interface # pragma: no cover def healthy(self) -> bool: logger.debug('ConnectionG3 healthy()') diff --git a/app/src/gen3plus/connection_g3p.py b/app/src/gen3plus/connection_g3p.py index dc4eca1..89dfc1a 100644 --- a/app/src/gen3plus/connection_g3p.py +++ b/app/src/gen3plus/connection_g3p.py @@ -31,10 +31,10 @@ class ConnectionG3P(AsyncStream, SolarmanV5): # logger.info(f'AsyncStream refs: {gc.get_referrers(self)}') async def async_create_remote(self) -> None: - pass # virtual interface + pass # virtual interface # pragma: no cover async def async_publ_mqtt(self) -> None: - pass # virtual interface + pass # virtual interface # pragma: no cover def healthy(self) -> bool: logger.debug('ConnectionG3P healthy()') diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index c2ed8a2..52aabee 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -110,7 +110,7 @@ class FakeWriter(): @pytest.fixture def patch_open(): async def new_conn(conn): - await asyncio.sleep(0.01) + await asyncio.sleep(0) return FakeReader(), FakeWriter() def new_open(host: str, port: int): @@ -161,12 +161,12 @@ async def test_modbus_cnf1(config_conn, patch_open): assert Infos.stat['proxy']['Inverter_Cnt'] == 0 loop = asyncio.get_event_loop() ModbusTcp(loop) - await asyncio.sleep(0.1) + await asyncio.sleep(0.01) for m in Message: if (m.node_id == 'inv_2'): assert False - await asyncio.sleep(0.1) + await asyncio.sleep(0.01) assert Infos.stat['proxy']['Inverter_Cnt'] == 0 @pytest.mark.asyncio @@ -181,7 +181,7 @@ async def 
test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open): assert Infos.stat['proxy']['Inverter_Cnt'] == 0 ModbusTcp(asyncio.get_event_loop()) - await asyncio.sleep(0.1) + await asyncio.sleep(0.01) test = 0 for m in Message: if (m.node_id == 'inv_2'): @@ -192,7 +192,7 @@ async def test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open): del m assert 1 == test - await asyncio.sleep(0.1) + await asyncio.sleep(0.01) assert Infos.stat['proxy']['Inverter_Cnt'] == 0 # check that the connection is released for m in Message: @@ -211,32 +211,24 @@ async def test_modbus_cnf3(config_conn, patch_no_mqtt, patch_open): test = TestType.RD_TEST_0_BYTES assert Infos.stat['proxy']['Inverter_Cnt'] == 0 - ModbusTcp(asyncio.get_event_loop(), tim_restart= 0.1) - await asyncio.sleep(0.1) + ModbusTcp(asyncio.get_event_loop(), tim_restart= 0) + await asyncio.sleep(0.01) test = 0 for m in Message: if (m.node_id == 'inv_2'): assert Infos.stat['proxy']['Inverter_Cnt'] == 1 - m.shutdown_started = False - m.reader.on_recv.set() test += 1 - await asyncio.sleep(0.1) - assert m.state == State.closed - - assert 1 == test - await asyncio.sleep(0.1) - assert Infos.stat['proxy']['Inverter_Cnt'] == 1 - # check that the connection is released - for m in Message: - if (m.node_id == 'inv_2'): - test += 1 - m.shutdown_started = True - m.reader.on_recv.set() - del m + if test == 1: + m.shutdown_started = False + m.reader.on_recv.set() + await asyncio.sleep(0.1) + assert m.state == State.closed + await asyncio.sleep(0.1) + else: + m.shutdown_started = True + m.reader.on_recv.set() + del m - assert 3 == test - await asyncio.sleep(0.1) + assert 2 == test + await asyncio.sleep(0.01) assert Infos.stat['proxy']['Inverter_Cnt'] == 0 - for m in Message: - if (m.node_id == 'inv_2'): - assert False diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 1d7c5dd..3072627 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -97,7 +97,7 @@ async def test_mqtt_no_config(config_no_conn): try: m = Mqtt(cb) assert m.task - await asyncio.sleep(1) + await asyncio.sleep(0) assert not on_connect.is_set() try: await m.publish('homeassistant/status', 'online') From ab5256659b01a4f97dfa79fa0297346e8b61e082 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 3 Sep 2024 18:32:44 +0200 Subject: [PATCH 20/39] reduce cognitive complexity --- app/src/infos.py | 198 ++++++++++++++++++++++++++--------------------- 1 file changed, 109 insertions(+), 89 deletions(-) diff --git a/app/src/infos.py b/app/src/infos.py index f5f529a..2192b5b 100644 --- a/app/src/infos.py +++ b/app/src/infos.py @@ -427,100 +427,120 @@ class Infos: return None elif singleton: return None - prfx = ha_prfx + node_id # check if we have details for home assistant if 'ha' in row: - ha = row['ha'] - if 'comp' in ha: - component = ha['comp'] - else: - component = 'sensor' - attr = {} - if 'name' in ha: - attr['name'] = ha['name'] - else: - attr['name'] = row['name'][-1] - attr['stat_t'] = prfx + row['name'][0] - attr['dev_cla'] = ha['dev_cla'] - attr['stat_cla'] = ha['stat_cla'] - attr['uniq_id'] = ha['id']+snr - if 'val_tpl' in ha: - attr['val_tpl'] = ha['val_tpl'] - elif 'fmt' in ha: - attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }} # noqa: E501 - else: - self.inc_counter('Internal_Error') - logging.error(f"Infos.info_defs: the row for {key} do" - " not have a 'val_tpl' nor a 'fmt' value") - # add unit_of_meas only, if status_class isn't none. 
If - # status_cla is None we want a number format and not line - # graph in home assistant. A unit will change the number - # format to a line graph - if 'unit' in row and attr['stat_cla'] is not None: - attr['unit_of_meas'] = row['unit'] # 'unit_of_meas' - if 'icon' in ha: - attr['ic'] = ha['icon'] # icon for the entity - if 'nat_prc' in ha: # pragma: no cover - attr['sug_dsp_prc'] = ha['nat_prc'] # precison of floats - if 'ent_cat' in ha: - attr['ent_cat'] = ha['ent_cat'] # diagnostic, config - # enabled_by_default is deactivated, since it avoid the via - # setup of the devices. It seems, that there is a bug in home - # assistant. tested with 'Home Assistant 2023.10.4' - # if 'en' in ha: # enabled_by_default - # attr['en'] = ha['en'] - if 'dev' in ha: - device = self.info_devs[ha['dev']] - if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501 - return None - dev = {} - # the same name for 'name' and 'suggested area', so we get - # dedicated devices in home assistant with short value - # name and headline - if (sug_area == '' or - ('singleton' in device and device['singleton'])): - dev['name'] = device['name'] - dev['sa'] = device['name'] - else: - dev['name'] = device['name']+' - '+sug_area - dev['sa'] = device['name']+' - '+sug_area - if 'via' in device: # add the link to the parent device - via = device['via'] - if via in self.info_devs: - via_dev = self.info_devs[via] - if 'singleton' in via_dev and via_dev['singleton']: - dev['via_device'] = via - else: - dev['via_device'] = f"{via}_{snr}" - else: - self.inc_counter('Internal_Error') - logging.error(f"Infos.info_defs: the row for " - f"{key} has an invalid via value: " - f"{via}") - for key in ('mdl', 'mf', 'sw', 'hw'): # add optional - # values fpr 'modell', 'manufacturer', 'sw version' and - # 'hw version' - if key in device: - data = self.dev_value(device[key]) - if data is not None: - dev[key] = data - if 'singleton' in device and device['singleton']: - dev['ids'] = [f"{ha['dev']}"] - else: - dev['ids'] = [f"{ha['dev']}_{snr}"] - attr['dev'] = dev - origin = {} - origin['name'] = self.app_name - origin['sw'] = self.version - attr['o'] = origin - else: - self.inc_counter('Internal_Error') - logging.error(f"Infos.info_defs: the row for {key} " - "missing 'dev' value for ha register") - return json.dumps(attr), component, node_id, attr['uniq_id'] + return self.__ha_conf(row, key, ha_prfx, node_id, snr, sug_area) return None + def __ha_conf(self, row, key, ha_prfx, node_id, snr, + sug_area: str) -> tuple[str, str, str, str] | None: + ha = row['ha'] + if 'comp' in ha: + component = ha['comp'] + else: + component = 'sensor' + attr = self.__build_attr(row, key, ha_prfx, node_id, snr) + if 'dev' in ha: + device = self.info_devs[ha['dev']] + if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501 + return None + attr['dev'] = self.__build_dev(device, key, ha, snr, + sug_area) + attr['o'] = self.__build_origin() + + else: + self.inc_counter('Internal_Error') + logging.error(f"Infos.info_defs: the row for {key} " + "missing 'dev' value for ha register") + return json.dumps(attr), component, node_id, attr['uniq_id'] + + def __build_attr(self, row, key, ha_prfx, node_id, snr): + attr = {} + ha = row['ha'] + if 'name' in ha: + attr['name'] = ha['name'] + else: + attr['name'] = row['name'][-1] + prfx = ha_prfx + node_id + attr['stat_t'] = prfx + row['name'][0] + attr['dev_cla'] = ha['dev_cla'] + attr['stat_cla'] = ha['stat_cla'] + attr['uniq_id'] = ha['id']+snr + if 'val_tpl' in ha: + attr['val_tpl'] = 
ha['val_tpl'] + elif 'fmt' in ha: + attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }} # noqa: E501 + else: + self.inc_counter('Internal_Error') + logging.error(f"Infos.info_defs: the row for {key} do" + " not have a 'val_tpl' nor a 'fmt' value") + # add unit_of_meas only, if status_class isn't none. If + # status_cla is None we want a number format and not line + # graph in home assistant. A unit will change the number + # format to a line graph + if 'unit' in row and attr['stat_cla'] is not None: + attr['unit_of_meas'] = row['unit'] # 'unit_of_meas' + if 'icon' in ha: + attr['ic'] = ha['icon'] # icon for the entity + if 'nat_prc' in ha: # pragma: no cover + attr['sug_dsp_prc'] = ha['nat_prc'] # precison of floats + if 'ent_cat' in ha: + attr['ent_cat'] = ha['ent_cat'] # diagnostic, config + # enabled_by_default is deactivated, since it avoid the via + # setup of the devices. It seems, that there is a bug in home + # assistant. tested with 'Home Assistant 2023.10.4' + # if 'en' in ha: # enabled_by_default + # attr['en'] = ha['en'] + return attr + + def __build_dev(self, device, key, ha, snr, sug_area): + dev = {} + singleton = 'singleton' in device and device['singleton'] + # the same name for 'name' and 'suggested area', so we get + # dedicated devices in home assistant with short value + # name and headline + if (sug_area == '' or singleton): + dev['name'] = device['name'] + dev['sa'] = device['name'] + else: + dev['name'] = device['name']+' - '+sug_area + dev['sa'] = device['name']+' - '+sug_area + self.__add_via_dev(dev, device, key, snr) + for key in ('mdl', 'mf', 'sw', 'hw'): # add optional + # values fpr 'modell', 'manufacturer', 'sw version' and + # 'hw version' + if key in device: + data = self.dev_value(device[key]) + if data is not None: + dev[key] = data + if singleton: + dev['ids'] = [f"{ha['dev']}"] + else: + dev['ids'] = [f"{ha['dev']}_{snr}"] + return dev + + def __add_via_dev(self, dev, device, key, snr): + if 'via' in device: # add the link to the parent device + via = device['via'] + if via in self.info_devs: + via_dev = self.info_devs[via] + if 'singleton' in via_dev and via_dev['singleton']: + dev['via_device'] = via + else: + dev['via_device'] = f"{via}_{snr}" + else: + self.inc_counter('Internal_Error') + logging.error(f"Infos.info_defs: the row for " + f"{key} has an invalid via value: " + f"{via}") + + def __build_origin(self): + origin = {} + origin['name'] = self.app_name + origin['sw'] = self.version + return origin + def ha_remove(self, key, node_id, snr) -> tuple[str, str, str, str] | None: '''Method to build json unregister struct for home-assistant to remove topics per auto configuration. Only for inverer topics. 
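Note on the refactoring in the patch above: the former monolithic discovery-config builder is split so that __build_attr(), __build_dev() and __build_origin() each produce one part of the Home Assistant MQTT discovery payload, and __ha_conf() only assembles and serializes them. The standalone sketch below is illustrative only and shows the shape of the resulting payload; all dictionary values are made-up placeholders and are not taken from info_defs.

import json

def build_discovery_example() -> str:
    # per-entity attributes, as assembled by __build_attr()
    attr = {
        'name': 'Power',
        'stat_t': 'tsun/inv_1/grid',
        'dev_cla': 'power',
        'stat_cla': 'measurement',
        'uniq_id': 'out_power_123',
        'val_tpl': "{{value_json['Output_Power'] | float}}",
        'unit_of_meas': 'W',
    }
    # device block with the link to its parent device, as built by __build_dev()
    attr['dev'] = {
        'name': 'Micro Inverter',
        'sa': 'Micro Inverter',
        'via_device': 'controller_123',
        'ids': ['inverter_123'],
    }
    # origin block, as built by __build_origin()
    attr['o'] = {'name': 'proxy', 'sw': 'unknown'}
    return json.dumps(attr)

if __name__ == '__main__':
    print(build_discovery_example())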
From 2351ec314a2415709ad240e03b6927f7b44c59eb Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 3 Sep 2024 18:42:48 +0200 Subject: [PATCH 21/39] fix merge --- app/tests/test_modbus_tcp.py | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index 41640ec..9c20e80 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -134,16 +134,6 @@ class FakeReader(): raise TimeoutError def feed_eof(self): return - def __init__(self): - self.on_recv = asyncio.Event() - async def read(self, max_len: int): - await self.on_recv.wait() - if test == TestType.RD_TEST_0_BYTES: - return b'' - elif test == TestType.RD_TEST_TIMEOUT: - raise TimeoutError - def feed_eof(self): - return class FakeWriter(): @@ -161,20 +151,6 @@ class FakeWriter(): return async def wait_closed(self): return - def write(self, buf: bytes): - return - def get_extra_info(self, sel: str): - if sel == 'peername': - return 'remote.intern' - elif sel == 'sockname': - return 'sock:1234' - assert False - def is_closing(self): - return False - def close(self): - return - async def wait_closed(self): - return @pytest.fixture From 7b4fabdc25d1de53870c0aa7e65fa20c1be20041 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 3 Sep 2024 18:48:21 +0200 Subject: [PATCH 22/39] fix merge conflikt --- app/tests/test_modbus_tcp.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index 9c20e80..5ce73af 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -253,7 +253,6 @@ async def test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open): if (m.node_id == 'inv_2'): assert False - @pytest.mark.asyncio async def test_modbus_cnf3(config_conn, patch_no_mqtt, patch_open): _ = config_conn From 270732f1d073972d630ed4c4619daae299253f3d Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Tue, 3 Sep 2024 18:54:49 +0200 Subject: [PATCH 23/39] fix merge conflict --- app/tests/test_modbus_tcp.py | 43 ------------------------------------ 1 file changed, 43 deletions(-) diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index 5ce73af..f9ec1ef 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -81,48 +81,8 @@ class TestType(Enum): test = TestType.RD_TEST_0_BYTES -def config_conn(test_hostname, test_port): - Config.act_config = { - 'mqtt':{ - 'host': test_hostname, - 'port': test_port, - 'user': '', - 'passwd': '' - }, - 'ha':{ - 'auto_conf_prefix': 'homeassistant', - 'discovery_prefix': 'homeassistant', - 'entity_prefix': 'tsun', - 'proxy_node_id': 'test_1', - 'proxy_unique_id': '' - }, - 'inverters':{ - 'allow_all': True, - "R170000000000001":{ - 'node_id': 'inv_1' - }, - "Y170000000000001":{ - 'node_id': 'inv_2', - 'monitor_sn': 2000000000, - 'modbus_polling': True, - 'suggested_area': "", - 'sensor_list': 0x2b0, - 'client_mode':{ - 'host': '192.168.0.1', - 'port': 8899 - } - } - } - } -class TestType(Enum): - RD_TEST_0_BYTES = 1 - RD_TEST_TIMEOUT = 2 - - -test = TestType.RD_TEST_0_BYTES - class FakeReader(): def __init__(self): self.on_recv = asyncio.Event() @@ -161,9 +121,6 @@ def patch_open(): def new_open(host: str, port: int): global test - if test == TestType.RD_TEST_TIMEOUT: - raise TimeoutError - global test if test == TestType.RD_TEST_TIMEOUT: raise TimeoutError return new_conn(None) From be4c6ac77f035dd3b4d4361d50dd4135c8aa7882 Mon Sep 17 00:00:00 2001 From: Stefan Allius 
<122395479+s-allius@users.noreply.github.com> Date: Sat, 7 Sep 2024 11:45:16 +0200 Subject: [PATCH 24/39] S allius/issue182 (#183) * GEN3: After inverter firmware update the 'Unknown Msg Type' increases continuously Fixes #182 * add support for Controller serial no and MAC * test hardening * GEN3: add support for new messages of version 3 firmwares * bump libraries to latest versions - bump aiomqtt to version 2.3.0 - bump aiohttp to version 3.10.5 * improve test coverage * reduce cognective complexity --- CHANGELOG.md | 2 + app/requirements.txt | 4 +- app/src/gen3/infos_g3.py | 1 + app/src/gen3/talent.py | 77 +++++++++ app/src/gen3plus/solarman_v5.py | 4 +- app/src/infos.py | 20 ++- app/tests/test_infos_g3.py | 30 +++- app/tests/test_modbus_tcp.py | 4 - app/tests/test_solarman.py | 77 +++++++++ app/tests/test_talent.py | 270 +++++++++++++++++++++++++++++++- 10 files changed, 475 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3db2d75..93cfc9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- GEN3: add support for new messages of version 3 firmwares +- add support for controller MAC and serial number - GEN3: don't crash on overwritten msg in the receive buffer - Reading the version string from the image updates it even if the image is re-pulled without re-deployment diff --git a/app/requirements.txt b/app/requirements.txt index 2aa2067..6546f18 100644 --- a/app/requirements.txt +++ b/app/requirements.txt @@ -1,4 +1,4 @@ - aiomqtt==2.2.0 + aiomqtt==2.3.0 schema==0.7.7 aiocron==1.8 - aiohttp==3.10.2 \ No newline at end of file + aiohttp==3.10.5 \ No newline at end of file diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 3594f9d..6a9c1cf 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -14,6 +14,7 @@ class RegisterMap: 0x00092ba8: Register.COLLECTOR_FW_VERSION, 0x000927c0: Register.CHIP_TYPE, 0x00092f90: Register.CHIP_MODEL, + 0x00094ae8: Register.MAC_ADDR, 0x00095a88: Register.TRACE_URL, 0x00095aec: Register.LOGGER_URL, 0x0000000a: Register.PRODUCT_NAME, diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 015efd9..c225691 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -56,20 +56,24 @@ class Talent(Message): 0x00: self.msg_contact_info, 0x13: self.msg_ota_update, 0x22: self.msg_get_time, + 0x99: self.msg_act_time, 0x71: self.msg_collector_data, # 0x76: 0x77: self.msg_modbus, # 0x78: + 0x87: self.msg_modbus2, 0x04: self.msg_inverter_data, } self.log_lvl = { 0x00: logging.INFO, 0x13: logging.INFO, 0x22: logging.INFO, + 0x99: logging.INFO, 0x71: logging.INFO, # 0x76: 0x77: self.get_modbus_log_lvl, # 0x78: + 0x87: self.get_modbus_log_lvl, 0x04: logging.INFO, } self.modbus_elms = 0 # for unit tests @@ -127,6 +131,7 @@ class Talent(Message): logger.debug(f'SerialNo {serial_no} not known but accepted!') self.unique_id = serial_no + self.db.set_db_def_value(Register.COLLECTOR_SNR, serial_no) def read(self) -> float: '''process all received messages in the _recv_buffer''' @@ -170,6 +175,25 @@ class Talent(Message): logger.info(self.__flow_str(self.server_side, 'forwrd') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') + def forward_snd(self) -> None: + '''add the actual receive msg to the forwarding queue''' + tsun = Config.get('tsun') + if tsun['enabled']: + _len = len(self._send_buffer) - self.send_msg_ofs + struct.pack_into('!l', self._send_buffer, self.send_msg_ofs, + 
_len-4) + + buffer = self._send_buffer[self.send_msg_ofs:] + buflen = _len + self._forward_buffer += buffer[:buflen] + hex_dump_memory(logging.INFO, 'Store for forwarding:', + buffer, buflen) + + fnc = self.switch.get(self.msg_id, self.msg_unknown) + logger.info(self.__flow_str(self.server_side, 'forwrd') + + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') + self._send_buffer = self._send_buffer[:self.send_msg_ofs] + def send_modbus_cb(self, modbus_pdu: bytearray, log_lvl: int, state: str): if self.state != State.up: logger.warning(f'[{self.node_id}] ignore MODBUS cmd,' @@ -400,6 +424,8 @@ class Talent(Message): result = struct.unpack_from('!q', self._recv_buffer, self.header_len) self.ts_offset = result[0]-ts + if self.remote_stream: + self.remote_stream.ts_offset = self.ts_offset logger.debug(f'tsun-time: {int(result[0]):08x}' f' proxy-time: {ts:08x}' f' offset: {self.ts_offset}') @@ -410,6 +436,41 @@ class Talent(Message): self.forward() + def msg_act_time(self): + if self.ctrl.is_ind(): + if self.data_len == 9: + self.state = State.up # allow MODBUS cmds + if (self.modbus_polling): + self.mb_timer.start(self.mb_first_timeout) + self.db.set_db_def_value(Register.POLLING_INTERVAL, + self.mb_timeout) + self.__build_header(0x99) + self._send_buffer += b'\x02' + self.__finish_send_msg() + + result = struct.unpack_from('!Bq', self._recv_buffer, + self.header_len) + resp_code = result[0] + ts = result[1]+self.ts_offset + logger.debug(f'inv-time: {int(result[1]):08x}' + f' tsun-time: {ts:08x}' + f' offset: {self.ts_offset}') + self.__build_header(0x91) + self._send_buffer += struct.pack('!Bq', resp_code, ts) + self.forward_snd() + return + elif self.ctrl.is_resp(): + result = struct.unpack_from('!B', self._recv_buffer, + self.header_len) + resp_code = result[0] + logging.debug(f'TimeActRespCode: {resp_code}') + return + else: + logger.warning(self.TXT_UNKNOWN_CTRL) + self.inc_counter('Unknown_Ctrl') + + self.forward() + def parse_msg_header(self): result = struct.unpack_from('!lB', self._recv_buffer, self.header_len) @@ -492,6 +553,15 @@ class Talent(Message): modbus_len = result[1] return msg_hdr_len, modbus_len + def parse_modbus_header2(self): + + msg_hdr_len = 6 + + result = struct.unpack_from('!lBBB', self._recv_buffer, + self.header_len) + modbus_len = result[2] + return msg_hdr_len, modbus_len + def get_modbus_log_lvl(self) -> int: if self.ctrl.is_req(): return logging.INFO @@ -501,6 +571,13 @@ class Talent(Message): def msg_modbus(self): hdr_len, _ = self.parse_modbus_header() + self.__msg_modbus(hdr_len) + + def msg_modbus2(self): + hdr_len, _ = self.parse_modbus_header2() + self.__msg_modbus(hdr_len) + + def __msg_modbus(self, hdr_len): data = self._recv_buffer[self.header_len: self.header_len+self.data_len] diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index 94089f3..e1b6e3e 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -203,13 +203,15 @@ class SolarmanV5(Message): inverters = Config.get('inverters') # logger.debug(f'Inverters: {inverters}') - for inv in inverters.values(): + for key, inv in inverters.items(): # logger.debug(f'key: {key} -> {inv}') if (type(inv) is dict and 'monitor_sn' in inv and inv['monitor_sn'] == snr): self.__set_config_parms(inv) self.db.set_pv_module_details(inv) logger.debug(f'SerialNo {serial_no} allowed! 
area:{self.sug_area}') # noqa: E501 + + self.db.set_db_def_value(Register.COLLECTOR_SNR, key) break else: self.node_id = '' diff --git a/app/src/infos.py b/app/src/infos.py index 2192b5b..e06348e 100644 --- a/app/src/infos.py +++ b/app/src/infos.py @@ -16,6 +16,8 @@ class Register(Enum): CHIP_MODEL = 3 TRACE_URL = 4 LOGGER_URL = 5 + MAC_ADDR = 6 + COLLECTOR_SNR = 7 PRODUCT_NAME = 20 MANUFACTURER = 21 VERSION = 22 @@ -188,8 +190,8 @@ class Infos: __info_devs = { 'proxy': {'singleton': True, 'name': 'Proxy', 'mf': 'Stefan Allius'}, # noqa: E501 - 'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': Register.CHIP_MODEL, 'mf': Register.CHIP_TYPE, 'sw': Register.COLLECTOR_FW_VERSION}, # noqa: E501 - 'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': Register.EQUIPMENT_MODEL, 'mf': Register.MANUFACTURER, 'sw': Register.VERSION}, # noqa: E501 + 'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': Register.CHIP_MODEL, 'mf': Register.CHIP_TYPE, 'sw': Register.COLLECTOR_FW_VERSION, 'mac': Register.MAC_ADDR, 'sn': Register.COLLECTOR_SNR}, # noqa: E501 + 'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': Register.EQUIPMENT_MODEL, 'mf': Register.MANUFACTURER, 'sw': Register.VERSION, 'sn': Register.SERIAL_NUMBER}, # noqa: E501 'input_pv1': {'via': 'inverter', 'name': 'Module PV1', 'mdl': Register.PV1_MODEL, 'mf': Register.PV1_MANUFACTURER}, # noqa: E501 'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'mdl': Register.PV2_MODEL, 'mf': Register.PV2_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 2}}, # noqa: E501 'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'mdl': Register.PV3_MODEL, 'mf': Register.PV3_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 3}}, # noqa: E501 @@ -222,6 +224,9 @@ class Infos: Register.CHIP_MODEL: {'name': ['collector', 'Chip_Model'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.TRACE_URL: {'name': ['collector', 'Trace_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.LOGGER_URL: {'name': ['collector', 'Logger_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.MAC_ADDR: {'name': ['collector', 'MAC-Addr'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.COLLECTOR_SNR: {'name': ['collector', 'Serial_Number'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501 + # inverter values used for device registration: Register.PRODUCT_NAME: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 @@ -507,7 +512,7 @@ class Infos: dev['name'] = device['name']+' - '+sug_area dev['sa'] = device['name']+' - '+sug_area self.__add_via_dev(dev, device, key, snr) - for key in ('mdl', 'mf', 'sw', 'hw'): # add optional + for key in ('mdl', 'mf', 'sw', 'hw', 'sn'): # add optional # values fpr 'modell', 'manufacturer', 'sw version' and # 'hw version' if key in device: @@ -518,8 +523,17 @@ class Infos: dev['ids'] = [f"{ha['dev']}"] else: dev['ids'] = [f"{ha['dev']}_{snr}"] + self.__add_connection(dev, device) return dev + def __add_connection(self, dev, device): + if 'mac' in device: + mac_str = self.dev_value(device['mac']) + if mac_str is not None: + if 12 == len(mac_str): + mac_str = ':'.join(mac_str[i:i+2] for i in range(0, 12, 2)) + dev['cns'] = [["mac", f"{mac_str}"]] + def __add_via_dev(self, dev, device, key, snr): if 'via' in device: # add the link to the parent device via = device['via'] diff --git a/app/tests/test_infos_g3.py 
b/app/tests/test_infos_g3.py index 37a8076..1c39d46 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -364,12 +364,12 @@ def test_build_ha_conf3(contr_data_seq, inv_data_seq, inv_data_seq2): if id == 'out_power_123': assert comp == 'sensor' - assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}}) + assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "sn": "T170000000000001", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}}) tests +=1 if id == 'daily_gen_123': assert comp == 'sensor' - assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}}) + assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "sn": "T170000000000001", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}}) tests +=1 elif id == 'power_pv1_123': @@ -388,6 +388,32 @@ def test_build_ha_conf3(contr_data_seq, inv_data_seq, inv_data_seq2): tests +=1 assert tests==5 +def test_build_ha_conf4(contr_data_seq, inv_data_seq): + i = InfosG3() + for key, result in i.parse (contr_data_seq): + pass # side effect in calling i.parse() + for key, result in i.parse (inv_data_seq): + pass # side effect in calling i.parse() + i.set_db_def_value(Register.MAC_ADDR, "00a057123456") + + tests = 0 + for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'): + if id == 'signal_123': + assert comp == 'sensor' + assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"], "cns": [["mac", "00:a0:57:12:34:56"]]}, "o": {"name": "proxy", "sw": "unknown"}}) + tests +=1 + assert tests==1 + + i.set_db_def_value(Register.MAC_ADDR, 
"00:a0:57:12:34:57") + + tests = 0 + for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'): + if id == 'signal_123': + assert comp == 'sensor' + assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"], "cns": [["mac", "00:a0:57:12:34:57"]]}, "o": {"name": "proxy", "sw": "unknown"}}) + tests +=1 + assert tests==1 + def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero): i = InfosG3() tests = 0 diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index f9ec1ef..df3fe02 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -205,10 +205,6 @@ async def test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open): assert 1 == test await asyncio.sleep(0.01) assert Infos.stat['proxy']['Inverter_Cnt'] == 0 - # check that the connection is released - for m in Message: - if (m.node_id == 'inv_2'): - assert False @pytest.mark.asyncio async def test_modbus_cnf3(config_conn, patch_no_mqtt, patch_open): diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index 521ef9b..80778a2 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -184,6 +184,35 @@ def device_rsp_msg(): # 0x1110 msg += b'\x15' return msg +@pytest.fixture +def device_ind_msg2(): # 0x4110 + msg = b'\xa5\xd4\x00\x10\x41\x02\x03' +get_sn() +b'\x02\xba\xd2\x00\x00' + msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53' + msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e' + msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54\x31\x39\x32\x2e' + msg += b'\x31\x36\x38\x2e\x38\x30\x2e\x34\x39\x00\x00\x00\x0f\x00\x01\xb0' + msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x41\x6c\x6c\x69\x75\x73\x2d\x48\x6f' + msg += b'\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += correct_checksum(msg) + msg += b'\x15' + return msg + +@pytest.fixture +def device_rsp_msg2(): # 0x1110 + msg = b'\xa5\x0a\x00\x10\x11\x03\x03' +get_sn() +b'\x02\x01' + msg += total() + msg += hb() + msg += correct_checksum(msg) + msg += b'\x15' + return msg + @pytest.fixture def invalid_start_byte(): # 0x4110 msg = b'\xa4\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xba\xd2\x00\x00' @@ -901,6 +930,54 @@ def test_read_two_messages2(config_tsun_allow_all, inverter_ind_msg, inverter_in assert m._send_buffer==b'' m.close() +def test_read_two_messages3(config_tsun_allow_all, device_ind_msg2, device_rsp_msg2, inverter_ind_msg, inverter_rsp_msg): + # test device message received after the inverter masg + _ = config_tsun_allow_all + m = MemoryStream(inverter_ind_msg, (0,)) + 
m.append_msg(device_ind_msg2) + assert 0 == m.sensor_list + m._init_new_client_conn() + m.read() # read complete msg, and dispatch msg + assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 2 + assert m.header_len==11 + assert m.snr == 2070233889 + assert m.unique_id == '2070233889' + assert m.msg_recvd[0]['control']==0x4210 + assert m.msg_recvd[0]['seq']=='02:02' + assert m.msg_recvd[0]['data_len']==0x199 + assert m.msg_recvd[1]['control']==0x4110 + assert m.msg_recvd[1]['seq']=='03:03' + assert m.msg_recvd[1]['data_len']==0xd4 + assert '02b0' == m.db.get_db_value(Register.SENSOR_LIST, None) + assert 0x02b0 == m.sensor_list + assert m._forward_buffer==inverter_ind_msg+device_ind_msg2 + assert m._send_buffer==inverter_rsp_msg+device_rsp_msg2 + + m._send_buffer = bytearray(0) # clear send buffer for next test + m._init_new_client_conn() + assert m._send_buffer==b'' + m.close() + +def test_unkown_frame_code(config_tsun_inv1, inverter_ind_msg_81, inverter_rsp_msg_81): + _ = config_tsun_inv1 + m = MemoryStream(inverter_ind_msg_81, (0,)) + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.header_len==11 + assert m.snr == 2070233889 + assert m.unique_id == '2070233889' + assert m.control == 0x4210 + assert str(m.seq) == '03:03' + assert m.data_len == 0x199 + assert m._recv_buffer==b'' + assert m._send_buffer==inverter_rsp_msg_81 + assert m._forward_buffer==inverter_ind_msg_81 + assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 + m.close() + def test_unkown_message(config_tsun_inv1, unknown_msg): _ = config_tsun_inv1 m = MemoryStream(unknown_msg, (0,)) diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 16bb8d8..342e769 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -41,6 +41,7 @@ class MemoryStream(Talent): self.send_msg_ofs = 0 self.test_exception_async_write = False self.msg_recvd = [] + self.remote_stream = None def append_msg(self, msg): self.__msg += msg @@ -138,6 +139,26 @@ def msg_time_rsp_inv(): # Get Time Resonse message def msg_time_invalid(): # Get Time Request message return b'\x00\x00\x00\x13\x10R170000000000001\x94\x22' +@pytest.fixture +def msg_act_time(): # Act Time Indication message + return b'\x00\x00\x00\x1c\x10R170000000000001\x91\x99\x01\x00\x00\x01\x89\xc6\x53\x4d\x80' + +@pytest.fixture +def msg_act_time_ofs(): # Act Time Indication message withoffset 3600 + return b'\x00\x00\x00\x1c\x10R170000000000001\x91\x99\x01\x00\x00\x01\x89\xc6\x53\x5b\x90' + +@pytest.fixture +def msg_act_time_ack(): # Act Time Response message + return b'\x00\x00\x00\x14\x10R170000000000001\x99\x99\x02' + +@pytest.fixture +def msg_act_time_cmd(): # Act Time Response message + return b'\x00\x00\x00\x14\x10R170000000000001\x70\x99\x02' + +@pytest.fixture +def msg_act_time_inv(): # Act Time Indication message withoffset 3600 + return b'\x00\x00\x00\x1b\x10R170000000000001\x91\x99\x00\x00\x01\x89\xc6\x53\x5b\x90' + @pytest.fixture def msg_controller_ind(): # Data indication from the controller msg = b'\x00\x00\x01\x2f\x10R170000000000001\x91\x71\x0e\x10\x00\x00\x10R170000000000001' @@ -442,6 +463,26 @@ def msg_modbus_rsp21(): msg += b'\x00\x00\x00\x00\x00\x00\x00\xe6\xef' return msg +@pytest.fixture +def msg_modbus_cmd_new(): + msg = b'\x00\x00\x00\x20\x10R170000000000001' + msg += 
b'\x70\x77\x00\x01\xa3\x28\x08\x01\x03\x30\x00' + msg += b'\x00\x30\x4a\xde' + return msg + +@pytest.fixture +def msg_modbus_rsp20_new(): + msg = b'\x00\x00\x00\x7e\x10R170000000000001' + msg += b'\x91\x87\x00\x01\xa3\x28\x00\x65\x01\x03\x60' + msg += b'\x00\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x51\x09\x09\x17\x00\x17\x13\x88\x00\x40\x00\x00\x02\x58\x02\x23' + msg += b'\x00\x07\x00\x00\x00\x00\x01\x4f\x00\xab\x02\x40\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\xc0\x93\x00\x00' + msg += b'\x00\x00\x33\xad\x00\x09\x00\x00\x98\x1c\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\xa7\xab' + return msg + @pytest.fixture def broken_recv_buf(): # There are two message in the buffer, but the second has overwritten the first partly msg = b'\x00\x00\x05\x02\x10R170000000000001\x91\x04\x01\x90\x00\x01\x10R170000000000001' @@ -892,6 +933,7 @@ def test_msg_contact_invalid(config_tsun_inv1, msg_contact_invalid): def test_msg_get_time(config_tsun_inv1, msg_get_time): _ = config_tsun_inv1 m = MemoryStream(msg_get_time, (0,)) + m.state = State.up m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -903,6 +945,7 @@ def test_msg_get_time(config_tsun_inv1, msg_get_time): assert m.header_len==23 assert m.ts_offset==0 assert m.data_len==0 + assert m.state==State.pend assert m._forward_buffer==msg_get_time assert m._send_buffer==b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 @@ -911,6 +954,7 @@ def test_msg_get_time(config_tsun_inv1, msg_get_time): def test_msg_get_time_autark(config_no_tsun_inv1, msg_get_time): _ = config_no_tsun_inv1 m = MemoryStream(msg_get_time, (0,)) + m.state = State.received m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -922,14 +966,19 @@ def test_msg_get_time_autark(config_no_tsun_inv1, msg_get_time): assert m.header_len==23 assert m.ts_offset==0 assert m.data_len==0 + assert m.state==State.received assert m._forward_buffer==b'' assert m._send_buffer==bytearray(b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00') assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() def test_msg_time_resp(config_tsun_inv1, msg_time_rsp): + # test if ts_offset will be set on client and server side _ = config_tsun_inv1 m = MemoryStream(msg_time_rsp, (0,), False) + s = MemoryStream(b'', (0,), True) + assert s.ts_offset==0 + m.remote_stream = s m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -940,10 +989,13 @@ def test_msg_time_resp(config_tsun_inv1, msg_time_rsp): assert m.msg_id==34 assert m.header_len==23 assert m.ts_offset==3600000 + assert s.ts_offset==3600000 assert m.data_len==8 assert m._forward_buffer==b'' assert m._send_buffer==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + m.remote_stream = None + s.close() m.close() def test_msg_time_resp_autark(config_no_tsun_inv1, msg_time_rsp): @@ -1022,6 +1074,169 @@ def test_msg_time_invalid_autark(config_no_tsun_inv1, msg_time_invalid): assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 m.close() +def test_msg_act_time(config_no_modbus_poll, msg_act_time, 
msg_act_time_ack): + _ = config_no_modbus_poll + m = MemoryStream(msg_act_time, (0,)) + m.ts_offset=0 + m.mb_timeout = 124 + m.db.set_db_def_value(Register.POLLING_INTERVAL, 125) + m.state = State.received + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==153 + assert m.ts_offset==0 + assert m.header_len==23 + assert m.data_len==9 + assert m.state == State.up + assert m._forward_buffer==msg_act_time + assert m._send_buffer==msg_act_time_ack + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + assert 125 == m.db.get_db_value(Register.POLLING_INTERVAL, 0) + m.close() + +def test_msg_act_time2(config_tsun_inv1, msg_act_time, msg_act_time_ack): + _ = config_tsun_inv1 + m = MemoryStream(msg_act_time, (0,)) + m.ts_offset=0 + m.modbus_polling = True + m.mb_timeout = 123 + m.db.set_db_def_value(Register.POLLING_INTERVAL, 125) + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==153 + assert m.ts_offset==0 + assert m.header_len==23 + assert m.data_len==9 + assert m._forward_buffer==msg_act_time + assert m._send_buffer==msg_act_time_ack + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + assert 123 == m.db.get_db_value(Register.POLLING_INTERVAL, 0) + m.close() + +def test_msg_act_time_ofs(config_tsun_inv1, msg_act_time, msg_act_time_ofs, msg_act_time_ack): + _ = config_tsun_inv1 + m = MemoryStream(msg_act_time, (0,)) + m.ts_offset=3600 + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==153 + assert m.ts_offset==3600 + assert m.header_len==23 + assert m.data_len==9 + assert m._forward_buffer==msg_act_time_ofs + assert m._send_buffer==msg_act_time_ack + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + m.close() + +def test_msg_act_time_ofs2(config_tsun_inv1, msg_act_time, msg_act_time_ofs, msg_act_time_ack): + _ = config_tsun_inv1 + m = MemoryStream(msg_act_time_ofs, (0,)) + m.ts_offset=-3600 + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==153 + assert m.ts_offset==-3600 + assert m.header_len==23 + assert m.data_len==9 + assert m._forward_buffer==msg_act_time + assert m._send_buffer==msg_act_time_ack + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + m.close() + +def test_msg_act_time_autark(config_no_tsun_inv1, msg_act_time, msg_act_time_ack): + _ = config_no_tsun_inv1 + m = MemoryStream(msg_act_time, (0,)) + m.ts_offset=0 + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer 
flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==153 + assert m.ts_offset==0 + assert m.header_len==23 + assert m.data_len==9 + assert m._forward_buffer==b'' + assert m._send_buffer==msg_act_time_ack + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + m.close() + +def test_msg_act_time_ack(config_tsun_inv1, msg_act_time_ack): + _ = config_tsun_inv1 + m = MemoryStream(msg_act_time_ack, (0,)) + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==153 + assert m.msg_id==153 + assert m.header_len==23 + assert m.data_len==1 + assert m._forward_buffer==b'' + assert m._send_buffer==b'' + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + m.close() + +def test_msg_act_time_cmd(config_tsun_inv1, msg_act_time_cmd): + _ = config_tsun_inv1 + m = MemoryStream(msg_act_time_cmd, (0,)) + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==112 + assert m.msg_id==153 + assert m.header_len==23 + assert m.data_len==1 + assert m._forward_buffer==msg_act_time_cmd + assert m._send_buffer==b'' + assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 + m.close() + +def test_msg_act_time_inv(config_tsun_inv1, msg_act_time_inv): + _ = config_tsun_inv1 + m = MemoryStream(msg_act_time_inv, (0,)) + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==153 + assert m.header_len==23 + assert m.data_len==8 + assert m._forward_buffer==msg_act_time_inv + assert m._send_buffer==b'' + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + m.close() + def test_msg_cntrl_ind(config_tsun_inv1, msg_controller_ind, msg_controller_ind_ts_offs, msg_controller_ack): _ = config_tsun_inv1 m = MemoryStream(msg_controller_ind, (0,)) @@ -1583,7 +1798,7 @@ def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp20): assert m.msg_count == 2 assert m._forward_buffer==msg_modbus_rsp20 assert m._send_buffer==b'' - assert m.db.db == {'inverter': {'Version': 'V5.1.09', 'Rated_Power': 300}, 'grid': {'Timestamp': m._utc(), 'Voltage': 225.9, 'Current': 0.41, 'Frequency': 49.99, 'Output_Power': 94.8}, 'env': {'Inverter_Temp': 22}, 'input': {'Timestamp': m._utc(), 'pv1': {'Voltage': 0.8, 'Current': 0.0, 'Power': 0.0}, 'pv2': {'Voltage': 34.5, 'Current': 2.89, 'Power': 99.8}, 'pv3': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}, 'pv4': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}}} + assert m.db.db == {'collector': {'Serial_Number': 'R170000000000001'}, 'inverter': {'Version': 'V5.1.09', 'Rated_Power': 300}, 'grid': {'Timestamp': m._utc(), 'Voltage': 225.9, 'Current': 0.41, 'Frequency': 49.99, 'Output_Power': 94.8}, 'env': {'Inverter_Temp': 22}, 'input': {'Timestamp': m._utc(), 'pv1': {'Voltage': 0.8, 'Current': 0.0, 'Power': 0.0}, 'pv2': {'Voltage': 34.5, 'Current': 2.89, 
'Power': 99.8}, 'pv3': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}, 'pv4': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}}} assert m.db.get_db_value(Register.VERSION) == 'V5.1.09' assert m.db.get_db_value(Register.TS_GRID) == m._utc() assert m.new_data['inverter'] == True @@ -1613,13 +1828,64 @@ def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp21): assert m.msg_count == 2 assert m._forward_buffer==msg_modbus_rsp21 assert m._send_buffer==b'' - assert m.db.db == {'inverter': {'Version': 'V5.1.0E', 'Rated_Power': 300}, 'grid': {'Timestamp': m._utc(), 'Voltage': 225.9, 'Current': 0.41, 'Frequency': 49.99, 'Output_Power': 94.8}, 'env': {'Inverter_Temp': 22}, 'input': {'Timestamp': m._utc(), 'pv1': {'Voltage': 0.8, 'Current': 0.0, 'Power': 0.0}, 'pv2': {'Voltage': 34.5, 'Current': 2.89, 'Power': 99.8}, 'pv3': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}, 'pv4': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}}} + assert m.db.db == {'collector': {'Serial_Number': 'R170000000000001'}, 'inverter': {'Version': 'V5.1.0E', 'Rated_Power': 300}, 'grid': {'Timestamp': m._utc(), 'Voltage': 225.9, 'Current': 0.41, 'Frequency': 49.99, 'Output_Power': 94.8}, 'env': {'Inverter_Temp': 22}, 'input': {'Timestamp': m._utc(), 'pv1': {'Voltage': 0.8, 'Current': 0.0, 'Power': 0.0}, 'pv2': {'Voltage': 34.5, 'Current': 2.89, 'Power': 99.8}, 'pv3': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}, 'pv4': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}}} assert m.db.get_db_value(Register.VERSION) == 'V5.1.0E' assert m.db.get_db_value(Register.TS_GRID) == m._utc() assert m.new_data['inverter'] == True m.close() +def test_msg_modbus_rsp4(config_tsun_inv1, msg_modbus_rsp21): + '''Modbus response with a valid Modbus but no new values request must be forwarded''' + _ = config_tsun_inv1 + m = MemoryStream(msg_modbus_rsp21) + + m.mb.rsp_handler = m.msg_forward + m.mb.last_addr = 1 + m.mb.last_fcode = 3 + m.mb.last_len = 20 + m.mb.last_reg = 0x3008 + m.mb.req_pend = True + m.mb.err = 0 + db_values = {'collector': {'Serial_Number': 'R170000000000001'}, 'inverter': {'Version': 'V5.1.0E', 'Rated_Power': 300}, 'grid': {'Timestamp': m._utc(), 'Voltage': 225.9, 'Current': 0.41, 'Frequency': 49.99, 'Output_Power': 94.8}, 'env': {'Inverter_Temp': 22}, 'input': {'Timestamp': m._utc(), 'pv1': {'Voltage': 0.8, 'Current': 0.0, 'Power': 0.0}, 'pv2': {'Voltage': 34.5, 'Current': 2.89, 'Power': 99.8}, 'pv3': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}, 'pv4': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}}} + m.db.db = db_values + m.new_data['inverter'] = False + + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.mb.err == 0 + assert m.msg_count == 1 + assert m._forward_buffer==msg_modbus_rsp21 + assert m.modbus_elms == 19 + assert m._send_buffer==b'' + assert m.db.db == db_values + assert m.db.get_db_value(Register.VERSION) == 'V5.1.0E' + assert m.db.get_db_value(Register.TS_GRID) == m._utc() + assert m.new_data['inverter'] == False + + m.close() + +def test_msg_modbus_rsp_new(config_tsun_inv1, msg_modbus_rsp20_new): + '''Modbus response in new format with a valid Modbus request must be forwarded''' + _ = config_tsun_inv1 + m = MemoryStream(msg_modbus_rsp20_new) + m.db.stat['proxy']['Unknown_Ctrl'] = 0 + m.db.stat['proxy']['Modbus_Command'] = 0 + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == 
b"R170000000000001" + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==135 + assert m.header_len==23 + assert m.data_len==107 + assert m._forward_buffer==b'' + assert m._send_buffer==b'' + assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 + assert m.db.stat['proxy']['Modbus_Command'] == 0 + m.close() + def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_inv): _ = config_tsun_inv1 m = MemoryStream(msg_modbus_inv, (0,), False) From 5d5d7c218fbb5012b3878fd7b381c8940c6b3a17 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 7 Sep 2024 13:49:45 +0200 Subject: [PATCH 25/39] fix target preview --- app/docker-bake.hcl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/docker-bake.hcl b/app/docker-bake.hcl index b6d58d9..56317d2 100644 --- a/app/docker-bake.hcl +++ b/app/docker-bake.hcl @@ -78,7 +78,7 @@ target "dev" { target "preview" { inherits = ["_common", "_prod"] - tags = ["${IMAGE}:dev", "${IMAGE}:${VERSION}"] + tags = ["${IMAGE}:preview", "${IMAGE}:${VERSION}"] } target "rc" { From 865216b8d92c79449363bbeeba47469a6ded4cde Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 7 Sep 2024 18:03:50 +0200 Subject: [PATCH 26/39] remove dubbled fixtures --- app/tests/test_modbus_tcp.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index df3fe02..0b08c49 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -4,7 +4,6 @@ import asyncio from mock import patch from enum import Enum -from enum import Enum from app.src.singleton import Singleton from app.src.config import Config from app.src.infos import Infos @@ -133,12 +132,6 @@ def patch_no_mqtt(): with patch.object(Mqtt, 'publish') as conn: yield conn -@pytest.fixture -def patch_no_mqtt(): - with patch.object(Mqtt, 'publish') as conn: - yield conn - - @pytest.mark.asyncio async def test_modbus_conn(patch_open): _ = patch_open From 2ab35a82571e0beefd00f27ae3e0129954ede548 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sat, 7 Sep 2024 18:04:28 +0200 Subject: [PATCH 27/39] increase test coverage --- app/src/gen3/connection_g3.py | 9 +++- app/src/gen3/inverter_g3.py | 14 +++-- app/tests/test_connection_g3.py | 84 ++++++++++++++++++++++++++++++ app/tests/test_connection_g3p.py | 89 ++++++++++++++++++++++++++++++++ 4 files changed, 190 insertions(+), 6 deletions(-) create mode 100644 app/tests/test_connection_g3.py create mode 100644 app/tests/test_connection_g3p.py diff --git a/app/src/gen3/connection_g3.py b/app/src/gen3/connection_g3.py index 5aea231..b7e246b 100644 --- a/app/src/gen3/connection_g3.py +++ b/app/src/gen3/connection_g3.py @@ -1,7 +1,12 @@ import logging from asyncio import StreamReader, StreamWriter -from async_stream import AsyncStream -from gen3.talent import Talent + +if __name__ == "app.src.gen3.connection_g3": + from app.src.async_stream import AsyncStream + from app.src.gen3.talent import Talent +else: # pragma: no cover + from async_stream import AsyncStream + from gen3.talent import Talent logger = logging.getLogger('conn') diff --git a/app/src/gen3/inverter_g3.py b/app/src/gen3/inverter_g3.py index 9e2a40a..e7690f5 100644 --- a/app/src/gen3/inverter_g3.py +++ b/app/src/gen3/inverter_g3.py @@ -3,11 +3,17 @@ import traceback import json import asyncio from asyncio import StreamReader, StreamWriter -from config import Config -from inverter import Inverter -from gen3.connection_g3 import ConnectionG3 from aiomqtt import MqttCodeError -from infos import Infos + +if 
__name__ == "app.src.gen3.inverter_g3": + from app.src.config import Config + from app.src.inverter import Inverter + from app.src.gen3.connection_g3 import ConnectionG3 +else: # pragma: no cover + from config import Config + from inverter import Inverter + from gen3.connection_g3 import ConnectionG3 + from infos import Infos logger_mqtt = logging.getLogger('mqtt') diff --git a/app/tests/test_connection_g3.py b/app/tests/test_connection_g3.py new file mode 100644 index 0000000..452bf18 --- /dev/null +++ b/app/tests/test_connection_g3.py @@ -0,0 +1,84 @@ +# test_with_pytest.py +import pytest +import asyncio + +from mock import patch +from app.src.async_stream import AsyncStream +from app.src.gen3.connection_g3 import ConnectionG3 +from app.src.gen3.talent import Talent + +@pytest.fixture +def patch_async_init(): + with patch.object(AsyncStream, '__init__') as conn: + yield conn + +@pytest.fixture +def patch_talent_init(): + with patch.object(Talent, '__init__') as conn: + yield conn + +@pytest.fixture +def patch_healthy(): + with patch.object(AsyncStream, 'healthy') as conn: + yield conn + +@pytest.fixture +def patch_async_close(): + with patch.object(AsyncStream, 'close') as conn: + yield conn + +@pytest.fixture +def patch_talent_close(): + with patch.object(Talent, 'close') as conn: + yield conn + +class FakeReader(): + def __init__(self): + self.on_recv = asyncio.Event() + async def read(self, max_len: int): + await self.on_recv.wait() + return b'' + def feed_eof(self): + return + + +class FakeWriter(): + def write(self, buf: bytes): + return + def get_extra_info(self, sel: str): + if sel == 'peername': + return 'remote.intern' + elif sel == 'sockname': + return 'sock:1234' + assert False + def is_closing(self): + return False + def close(self): + return + async def wait_closed(self): + return + + + +def test_method_calls(patch_async_init, patch_talent_init, patch_healthy, patch_async_close, patch_talent_close): + spy1 = patch_async_init + spy2 = patch_talent_init + spy3 = patch_healthy + spy4 = patch_async_close + spy5 = patch_talent_close + reader = FakeReader() + writer = FakeWriter() + id_str = "id_string" + addr = ('proxy.local', 10000) + conn = ConnectionG3(reader, writer, addr, + remote_stream= None, server_side=True, id_str=id_str) + spy1.assert_called_once_with(conn, reader, writer, addr) + spy2.assert_called_once_with(conn, True, id_str) + conn.healthy() + + spy3.assert_called_once() + + conn.close() + spy4.assert_called_once() + spy5.assert_called_once() + diff --git a/app/tests/test_connection_g3p.py b/app/tests/test_connection_g3p.py new file mode 100644 index 0000000..67607f1 --- /dev/null +++ b/app/tests/test_connection_g3p.py @@ -0,0 +1,89 @@ +# test_with_pytest.py +import pytest +import asyncio + +from mock import patch +from app.src.singleton import Singleton +from app.src.async_stream import AsyncStream +from app.src.gen3plus.connection_g3p import ConnectionG3P +from app.src.gen3plus.solarman_v5 import SolarmanV5 + +@pytest.fixture +def patch_async_init(): + with patch.object(AsyncStream, '__init__') as conn: + yield conn + +@pytest.fixture +def patch_solarman_init(): + with patch.object(SolarmanV5, '__init__') as conn: + yield conn + +@pytest.fixture(scope="module", autouse=True) +def module_init(): + Singleton._instances.clear() + yield + +@pytest.fixture +def patch_healthy(): + with patch.object(AsyncStream, 'healthy') as conn: + yield conn + +@pytest.fixture +def patch_async_close(): + with patch.object(AsyncStream, 'close') as conn: + yield conn + +@pytest.fixture 
+def patch_solarman_close(): + with patch.object(SolarmanV5, 'close') as conn: + yield conn + +class FakeReader(): + def __init__(self): + self.on_recv = asyncio.Event() + async def read(self, max_len: int): + await self.on_recv.wait() + return b'' + def feed_eof(self): + return + + +class FakeWriter(): + def write(self, buf: bytes): + return + def get_extra_info(self, sel: str): + if sel == 'peername': + return 'remote.intern' + elif sel == 'sockname': + return 'sock:1234' + assert False + def is_closing(self): + return False + def close(self): + return + async def wait_closed(self): + return + + + +def test_method_calls(patch_async_init, patch_solarman_init, patch_healthy, patch_async_close, patch_solarman_close): + spy1 = patch_async_init + spy2 = patch_solarman_init + spy3 = patch_healthy + spy4 = patch_async_close + spy5 = patch_solarman_close + reader = FakeReader() + writer = FakeWriter() + addr = ('proxy.local', 10000) + conn = ConnectionG3P(reader, writer, addr, + remote_stream= None, server_side=True, client_mode=False) + spy1.assert_called_once_with(conn, reader, writer, addr) + spy2.assert_called_once_with(conn, True, False) + conn.healthy() + + spy3.assert_called_once() + + conn.close() + spy4.assert_called_once() + spy5.assert_called_once() + From e12c78212f0f8ffcb74e35a3e5105e037b1d85da Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sat, 14 Sep 2024 08:40:53 +0200 Subject: [PATCH 28/39] Update README.md (#185) update badges --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 16c90f5..8f222c8 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@

 License: BSD-3-Clause
 Supported Python versions
-Supported aiomqtt versions
+Supported aiomqtt versions
 Supported aiocron versions
 Supported toml versions
From 5ef68280b1574b75c862d1561b65f9bec0b9fd5d Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sat, 14 Sep 2024 19:49:29 +0200 Subject: [PATCH 29/39] S allius/issue186 (#187) * Parse more values in Server Mode Fixes #186 * read OUTPUT_COEFFICIENT and MAC_ADDR in SrvMode * fix unit test * increase test coverage --- app/src/gen3/infos_g3.py | 169 ++++++++++++++++++--------------- app/src/gen3plus/infos_g3p.py | 3 +- app/tests/test_infos_g3.py | 44 +++++---- app/tests/test_infos_g3p.py | 4 +- app/tests/test_inverter_g3p.py | 160 +++++++++++++++++++++++++++++++ app/tests/test_modbus_tcp.py | 81 ++++++++++++++++ app/tests/test_talent.py | 2 +- 7 files changed, 364 insertions(+), 99 deletions(-) create mode 100644 app/tests/test_inverter_g3p.py diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 6a9c1cf..c39bed9 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -11,82 +11,82 @@ else: # pragma: no cover class RegisterMap: map = { - 0x00092ba8: Register.COLLECTOR_FW_VERSION, - 0x000927c0: Register.CHIP_TYPE, - 0x00092f90: Register.CHIP_MODEL, - 0x00094ae8: Register.MAC_ADDR, - 0x00095a88: Register.TRACE_URL, - 0x00095aec: Register.LOGGER_URL, - 0x0000000a: Register.PRODUCT_NAME, - 0x00000014: Register.MANUFACTURER, - 0x0000001e: Register.VERSION, - 0x00000028: Register.SERIAL_NUMBER, - 0x00000032: Register.EQUIPMENT_MODEL, - 0x00013880: Register.NO_INPUTS, - 0xffffff00: Register.INVERTER_CNT, - 0xffffff01: Register.UNKNOWN_SNR, - 0xffffff02: Register.UNKNOWN_MSG, - 0xffffff03: Register.INVALID_DATA_TYPE, - 0xffffff04: Register.INTERNAL_ERROR, - 0xffffff05: Register.UNKNOWN_CTRL, - 0xffffff06: Register.OTA_START_MSG, - 0xffffff07: Register.SW_EXCEPTION, - 0xffffff08: Register.MAX_DESIGNED_POWER, - 0xffffff09: Register.OUTPUT_COEFFICIENT, - 0xffffff0a: Register.INVERTER_STATUS, - 0xffffff0b: Register.POLLING_INTERVAL, - 0xfffffffe: Register.TEST_REG1, - 0xffffffff: Register.TEST_REG2, - 0x00000640: Register.OUTPUT_POWER, - 0x000005dc: Register.RATED_POWER, - 0x00000514: Register.INVERTER_TEMP, - 0x000006a4: Register.PV1_VOLTAGE, - 0x00000708: Register.PV1_CURRENT, - 0x0000076c: Register.PV1_POWER, - 0x000007d0: Register.PV2_VOLTAGE, - 0x00000834: Register.PV2_CURRENT, - 0x00000898: Register.PV2_POWER, - 0x000008fc: Register.PV3_VOLTAGE, - 0x00000960: Register.PV3_CURRENT, - 0x000009c4: Register.PV3_POWER, - 0x00000a28: Register.PV4_VOLTAGE, - 0x00000a8c: Register.PV4_CURRENT, - 0x00000af0: Register.PV4_POWER, - 0x00000c1c: Register.PV1_DAILY_GENERATION, - 0x00000c80: Register.PV1_TOTAL_GENERATION, - 0x00000ce4: Register.PV2_DAILY_GENERATION, - 0x00000d48: Register.PV2_TOTAL_GENERATION, - 0x00000dac: Register.PV3_DAILY_GENERATION, - 0x00000e10: Register.PV3_TOTAL_GENERATION, - 0x00000e74: Register.PV4_DAILY_GENERATION, - 0x00000ed8: Register.PV4_TOTAL_GENERATION, - 0x00000b54: Register.DAILY_GENERATION, - 0x00000bb8: Register.TOTAL_GENERATION, - 0x000003e8: Register.GRID_VOLTAGE, - 0x0000044c: Register.GRID_CURRENT, - 0x000004b0: Register.GRID_FREQUENCY, - 0x000cfc38: Register.CONNECT_COUNT, - 0x000c3500: Register.SIGNAL_STRENGTH, - 0x000c96a8: Register.POWER_ON_TIME, - 0x000d0020: Register.COLLECT_INTERVAL, - 0x000cf850: Register.DATA_UP_INTERVAL, - 0x000c7f38: Register.COMMUNICATION_TYPE, - 0x00000191: Register.EVENT_401, - 0x00000192: Register.EVENT_402, - 0x00000193: Register.EVENT_403, - 0x00000194: Register.EVENT_404, - 0x00000195: Register.EVENT_405, - 0x00000196: Register.EVENT_406, - 0x00000197: 
Register.EVENT_407, - 0x00000198: Register.EVENT_408, - 0x00000199: Register.EVENT_409, - 0x0000019a: Register.EVENT_410, - 0x0000019b: Register.EVENT_411, - 0x0000019c: Register.EVENT_412, - 0x0000019d: Register.EVENT_413, - 0x0000019e: Register.EVENT_414, - 0x0000019f: Register.EVENT_415, - 0x000001a0: Register.EVENT_416, + 0x00092ba8: {'reg': Register.COLLECTOR_FW_VERSION}, + 0x000927c0: {'reg': Register.CHIP_TYPE}, + 0x00092f90: {'reg': Register.CHIP_MODEL}, + 0x00094ae8: {'reg': Register.MAC_ADDR}, + 0x00095a88: {'reg': Register.TRACE_URL}, + 0x00095aec: {'reg': Register.LOGGER_URL}, + 0x0000000a: {'reg': Register.PRODUCT_NAME}, + 0x00000014: {'reg': Register.MANUFACTURER}, + 0x0000001e: {'reg': Register.VERSION}, + 0x00000028: {'reg': Register.SERIAL_NUMBER}, + 0x00000032: {'reg': Register.EQUIPMENT_MODEL}, + 0x00013880: {'reg': Register.NO_INPUTS}, + 0xffffff00: {'reg': Register.INVERTER_CNT}, + 0xffffff01: {'reg': Register.UNKNOWN_SNR}, + 0xffffff02: {'reg': Register.UNKNOWN_MSG}, + 0xffffff03: {'reg': Register.INVALID_DATA_TYPE}, + 0xffffff04: {'reg': Register.INTERNAL_ERROR}, + 0xffffff05: {'reg': Register.UNKNOWN_CTRL}, + 0xffffff06: {'reg': Register.OTA_START_MSG}, + 0xffffff07: {'reg': Register.SW_EXCEPTION}, + 0xffffff08: {'reg': Register.POLLING_INTERVAL}, + 0xfffffffe: {'reg': Register.TEST_REG1}, + 0xffffffff: {'reg': Register.TEST_REG2}, + 0x00000640: {'reg': Register.OUTPUT_POWER}, + 0x000005dc: {'reg': Register.RATED_POWER}, + 0x00000514: {'reg': Register.INVERTER_TEMP}, + 0x000006a4: {'reg': Register.PV1_VOLTAGE}, + 0x00000708: {'reg': Register.PV1_CURRENT}, + 0x0000076c: {'reg': Register.PV1_POWER}, + 0x000007d0: {'reg': Register.PV2_VOLTAGE}, + 0x00000834: {'reg': Register.PV2_CURRENT}, + 0x00000898: {'reg': Register.PV2_POWER}, + 0x000008fc: {'reg': Register.PV3_VOLTAGE}, + 0x00000960: {'reg': Register.PV3_CURRENT}, + 0x000009c4: {'reg': Register.PV3_POWER}, + 0x00000a28: {'reg': Register.PV4_VOLTAGE}, + 0x00000a8c: {'reg': Register.PV4_CURRENT}, + 0x00000af0: {'reg': Register.PV4_POWER}, + 0x00000c1c: {'reg': Register.PV1_DAILY_GENERATION}, + 0x00000c80: {'reg': Register.PV1_TOTAL_GENERATION}, + 0x00000ce4: {'reg': Register.PV2_DAILY_GENERATION}, + 0x00000d48: {'reg': Register.PV2_TOTAL_GENERATION}, + 0x00000dac: {'reg': Register.PV3_DAILY_GENERATION}, + 0x00000e10: {'reg': Register.PV3_TOTAL_GENERATION}, + 0x00000e74: {'reg': Register.PV4_DAILY_GENERATION}, + 0x00000ed8: {'reg': Register.PV4_TOTAL_GENERATION}, + 0x00000b54: {'reg': Register.DAILY_GENERATION}, + 0x00000bb8: {'reg': Register.TOTAL_GENERATION}, + 0x000003e8: {'reg': Register.GRID_VOLTAGE}, + 0x0000044c: {'reg': Register.GRID_CURRENT}, + 0x000004b0: {'reg': Register.GRID_FREQUENCY}, + 0x000cfc38: {'reg': Register.CONNECT_COUNT}, + 0x000c3500: {'reg': Register.SIGNAL_STRENGTH}, + 0x000c96a8: {'reg': Register.POWER_ON_TIME}, + 0x000d0020: {'reg': Register.COLLECT_INTERVAL}, + 0x000cf850: {'reg': Register.DATA_UP_INTERVAL}, + 0x000c7f38: {'reg': Register.COMMUNICATION_TYPE}, + 0x00000191: {'reg': Register.EVENT_401}, + 0x00000192: {'reg': Register.EVENT_402}, + 0x00000193: {'reg': Register.EVENT_403}, + 0x00000194: {'reg': Register.EVENT_404}, + 0x00000195: {'reg': Register.EVENT_405}, + 0x00000196: {'reg': Register.EVENT_406}, + 0x00000197: {'reg': Register.EVENT_407}, + 0x00000198: {'reg': Register.EVENT_408}, + 0x00000199: {'reg': Register.EVENT_409}, + 0x0000019a: {'reg': Register.EVENT_410}, + 0x0000019b: {'reg': Register.EVENT_411}, + 0x0000019c: {'reg': Register.EVENT_412}, + 0x0000019d: {'reg': 
Register.EVENT_413}, + 0x0000019e: {'reg': Register.EVENT_414}, + 0x0000019f: {'reg': Register.EVENT_415}, + 0x000001a0: {'reg': Register.EVENT_416}, + 0x00000064: {'reg': Register.INVERTER_STATUS}, + 0x0000125c: {'reg': Register.MAX_DESIGNED_POWER}, + 0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024}, } @@ -104,7 +104,8 @@ class InfosG3(Infos): entity strings sug_area:str ==> suggested area string from the config file''' # iterate over RegisterMap.map and get the register values - for reg in RegisterMap.map.values(): + for row in RegisterMap.map.values(): + reg = row['reg'] res = self.ha_conf(reg, ha_prfx, node_id, snr, False, sug_area) # noqa: E501 if res: yield res @@ -123,9 +124,11 @@ class InfosG3(Infos): result = struct.unpack_from('!lB', buf, ind) addr = result[0] if addr not in RegisterMap.map: + row = None info_id = -1 else: - info_id = RegisterMap.map[addr] + row = RegisterMap.map[addr] + info_id = row['reg'] data_type = result[1] ind += 5 @@ -171,9 +174,19 @@ class InfosG3(Infos): " not supported") return + result = self.__modify_val(row, result) + yield from self.__store_result(addr, result, info_id, node_id) i += 1 + def __modify_val(self, row, result): + if row: + if 'eval' in row: + result = eval(row['eval']) + if 'ratio' in row: + result = round(result * row['ratio'], 2) + return result + def __store_result(self, addr, result, info_id, node_id): keys, level, unit, must_incr = self._key_obj(info_id) if keys: diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index 2d6a2fc..135aa3d 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -19,6 +19,7 @@ class RegisterMap: 0x4102001a: {'reg': Register.HEARTBEAT_INTERVAL, 'fmt': ' Date: Sun, 15 Sep 2024 01:02:49 +0200 Subject: [PATCH 30/39] S allius/issue186 (#188) * increase test coverage --- app/config/default_config.toml | 7 + app/src/gen3/inverter_g3.py | 1 + app/tests/test_inverter_g3.py | 235 +++++++++++++++++++++++++++++++++ app/tests/test_inverter_g3p.py | 120 ++++++++++++++--- 4 files changed, 341 insertions(+), 22 deletions(-) create mode 100644 app/tests/test_inverter_g3.py diff --git a/app/config/default_config.toml b/app/config/default_config.toml index 744835a..2c135bc 100644 --- a/app/config/default_config.toml +++ b/app/config/default_config.toml @@ -34,6 +34,8 @@ inverters.allow_all = true # allow inverters, even if we have no inverter mapp modbus_polling = false # Disable optional MODBUS polling #pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr #pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr +# end of block [inverters."R170000000000001"] + #[inverters."R17xxxxxxxxxxxx2"] #node_id = '' # Optional, MQTT replacement for inverters serial number @@ -41,6 +43,8 @@ modbus_polling = false # Disable optional MODBUS polling #modbus_polling = false # Disable optional MODBUS polling #pv1 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr #pv2 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr +# end of block [inverters."R170000000000002"] + [inverters."Y170000000000001"] monitor_sn = 2000000000 # The "Monitoring SN:" can be found on a sticker enclosed with the inverter @@ -56,9 +60,12 @@ modbus_polling = true # Enable optional MODBUS polling #pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr #pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr #pv4 = {type = 'RSM40-8-410M', 
manufacturer = 'Risen'} # Optional, PV module descr +# end of block [inverters."Y170000000000001"] + [gen3plus.at_acl] tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'] tsun.block = [] mqtt.allow = ['AT+'] mqtt.block = [] +# end of block [gen3plus.at_acl] diff --git a/app/src/gen3/inverter_g3.py b/app/src/gen3/inverter_g3.py index e7690f5..c365286 100644 --- a/app/src/gen3/inverter_g3.py +++ b/app/src/gen3/inverter_g3.py @@ -9,6 +9,7 @@ if __name__ == "app.src.gen3.inverter_g3": from app.src.config import Config from app.src.inverter import Inverter from app.src.gen3.connection_g3 import ConnectionG3 + from app.src.infos import Infos else: # pragma: no cover from config import Config from inverter import Inverter diff --git a/app/tests/test_inverter_g3.py b/app/tests/test_inverter_g3.py new file mode 100644 index 0000000..017e897 --- /dev/null +++ b/app/tests/test_inverter_g3.py @@ -0,0 +1,235 @@ +# test_with_pytest.py +import pytest +import asyncio + +from mock import patch +from enum import Enum +from app.src.infos import Infos +from app.src.config import Config +from app.src.inverter import Inverter +from app.src.singleton import Singleton +from app.src.gen3.connection_g3 import ConnectionG3 +from app.src.gen3.inverter_g3 import InverterG3 + +from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname + +pytest_plugins = ('pytest_asyncio',) + +# initialize the proxy statistics +Infos.static_init() + +@pytest.fixture +def config_conn(): + Config.act_config = { + 'mqtt':{ + 'host': test_hostname, + 'port': test_port, + 'user': '', + 'passwd': '' + }, + 'ha':{ + 'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'test_1', + 'proxy_unique_id': '' + }, + 'tsun':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234}, 'inverters':{'allow_all':True} + } + +@pytest.fixture(scope="module", autouse=True) +def module_init(): + Singleton._instances.clear() + yield + +@pytest.fixture +def patch_conn_init(): + with patch.object(ConnectionG3, '__init__', return_value= None) as conn: + yield conn + +@pytest.fixture +def patch_conn_close(): + with patch.object(ConnectionG3, 'close') as conn: + yield conn + +class FakeReader(): + def __init__(self): + self.on_recv = asyncio.Event() + async def read(self, max_len: int): + await self.on_recv.wait() + return b'' + def feed_eof(self): + return + + +class FakeWriter(): + def write(self, buf: bytes): + return + def get_extra_info(self, sel: str): + if sel == 'peername': + return 'remote.intern' + elif sel == 'sockname': + return 'sock:1234' + assert False + def is_closing(self): + return False + def close(self): + return + async def wait_closed(self): + return + +class TestType(Enum): + RD_TEST_0_BYTES = 1 + RD_TEST_TIMEOUT = 2 + RD_TEST_EXCEPT = 3 + + +test = TestType.RD_TEST_0_BYTES + +@pytest.fixture +def patch_open_connection(): + async def new_conn(conn): + await asyncio.sleep(0) + return FakeReader(), FakeWriter() + + def new_open(host: str, port: int): + global test + if test == TestType.RD_TEST_TIMEOUT: + raise ConnectionRefusedError + elif test == TestType.RD_TEST_EXCEPT: + raise ValueError("Value cannot be negative") # Compliant + return new_conn(None) + + with patch.object(asyncio, 'open_connection', new_open) as conn: + yield conn + + +def test_method_calls(patch_conn_init, patch_conn_close): + spy1 = patch_conn_init + spy2 = patch_conn_close + reader = FakeReader() + writer = FakeWriter() + addr = ('proxy.local', 10000) + inverter = 
InverterG3(reader, writer, addr) + inverter.l_addr = '' + inverter.r_addr = '' + + spy1.assert_called_once() + spy1.assert_called_once_with(reader, writer, addr, None, True) + + inverter.close() + spy2.assert_called_once() + +@pytest.mark.asyncio +async def test_remote_conn(config_conn, patch_open_connection, patch_conn_close): + _ = config_conn + _ = patch_open_connection + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) + + await inverter.async_create_remote() + await asyncio.sleep(0) + assert inverter.remote_stream + inverter.close() + spy1.assert_called_once() + +@pytest.mark.asyncio +async def test_remote_except(config_conn, patch_open_connection, patch_conn_close): + _ = config_conn + _ = patch_open_connection + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + global test + test = TestType.RD_TEST_TIMEOUT + + inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) + + await inverter.async_create_remote() + await asyncio.sleep(0) + assert inverter.remote_stream==None + + test = TestType.RD_TEST_EXCEPT + await inverter.async_create_remote() + await asyncio.sleep(0) + assert inverter.remote_stream==None + inverter.close() + spy1.assert_called_once() + +@pytest.mark.asyncio +async def test_mqtt_publish(config_conn, patch_open_connection, patch_conn_close): + _ = config_conn + _ = patch_open_connection + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + Inverter.class_init() + + inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) + inverter._Talent__set_serial_no(serial_no= "123344") + + inverter.new_data['inverter'] = True + inverter.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['inverter'] == False + + inverter.new_data['env'] = True + inverter.db.db['env'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['env'] == False + + Infos.new_stat_data['proxy'] = True + await inverter.async_publ_mqtt() + assert Infos.new_stat_data['proxy'] == False + + inverter.close() + spy1.assert_called_once() + +@pytest.mark.asyncio +async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err, patch_conn_close): + _ = config_conn + _ = patch_open_connection + _ = patch_mqtt_err + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + Inverter.class_init() + + inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) + inverter._Talent__set_serial_no(serial_no= "123344") + + inverter.new_data['inverter'] = True + inverter.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['inverter'] == True + + inverter.close() + spy1.assert_called_once() + +@pytest.mark.asyncio +async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except, patch_conn_close): + _ = config_conn + _ = patch_open_connection + _ = patch_mqtt_except + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + Inverter.class_init() + + inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) + inverter._Talent__set_serial_no(serial_no= "123344") + + inverter.new_data['inverter'] = True + inverter.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['inverter'] == True + + inverter.close() + spy1.assert_called_once() diff --git a/app/tests/test_inverter_g3p.py b/app/tests/test_inverter_g3p.py index 8971e95..07d1160 100644 --- a/app/tests/test_inverter_g3p.py +++ 
b/app/tests/test_inverter_g3p.py @@ -4,13 +4,14 @@ import asyncio from mock import patch from enum import Enum +from app.src.infos import Infos from app.src.config import Config +from app.src.inverter import Inverter from app.src.singleton import Singleton from app.src.gen3plus.connection_g3p import ConnectionG3P from app.src.gen3plus.inverter_g3p import InverterG3P -from app.src.gen3plus.infos_g3p import InfosG3P -from app.src.infos import Infos +from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname pytest_plugins = ('pytest_asyncio',) @@ -20,7 +21,22 @@ Infos.static_init() @pytest.fixture def config_conn(): - Config.act_config = {'solarman':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234}, 'inverters':{'allow_all':True}} + Config.act_config = { + 'mqtt':{ + 'host': test_hostname, + 'port': test_port, + 'user': '', + 'passwd': '' + }, + 'ha':{ + 'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'test_1', + 'proxy_unique_id': '' + }, + 'solarman':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234}, 'inverters':{'allow_all':True} + } @pytest.fixture(scope="module", autouse=True) def module_init(): @@ -112,16 +128,9 @@ async def test_remote_conn(config_conn, patch_open_connection, patch_conn_close) assert asyncio.get_running_loop() spy1 = patch_conn_close - reader = FakeReader() - writer = FakeWriter() - addr = ('proxy.local', 10000) - - inverter = InverterG3P(reader, writer, addr, client_mode=False) - inverter.l_addr = '' - inverter.r_addr = '' - inverter.node_id = 'Test' - inverter.db = InfosG3P(client_mode= False) + inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) + await inverter.async_create_remote() await asyncio.sleep(0) assert inverter.remote_stream @@ -135,19 +144,12 @@ async def test_remote_except(config_conn, patch_open_connection, patch_conn_clos assert asyncio.get_running_loop() spy1 = patch_conn_close - reader = FakeReader() - writer = FakeWriter() - - addr = ('proxy.local', 10000) + global test test = TestType.RD_TEST_TIMEOUT - inverter = InverterG3P(reader, writer, addr, client_mode=False) - inverter.l_addr = '' - inverter.r_addr = '' - inverter.node_id = 'Test' - inverter.db = InfosG3P(client_mode= False) - test = TestType.RD_TEST_TIMEOUT + inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) + await inverter.async_create_remote() await asyncio.sleep(0) assert inverter.remote_stream==None @@ -158,3 +160,77 @@ async def test_remote_except(config_conn, patch_open_connection, patch_conn_clos assert inverter.remote_stream==None inverter.close() spy1.assert_called_once() + +@pytest.mark.asyncio +async def test_mqtt_publish(config_conn, patch_open_connection, patch_conn_close): + _ = config_conn + _ = patch_open_connection + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + Inverter.class_init() + + inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) + inverter._SolarmanV5__set_serial_no(snr= 123344) + + inverter.new_data['inverter'] = True + inverter.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['inverter'] == False + + inverter.new_data['env'] = True + inverter.db.db['env'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['env'] == False + + Infos.new_stat_data['proxy'] = True + await inverter.async_publ_mqtt() + assert Infos.new_stat_data['proxy'] 
== False + + inverter.close() + spy1.assert_called_once() + +@pytest.mark.asyncio +async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err, patch_conn_close): + _ = config_conn + _ = patch_open_connection + _ = patch_mqtt_err + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + Inverter.class_init() + + inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) + inverter._SolarmanV5__set_serial_no(snr= 123344) + + inverter.new_data['inverter'] = True + inverter.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['inverter'] == True + + inverter.close() + spy1.assert_called_once() + +@pytest.mark.asyncio +async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except, patch_conn_close): + _ = config_conn + _ = patch_open_connection + _ = patch_mqtt_except + assert asyncio.get_running_loop() + + spy1 = patch_conn_close + + Inverter.class_init() + + inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) + inverter._SolarmanV5__set_serial_no(snr= 123344) + + inverter.new_data['inverter'] = True + inverter.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert inverter.new_data['inverter'] == True + + inverter.close() + spy1.assert_called_once() From 614acbf32dfe5eb78a9fea33857bd0696e87e85a Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 15 Sep 2024 01:18:36 +0200 Subject: [PATCH 31/39] update changelog --- CHANGELOG.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 93cfc9d..eacd121 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] -- GEN3: add support for new messages of version 3 firmwares +- Parse more values in Server Mode [#186](https://github.com/s-allius/tsun-gen3-proxy/issues/186) +- GEN3: add support for new messages of version 3 firmwares [#182](https://github.com/s-allius/tsun-gen3-proxy/issues/182) - add support for controller MAC and serial number - GEN3: don't crash on overwritten msg in the receive buffer - Reading the version string from the image updates it even if the image is re-pulled without re-deployment From 5642c912a8a07e7d7b1364c9458c8e5a182d1531 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 15 Sep 2024 15:17:45 +0200 Subject: [PATCH 32/39] add dokumentation --- app/config/default_config.toml | 184 ++++++++++++++++++++++++++------- 1 file changed, 145 insertions(+), 39 deletions(-) diff --git a/app/config/default_config.toml b/app/config/default_config.toml index 2c135bc..57b2baf 100644 --- a/app/config/default_config.toml +++ b/app/config/default_config.toml @@ -1,71 +1,177 @@ -# configuration to reach tsun cloud -tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates -tsun.host = 'logger.talent-monitoring.com' -tsun.port = 5005 +########################################################################################## +### +### T S U N - G E N 3 - P R O X Y +### +### from Stefan Allius +### +########################################################################################## +### +### The readme will give you an overview of the project: +### https://s-allius.github.io/tsun-gen3-proxy/ +### +### The proxy supports different operation modes. Select the proper mode +### which depends on your inverter type and you inverter firmware. 
+### Please read: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/Operation-Modes-Overview +### +### Here you will find a description of all configuration options: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details +### +### The configration uses the TOML format, which aims to be easy to read due to +### obvious semantics. You find more details here: https://toml.io/en/v1.0.0 +### +########################################################################################## -# configuration to reach the new tsun cloud for G3 Plus inverters -solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates -solarman.host = 'iot.talent-monitoring.com' -solarman.port = 10000 -# mqtt broker configuration +########################################################################################## +## +## MQTT broker configuration +## +## In this block, you must configure the connection to your MQTT broker and specify the +## required credentials. As the proxy does not currently support an encrypted connection +## to the MQTT broker, it is strongly recommended that you do not use a public broker. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#mqtt-broker-account +## + mqtt.host = 'mqtt' # URL or IP address of the mqtt broker mqtt.port = 1883 mqtt.user = '' mqtt.passwd = '' -# home-assistant +########################################################################################## +## +## HOME ASSISTANT +## +## The proxy supports the MQTT autoconfiguration of Home Assistant (HA). The default +## values match the HA default configuration. If you need to change these or want to use +## a different MQTT client, you can adjust the prefixes of the MQTT topics below. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#home-assistant +## + ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance -# microinverters -inverters.allow_all = true # allow inverters, even if we have no inverter mapping -# inverter mapping, maps a `serial_no* to a `mqtt_id` and defines an optional `suggested_place` for `home-assistant` -# -# for each inverter add a block starting with [inverters."<16-digit serial numbeer>"] +########################################################################################## +## +## GEN3 Proxy Mode Configuration +## +## In this block, you can configure an optional connection to the TSUN cloud for GEN3 +## inverters. This connection is only required if you want send data to the TSUN cloud +## to use the TSUN APPs or receive firmware updates. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#tsun-cloud-for-gen3-inverter-only +## + +tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates +tsun.host = 'logger.talent-monitoring.com' +tsun.port = 5005 + + +########################################################################################## +## +## GEN3PLUS Proxy Mode Configuration +## +## In this block, you can configure an optional connection to the TSUN cloud for GEN3PLUS +## inverters. 
This connection is only required if you want send data to the TSUN cloud +## to use the TSUN APPs or receive firmware updates. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#solarman-cloud-for-gen3plus-inverter-only +## + +solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates +solarman.host = 'iot.talent-monitoring.com' +solarman.port = 10000 + + +########################################################################################## +### +### Inverter Definitions +### +### The proxy supports the simultaneous operation of several inverters, even of different +### types. A configuration block must be defined for each inverter, in which all necessary +### parameters must be specified. These depend on the operation mode used and also differ +### slightly depending on the inverter type. +### +### In addition, the PV modules can be defined at the individual inputs for documentation +### purposes, whereby these are displayed in Home Assistant. +### +### The proxy only accepts connections from known inverters. This can be switched off for +### test purposes and unknown serial numbers are also accepted. +### + +inverters.allow_all = false # only allow known inverters + + +########################################################################################## +## +## For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT +## definition. To do this, the corresponding configuration block is started with +## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned +## to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set +## in the configuration block +## +## The serial numbers of all GEN3 inverters start with `R17`! +## + [inverters."R170000000000001"] -#node_id = '' # Optional, MQTT replacement for inverters serial number -#suggested_area = '' # Optional, suggested installation area for home-assistant -modbus_polling = false # Disable optional MODBUS polling -#pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr -#pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr -# end of block [inverters."R170000000000001"] +node_id = '' # MQTT replacement for inverters serial number +suggested_area = '' # suggested installation area for home-assistant +modbus_polling = false # Disable optional MODBUS polling +pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr +pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr -#[inverters."R17xxxxxxxxxxxx2"] -#node_id = '' # Optional, MQTT replacement for inverters serial number -#suggested_area = '' # Optional, suggested installation area for home-assistant -#modbus_polling = false # Disable optional MODBUS polling -#pv1 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr -#pv2 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr -# end of block [inverters."R170000000000002"] - +########################################################################################## +## +## For each GEN3PLUS inverter, the serial number of the inverter must be mapped to an MQTT +## definition. To do this, the corresponding configuration block is started with +## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned +## to this inverter. Further inverter-specific parameters (e.g. 
polling mode, client mode) +## can be set in the configuration block +## +## The serial numbers of all GEN3PLUS inverters start with `Y17` or Y47! Each GEN3PLUS +## inverter is supplied with a “Monitoring SN:”. This can be found on a sticker enclosed +## with the inverter. +## [inverters."Y170000000000001"] -monitor_sn = 2000000000 # The "Monitoring SN:" can be found on a sticker enclosed with the inverter -#node_id = '' # Optional, MQTT replacement for inverters serial number -#suggested_area = '' # Optional, suggested installation place for home-assistant -modbus_polling = true # Enable optional MODBUS polling +monitor_sn = 2000000000 # The GEN3PLUS "Monitoring SN:" +node_id = '' # MQTT replacement for inverters serial number +suggested_area = '' # suggested installation place for home-assistant +modbus_polling = true # Enable optional MODBUS polling # if your inverter supports SSL connections you must use the client_mode. Pls, uncomment # the next line and configure the fixed IP of your inverter #client_mode = {host = '192.168.0.1', port = 8899} -#pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr -#pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr -#pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr -#pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr -# end of block [inverters."Y170000000000001"] +pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr +pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr +pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr +pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr +########################################################################################## +### +### If the proxy mode is configured, commands from TSUN can be sent to the inverter via +### this connection or parameters (e.g. network credentials) can be queried. Filters can +### then be configured for the AT+ commands from the TSUN Cloud so that only certain +### accesses are permitted. 
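+###
+###   As an illustration, a stricter filter than the default below could drop
+###   the 'AT+SUPDATE' command while keeping the other defaults. This is only a
+###   sketch and assumes the allow/block lists match received commands by prefix:
+###
+###     tsun.allow = ['AT+Z', 'AT+UPURL']
+###     tsun.block = ['AT+SUPDATE']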
+### +### An overview of all known AT+ commands can be found here: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/AT--commands +### + [gen3plus.at_acl] +# filter for received commands from the internet tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'] tsun.block = [] +# filter for received commands from the MQTT broker mqtt.allow = ['AT+'] mqtt.block = [] -# end of block [gen3plus.at_acl] From 0b3d84ff36685260de919e055ee24bfe72c82f71 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Mon, 16 Sep 2024 00:12:30 +0200 Subject: [PATCH 33/39] change default config --- app/tests/test_config.py | 156 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 151 insertions(+), 5 deletions(-) diff --git a/app/tests/test_config.py b/app/tests/test_config.py index 9782567..5ceb1b3 100644 --- a/app/tests/test_config.py +++ b/app/tests/test_config.py @@ -30,7 +30,33 @@ def test_default_config(): validated = Config.conf_schema.validate(cnf) except Exception: assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': False, 'monitor_sn': 0, 'suggested_area': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': '', 'sensor_list': 688}}} + assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': { + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'modbus_polling': False, + 'monitor_sn': 0, + 'suggested_area': '', + 'sensor_list': 688}, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'suggested_area': '', + 'sensor_list': 688}}} def test_full_config(): cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, @@ -71,7 +97,37 @@ def test_read_empty(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': 
{'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': { + 'suggested_area': '', + 'modbus_polling': False, + 'monitor_sn': 0, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'sensor_list': 688 + }, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'suggested_area': '', + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'sensor_list': 688 + } + } + } defcnf = TstConfig.def_config.get('solarman') assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000} @@ -93,7 +149,37 @@ def test_read_cnf1(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 
'R170000000000001': { + 'suggested_area': '', + 'modbus_polling': False, + 'monitor_sn': 0, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'sensor_list': 688 + }, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'suggested_area': '', + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'sensor_list': 688 + } + } + } cnf = TstConfig.get('solarman') assert cnf == {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000} defcnf = TstConfig.def_config.get('solarman') @@ -106,7 +192,37 @@ def test_read_cnf2(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': { + 'suggested_area': '', + 'modbus_polling': False, + 'monitor_sn': 0, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'sensor_list': 688 + }, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'suggested_area': '', + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'sensor_list': 688 + } + } + } assert True == TstConfig.is_default('solarman') def test_read_cnf3(): @@ -123,7 +239,37 @@ def test_read_cnf4(): err = TstConfig.read('app/config/') assert err == None cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': 
{'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': '', 'sensor_list': 688}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': '', 'sensor_list': 688}}} + assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': { + 'suggested_area': '', + 'modbus_polling': False, + 'monitor_sn': 0, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'sensor_list': 688 + }, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'suggested_area': '', + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'sensor_list': 688 + } + } + } assert False == TstConfig.is_default('solarman') def test_read_cnf5(): From 828f26cf24523714c82e4c98b0a0e23803b65269 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Mon, 16 Sep 2024 00:17:43 +0200 Subject: [PATCH 34/39] Update README.md (#189) Config file is now foldable --- README.md | 172 +++++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 143 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index 8f222c8..7590b4a 100644 --- a/README.md +++ b/README.md @@ -121,26 +121,63 @@ The proxy can be configured via the file 'config.toml'. When the proxy is starte The configration uses the TOML format, which aims to be easy to read due to obvious semantics. You find more details here: +

+Here is an example of a config.toml file + ```toml -# configuration for tsun cloud for 'GEN3' inverters -tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates -tsun.host = 'logger.talent-monitoring.com' -tsun.port = 5005 - -# configuration for solarman cloud for 'GEN3 PLUS' inverters -solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates -solarman.host = 'iot.talent-monitoring.com' -solarman.port = 10000 +########################################################################################## +### +### T S U N - G E N 3 - P R O X Y +### +### from Stefan Allius +### +########################################################################################## +### +### The readme will give you an overview of the project: +### https://s-allius.github.io/tsun-gen3-proxy/ +### +### The proxy supports different operation modes. Select the proper mode +### which depends on your inverter type and you inverter firmware. +### Please read: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/Operation-Modes-Overview +### +### Here you will find a description of all configuration options: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details +### +### The configration uses the TOML format, which aims to be easy to read due to +### obvious semantics. You find more details here: https://toml.io/en/v1.0.0 +### +########################################################################################## -# mqtt broker configuration +########################################################################################## +## +## MQTT broker configuration +## +## In this block, you must configure the connection to your MQTT broker and specify the +## required credentials. As the proxy does not currently support an encrypted connection +## to the MQTT broker, it is strongly recommended that you do not use a public broker. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#mqtt-broker-account +## + mqtt.host = 'mqtt' # URL or IP address of the mqtt broker mqtt.port = 1883 mqtt.user = '' mqtt.passwd = '' -# home-assistant +########################################################################################## +## +## HOME ASSISTANT +## +## The proxy supports the MQTT autoconfiguration of Home Assistant (HA). The default +## values match the HA default configuration. If you need to change these or want to use +## a different MQTT client, you can adjust the prefixes of the MQTT topics below. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#home-assistant +## + ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values @@ -148,40 +185,115 @@ ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_i ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance -# microinverters -inverters.allow_all = false # True: allow inverters, even if we have no inverter mapping +########################################################################################## +## +## GEN3 Proxy Mode Configuration +## +## In this block, you can configure an optional connection to the TSUN cloud for GEN3 +## inverters. This connection is only required if you want send data to the TSUN cloud +## to use the TSUN APPs or receive firmware updates. 
+## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#tsun-cloud-for-gen3-inverter-only +## -# inverter mapping, maps a `serial_no* to a `node_id` and defines an optional `suggested_area` for `home-assistant` -# -# for each inverter add a block starting with [inverters."<16-digit serial numbeer>"] +tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates +tsun.host = 'logger.talent-monitoring.com' +tsun.port = 5005 + + +########################################################################################## +## +## GEN3PLUS Proxy Mode Configuration +## +## In this block, you can configure an optional connection to the TSUN cloud for GEN3PLUS +## inverters. This connection is only required if you want send data to the TSUN cloud +## to use the TSUN APPs or receive firmware updates. +## +## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#solarman-cloud-for-gen3plus-inverter-only +## +solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates +solarman.host = 'iot.talent-monitoring.com' +solarman.port = 10000 + + +########################################################################################## +### +### Inverter Definitions +### +### The proxy supports the simultaneous operation of several inverters, even of different +### types. A configuration block must be defined for each inverter, in which all necessary +### parameters must be specified. These depend on the operation mode used and also differ +### slightly depending on the inverter type. +### +### In addition, the PV modules can be defined at the individual inputs for documentation +### purposes, whereby these are displayed in Home Assistant. +### +### The proxy only accepts connections from known inverters. This can be switched off for +### test purposes and unknown serial numbers are also accepted. +### + +inverters.allow_all = false # only allow known inverters + + +########################################################################################## +## +## For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT +## definition. To do this, the corresponding configuration block is started with +## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned +## to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set +## in the configuration block +## +## The serial numbers of all GEN3 inverters start with `R17`! 
+##
 [inverters."R17xxxxxxxxxxxx1"]
-node_id = 'inv1' # Optional, MQTT replacement for inverters serial number
-suggested_area = 'roof' # Optional, suggested installation area for home-assistant
-modbus_polling = false # Disable optional MODBUS polling for GEN3 inverter
+node_id = 'inv_1' # MQTT replacement for inverters serial number
+suggested_area = 'roof' # suggested installation place for home-assistant
+modbus_polling = false # Disable optional MODBUS polling for GEN3 inverter
 pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
 pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
 
-[inverters."R17xxxxxxxxxxxx2"]
-node_id = 'inv2' # Optional, MQTT replacement for inverters serial number
-suggested_area = 'balcony' # Optional, suggested installation area for home-assistant
-modbus_polling = false # Disable optional MODBUS polling for GEN3 inverter
-pv1 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr
-pv2 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr
+
+##########################################################################################
+##
+## For each GEN3PLUS inverter, the serial number of the inverter must be mapped to an MQTT
+## definition. To do this, the corresponding configuration block is started with
+## `[inverters."<16-digit serial number>"]` so that all subsequent parameters are assigned
+## to this inverter. Further inverter-specific parameters (e.g. polling mode, client mode)
+## can be set in the configuration block.
+##
+## The serial numbers of all GEN3PLUS inverters start with `Y17` or `Y47`! Each GEN3PLUS
+## inverter is supplied with a “Monitoring SN:”. This can be found on a sticker enclosed
+## with the inverter.
+##
 [inverters."Y17xxxxxxxxxxxx1"] # This block is also for inverters with a Y47 serial no
-monitor_sn = 2000000000 # The "Monitoring SN:" can be found on a sticker enclosed with the inverter
-node_id = 'inv_3' # MQTT replacement for inverters serial number
-suggested_area = 'garage' # suggested installation place for home-assistant
-modbus_polling = false # Enable optional MODBUS polling for GEN3PLUS inverter
+monitor_sn = 2000000000 # The GEN3PLUS "Monitoring SN:"
+node_id = 'inv_2' # MQTT replacement for inverters serial number
+suggested_area = 'garage' # suggested installation place for home-assistant
+modbus_polling = true # Enable optional MODBUS polling
+
 # if your inverter supports SSL connections you must use the client_mode. Please uncomment
 # the next line and configure the fixed IP of your inverter
 #client_mode = {host = '192.168.0.1', port = 8899}
+
 pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
 pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
 pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
 pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
+
+##########################################################################################
+###
+### If the proxy mode is configured, commands from TSUN can be sent to the inverter via
+### this connection, or parameters (e.g. network credentials) can be queried. Filters can
+### then be configured for the AT+ commands from the TSUN Cloud so that only certain
+### accesses are permitted.
+### +### An overview of all known AT+ commands can be found here: +### https://github.com/s-allius/tsun-gen3-proxy/wiki/AT--commands +### + [gen3plus.at_acl] tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'] # allow this for TSUN access tsun.block = [] @@ -190,6 +302,8 @@ mqtt.block = [] ``` +
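The `gen3plus.at_acl` section above is simply a pair of allow/block lists of AT+ command prefixes per peer (TSUN cloud and MQTT), so a command blocked for one peer can still be permitted for the other. As a rough illustration only (this is not the proxy's actual implementation, and prefix matching is an assumption), such a filter could be evaluated like this:

```python
# Hypothetical sketch, not code from this repository: evaluate an allow/block
# ACL such as [gen3plus.at_acl] against an incoming AT+ command (prefix match assumed).
def at_cmd_allowed(cmd: str, allow: list, block: list) -> bool:
    """Return True if cmd matches an allow prefix and no block prefix."""
    if any(cmd.startswith(prefix) for prefix in block):
        return False
    return any(cmd.startswith(prefix) for prefix in allow)

# Using the tsun values from the example configuration above:
tsun_allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE']
tsun_block = []
print(at_cmd_allowed('AT+SUPDATE', tsun_allow, tsun_block))  # True: update command is allowed
print(at_cmd_allowed('AT+WEBU', tsun_allow, tsun_block))     # False: not in the allow list
```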
+ ## Inverter Configuration GEN3PLUS inverters offer a web interface that can be used to configure the inverter. This is very practical for sending the data directly to the proxy. On the one hand, the inverter broadcasts its own SSID on 2.4GHz. This can be recognized because it is broadcast with `AP_`. You will find the `Monitor SN` and the password for the WLAN connection on a small sticker enclosed with the inverter. From 166a85670594e43861a33ee4e696563fef86b9e9 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Thu, 19 Sep 2024 19:17:22 +0200 Subject: [PATCH 35/39] GEN3: Invalid Contact Info Msg (#192) Fixes #191 --- app/src/gen3/talent.py | 21 +++++++++-------- app/tests/test_talent.py | 49 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 59 insertions(+), 11 deletions(-) diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index c225691..bb86357 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -176,7 +176,7 @@ class Talent(Message): f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') def forward_snd(self) -> None: - '''add the actual receive msg to the forwarding queue''' + '''add the build send msg to the forwarding queue''' tsun = Config.get('tsun') if tsun['enabled']: _len = len(self._send_buffer) - self.send_msg_ofs @@ -393,17 +393,18 @@ class Talent(Message): def __process_contact_info(self) -> bool: result = struct.unpack_from('!B', self._recv_buffer, self.header_len) name_len = result[0] - if self.data_len < name_len+2: + if self.data_len == 1: # this is a response withone status byte return False - result = struct.unpack_from(f'!{name_len+1}pB', self._recv_buffer, - self.header_len) - self.contact_name = result[0] - mail_len = result[1] - logger.info(f'name: {self.contact_name}') + if self.data_len >= name_len+2: + result = struct.unpack_from(f'!{name_len+1}pB', self._recv_buffer, + self.header_len) + self.contact_name = result[0] + mail_len = result[1] + logger.info(f'name: {self.contact_name}') - result = struct.unpack_from(f'!{mail_len+1}p', self._recv_buffer, - self.header_len+name_len+1) - self.contact_mail = result[0] + result = struct.unpack_from(f'!{mail_len+1}p', self._recv_buffer, + self.header_len+name_len+1) + self.contact_mail = result[0] logger.info(f'mail: {self.contact_mail}') return True diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index 4ea5b54..ce4cef4 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -99,14 +99,22 @@ class MemoryStream(Talent): @pytest.fixture def msg_contact_info(): # Contact Info message - Config.act_config = {'tsun':{'enabled': True}} return b'\x00\x00\x00\x2c\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub\x40123456' +@pytest.fixture +def msg_contact_info_empty(): # Contact Info message with empty string + return b'\x00\x00\x00\x15\x10R170000000000001\x91\x00\x00\x00' + @pytest.fixture def msg_contact_info_long_id(): # Contact Info message with longer ID Config.act_config = {'tsun':{'enabled': True}} return b'\x00\x00\x00\x2d\x11R1700000000000011\x91\x00\x08solarhub\x0fsolarhub\x40123456' + +@pytest.fixture +def msg_contact_info_broken(): # Contact Info message with invalid string coding + return b'\x00\x00\x00\x2a\x10R170000000000001\x91\x00solarhubsolarhub\x40123456' + @pytest.fixture def msg2_contact_info(): # two Contact Info messages return b'\x00\x00\x00\x2c\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub\x40123456\x00\x00\x00\x2c\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub\x40123456' @@ 
-728,6 +736,7 @@ def multiple_recv_buf(): # There are three message in the buffer, but the second return msg def test_read_message(msg_contact_info): + Config.act_config = {'tsun':{'enabled': True}} m = MemoryStream(msg_contact_info, (0,)) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -782,6 +791,7 @@ def test_read_message_long_id(msg_contact_info_long_id): def test_read_message_in_chunks(msg_contact_info): + Config.act_config = {'tsun':{'enabled': True}} m = MemoryStream(msg_contact_info, (4,23,0)) m.read() # read 4 bytes, header incomplere assert not m.header_valid # must be invalid, since header not complete @@ -801,6 +811,7 @@ def test_read_message_in_chunks(msg_contact_info): m.close() def test_read_message_in_chunks2(msg_contact_info): + Config.act_config = {'tsun':{'enabled': True}} m = MemoryStream(msg_contact_info, (4,10,0)) m.read() # read 4 bytes, header incomplere assert not m.header_valid @@ -852,6 +863,42 @@ def test_read_two_messages(config_tsun_allow_all, msg2_contact_info,msg_contact_ assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456' m.close() +def test_conttact_req(config_tsun_allow_all, msg_contact_info, msg_contact_rsp): + _ = config_tsun_allow_all + m = MemoryStream(msg_contact_info, (0,)) + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.contact_name == b'solarhub' + assert m.contact_mail == b'solarhub@123456' + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==0 + assert m.header_len==23 + assert m.data_len==25 + assert m._forward_buffer==b'' + assert m._send_buffer==msg_contact_rsp + m.close() + +def test_contact_broken_req(config_tsun_allow_all, msg_contact_info_broken, msg_contact_rsp): + _ = config_tsun_allow_all + m = MemoryStream(msg_contact_info_broken, (0,)) + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.contact_name == b'' + assert m.contact_mail == b'' + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==0 + assert m.header_len==23 + assert m.data_len==23 + assert m._forward_buffer==b'' + assert m._send_buffer==msg_contact_rsp + m.close() + def test_msg_contact_resp(config_tsun_inv1, msg_contact_rsp): _ = config_tsun_inv1 m = MemoryStream(msg_contact_rsp, (0,), False) From 62ea2a9e6f9c632aad7f04c38d88f3a79b65cce5 Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sun, 13 Oct 2024 16:07:01 +0200 Subject: [PATCH 36/39] Refactoring async stream (#194) * GEN3: Invalid Contact Info Msg Fixes #191 * introduce ifc with FIFOs * add object factory * use AsyncIfc class with FIFO * declare more methods as classmethods * - refactoring - remove _forward_buffer - make async_write private * remove _forward_buffer * refactoring * avoid mqtt handling for invalid serial numbers * add two more callbacks * FIX update_header_cb handling * split AsyncStream in two classes * split ConnectionG3(P) in server and client class * update class diagramm * refactor server creation * remove duplicated imports * reduce code duplication * move StremPtr instances into Inverter class * resolution of connection 
classes - remove ConnectionG3Client - remove ConnectionG3Server - remove ConnectionG3PClient - remove ConnectionG3PServer * fix server connections * fix client loop closing * don't overwrite self.remote in constructor * update class diagramm * fixes - fixes null pointer accesses - initalize AsyncStreamClient with proper StreamPtr instance * add close callback * refactor close handling * remove connection classes * move more code into InverterBase class * remove test_inverter_base.py * add abstract inverter interface class * initial commit * fix sonar qube warnings * rename class Inverter into Proxy * fix typo * move class InverterIfc into a separate file * add more testcases * use ProtocolIfc class * add unit tests for AsyncStream class * icrease test coverage * reduce cognitive complexity * increase test coverage * increase tes coverage * simplify heartbeat handler * remove obsolete tx_get method * add more unittests * update changelog * remove __del__ method for proper gc runs * check releasing of ModbusConn instances * call garbage collector to release unreachable objs * decrease ref counter after the with block --- CHANGELOG.md | 1 + app/proxy.svg | 535 +++++++----------- app/proxy.yuml | 50 +- app/proxy_2.svg | 432 ++++++++++++++ app/proxy_2.yuml | 51 ++ app/src/async_ifc.py | 104 ++++ app/src/async_stream.py | 415 +++++++++----- app/src/byte_fifo.py | 54 ++ app/src/gen3/connection_g3.py | 46 -- app/src/gen3/inverter_g3.py | 138 +---- app/src/gen3/talent.py | 150 +++-- app/src/gen3plus/connection_g3p.py | 47 -- app/src/gen3plus/inverter_g3p.py | 141 +---- app/src/gen3plus/solarman_v5.py | 122 ++-- app/src/infos.py | 12 +- app/src/inverter_base.py | 183 ++++++ app/src/inverter_ifc.py | 40 ++ app/src/iter_registry.py | 9 + app/src/messages.py | 72 ++- app/src/modbus.py | 4 - app/src/modbus_tcp.py | 31 +- app/src/mqtt.py | 3 - app/src/protocol_ifc.py | 21 + app/src/{inverter.py => proxy.py} | 33 +- app/src/server.py | 53 +- app/tests/test_async_stream.py | 532 +++++++++++++++++ app/tests/test_byte_fifo.py | 43 ++ app/tests/test_connection_g3.py | 84 --- app/tests/test_connection_g3p.py | 89 --- app/tests/test_inverter_base.py | 304 ++++++++++ app/tests/test_inverter_g3.py | 177 +++--- app/tests/test_inverter_g3p.py | 152 ++--- app/tests/test_modbus_tcp.py | 169 ++++-- app/tests/test_mqtt.py | 7 +- app/tests/{test_inverter.py => test_proxy.py} | 25 +- app/tests/test_solarman.py | 393 +++++++------ app/tests/test_talent.py | 394 +++++++------ tsun.code-workspace | 11 + 38 files changed, 3245 insertions(+), 1882 deletions(-) create mode 100644 app/proxy_2.svg create mode 100644 app/proxy_2.yuml create mode 100644 app/src/async_ifc.py create mode 100644 app/src/byte_fifo.py delete mode 100644 app/src/gen3/connection_g3.py delete mode 100644 app/src/gen3plus/connection_g3p.py create mode 100644 app/src/inverter_base.py create mode 100644 app/src/inverter_ifc.py create mode 100644 app/src/iter_registry.py create mode 100644 app/src/protocol_ifc.py rename app/src/{inverter.py => proxy.py} (73%) create mode 100644 app/tests/test_async_stream.py create mode 100644 app/tests/test_byte_fifo.py delete mode 100644 app/tests/test_connection_g3.py delete mode 100644 app/tests/test_connection_g3p.py create mode 100644 app/tests/test_inverter_base.py rename app/tests/{test_inverter.py => test_proxy.py} (79%) create mode 100644 tsun.code-workspace diff --git a/CHANGELOG.md b/CHANGELOG.md index eacd121..618da05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to 
[Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- refactoring: cleaner architecture, increase test coverage - Parse more values in Server Mode [#186](https://github.com/s-allius/tsun-gen3-proxy/issues/186) - GEN3: add support for new messages of version 3 firmwares [#182](https://github.com/s-allius/tsun-gen3-proxy/issues/182) - add support for controller MAC and serial number diff --git a/app/proxy.svg b/app/proxy.svg index 32caab7..f032452 100644 --- a/app/proxy.svg +++ b/app/proxy.svg @@ -4,408 +4,257 @@ - - + + G - + A0 - - - -You can stick notes -on diagrams too! + + + +You can stick notes +on diagrams too! A1 - -Singleton - - - -A2 - -Mqtt - -<static>ha_restarts -<static>__client -<static>__cb_MqttIsUp - -<async>publish() -<async>close() - - - -A1->A2 - - - - - -A11 - -Inverter - -cls.db_stat -cls.entity_prfx -cls.discovery_prfx -cls.proxy_node_id -cls.proxy_unique_id -cls.mqtt:Mqtt - - - - -A2->A11 - - - - -A3 - -Modbus - -que -snd_handler -rsp_handler -timeout -max_retires -last_xxx -err -retry_cnt -req_pend -tim - -build_msg() -recv_req() -recv_resp() -close() + +<<AbstractIterMeta>> + + +__iter__() A4 - -IterRegistry - - -__iter__ + +<<InverterIfc>> + + +healthy()->bool +<async>disc(shutdown_started=False) +<async>create_remote() + + + +A1->A4 + + + + + +A2 + +Mqtt +<<Singleton>> + +<static>ha_restarts +<static>__client +<static>__cb_MqttIsUp + +<async>publish() +<async>close() + + + +A3 + +Proxy + +<cls>db_stat +<cls>entity_prfx +<cls>discovery_prfx +<cls>proxy_node_id +<cls>proxy_unique_id +<cls>mqtt:Mqtt +__ha_restarts + +class_init() +class_close() +<async>_cb_mqtt_is_up() +<async>_register_proxy_stat_home_assistant() +<async>_async_publ_mqtt_proxy_stat(key) + + + +A3->A2 + + + A5 - -Message - -server_side:bool -header_valid:bool -header_len:unsigned -data_len:unsigned -unique_id -node_id -sug_area -_recv_buffer:bytearray -_send_buffer:bytearray -_forward_buffer:bytearray -db:Infos -new_data:list -state - -_read():void<abstract> -close():void -inc_counter():void -dec_counter():void + +InverterBase + +_registry +__ha_restarts +addr +config_id:str +prot_class:MessageProt +remote:StreamPtr +local:StreamPtr + +healthy()->bool +<async>disc(shutdown_started=False) +<async>create_remote() +<async>async_publ_mqtt() + + + +A3->A5 + + A4->A5 - - + + A6 - -Talent - -await_conn_resp_cnt -id_str -contact_name -contact_mail -db:InfosG3 -mb:Modbus -switch - -msg_contact_info() -msg_ota_update() -msg_get_time() -msg_collector_data() -msg_inverter_data() -msg_unknown() -close() + +StreamPtr + + +stream:ProtocolIfc +ifc:AsyncIfc - + A5->A6 - - + + +2 A7 - -SolarmanV5 - -control -serial -snr -db:InfosG3P -mb:Modbus -switch - -msg_unknown() -close() + +InverterG3 - -A5->A7 - - - - - -A6->A3 - - -1 -has - - - -A8 - -ConnectionG3 - -remote_stream:ConnectionG3 - -healthy() -close() - - -A6->A8 - - - - - -A7->A3 - - -1 -has +A5->A7 + + A9 - -ConnectionG3P - -remote_stream:ConnectionG3P - -healthy() -close() + +InverterG3P - - -A7->A9 - - + + +A5->A9 + + - - -A8->A8 - - -0..1 -has + + +A11 + +<<AsyncIfc>> + + + +A6->A11 + + +1 A12 - -InverterG3 - -__ha_restarts - -async_create_remote() -async_publ_mqtt() -close() + +<<ProtocolIfc>> - - -A8->A12 - - + + +A6->A12 + + +1 - - -A9->A9 - - -0..1 -has + + +A8 + + + +Creates an GEN3 +inverter instance +with +prot_class:Talent - - -A13 - -InverterG3P - -__ha_restarts - -async_create_remote( -)async_publ_mqtt() -close() - - - -A9->A13 - - + + +A7->A8 + A10 - -AsyncStream - -reader -writer -addr -r_addr -l_addr - -<async>server_loop() 
-<async>client_loop() -<async>loop -disc() -close() -__async_read() -async_write() -__async_forward() + + + +Creates an GEN3PLUS +inverter instance +with +prot_class:SolarmanV5 - - -A10->A8 - - + + +A9->A10 + - - -A10->A9 - - - - - -A11->A12 - - - - + -A11->A13 - - +A12->A11 + + +use + + + +A13 + +ModbusConn + +host +port +addr +stream:InverterG3P + + + + +A13->A9 + + +1 +has A14 - -Infos - -stat -new_stat_data -info_dev - -static_init() -dev_value() -inc_counter() -dec_counter() -ha_proxy_conf -ha_conf -ha_remove -update_db -set_db_def_value -get_db_value -ignore_this_device + +ModbusTcp - - -A15 - -InfosG3 - - -ha_confs() -parse() - - - -A14->A15 - - - - - -A16 - -InfosG3P - - -ha_confs() -parse() - - - -A14->A16 - - - - - -A15->A6 - - - - - -A16->A7 - - - - - -A17 - -ModbusConn - -host -port -addr -stream:InverterG3P - - - - -A17->A13 - - -1 -has + + +A14->A13 + + +* +creates diff --git a/app/proxy.yuml b/app/proxy.yuml index c629d35..dbb5417 100644 --- a/app/proxy.yuml +++ b/app/proxy.yuml @@ -3,28 +3,34 @@ // {generate:true} [note: You can stick notes on diagrams too!{bg:cornsilk}] -[Singleton]^[Mqtt|ha_restarts;__client;__cb_MqttIsUp|publish();close()] -[Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()] -[IterRegistry||__iter__]^[Message|server_side:bool;header_valid:bool;header_len:unsigned;data_len:unsigned;unique_id;node_id;sug_area;_recv_buffer:bytearray;_send_buffer:bytearray;_forward_buffer:bytearray;db:Infos;new_data:list;state|_read():void;close():void;inc_counter():void;dec_counter():void] -[Message]^[Talent|await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;close()] -[Message]^[SolarmanV5|control;serial;snr;db:InfosG3P;mb:Modbus;switch|msg_unknown();;close()] -[Talent]^[ConnectionG3|remote_stream:ConnectionG3|healthy();close()] -[Talent]has-1>[Modbus] -[SolarmanV5]^[ConnectionG3P|remote_stream:ConnectionG3P|healthy();close()] -[SolarmanV5]has-1>[Modbus] -[AsyncStream|reader;writer;addr;r_addr;l_addr|server_loop();client_loop();loop;disc();close();;__async_read();async_write();__async_forward()]^[ConnectionG3] -[AsyncStream]^[ConnectionG3P] -[Inverter|cls.db_stat;cls.entity_prfx;cls.discovery_prfx;cls.proxy_node_id;cls.proxy_unique_id;cls.mqtt:Mqtt|]^[InverterG3|__ha_restarts|async_create_remote();async_publ_mqtt();;close()] -[Inverter]^[InverterG3P|__ha_restarts|async_create_remote(;)async_publ_mqtt();close()] -[Mqtt]-[Inverter] -[ConnectionG3]^[InverterG3] -[ConnectionG3]has-0..1>[ConnectionG3] -[ConnectionG3P]^[InverterG3P] -[ConnectionG3P]has-0..1>[ConnectionG3P] +[<>||__iter__()] + +[Mqtt;<>|ha_restarts;__client;__cb_MqttIsUp|publish();close()] +[Proxy|db_stat;entity_prfx;discovery_prfx;proxy_node_id;proxy_unique_id;mqtt:Mqtt;;__ha_restarts|class_init();class_close();;_cb_mqtt_is_up();_register_proxy_stat_home_assistant();_async_publ_mqtt_proxy_stat(key)] + +[<>||healthy()->bool;disc(shutdown_started=False);create_remote();] +[<>]^-.-[<>] +[InverterBase|_registry;__ha_restarts;;addr;config_id:str;prot_class:MessageProt;remote:StreamPtr;local:StreamPtr;|healthy()->bool;disc(shutdown_started=False);create_remote();async_publ_mqtt()] +[StreamPtr||stream:ProtocolIfc;ifc:AsyncIfc] +[<>]^-.-[InverterBase] +[InverterG3]-[note: Creates an GEN3 inverter instance with prot_class:Talent{bg:cornsilk}] +[InverterG3P]-[note: Creates an GEN3PLUS inverter instance 
with prot_class:SolarmanV5{bg:cornsilk}] +[InverterBase]^[InverterG3] +[InverterBase]^[InverterG3P] +[Proxy]^[InverterBase] +[InverterBase]-2>[StreamPtr] +[Proxy]++->[Mqtt;<>] + +[<>] + + +[StreamPtr]-1>[<>] +[StreamPtr]-1>[<>] + + +[<>]use-.->[<>] + -[Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device]^[InfosG3||ha_confs();parse()] -[Infos]^[InfosG3P||ha_confs();parse()] -[InfosG3P]->[SolarmanV5] -[InfosG3]->[Talent] [ModbusConn|host;port;addr;stream:InverterG3P;|]has-1>[InverterG3P] +[ModbusTcp]creates-*>[ModbusConn] diff --git a/app/proxy_2.svg b/app/proxy_2.svg new file mode 100644 index 0000000..6a6fb51 --- /dev/null +++ b/app/proxy_2.svg @@ -0,0 +1,432 @@ + + + + + + +G + + + +A0 + + + +You can stick notes +on diagrams too! + + + +A1 + +<<AbstractIterMeta>> + + +__iter__() + + + +A14 + +<<ProtocolIfc>> + +_registry + +close() + + + +A1->A14 + + + + + +A2 + +InverterG3 + +addr +remote:StreamPtr +local:StreamPtr + +create_remote() +close() + + + +A7 + +AsyncStreamServer + +create_remote + +<async>server_loop() +<async>_async_forward() +<async>publish_outstanding_mqtt() +close() + + + +A2->A7 + + + +local + + + +A8 + +AsyncStreamClient + + +<async>client_loop() +<async>_async_forward()) + + + +A2->A8 + + +remote + + + +A3 + +InverterG3P + +addr +remote:StreamPtr +local:StreamPtr + +create_remote() +close() + + + +A3->A7 + + + +local + + + +A3->A8 + + +remote + + + +A4 + +<<AsyncIfc>> + + +set_node_id() +get_conn_no() +tx_add() +tx_flush() +tx_get() +tx_peek() +tx_log() +tx_clear() +tx_len() +fwd_add() +fwd_log() +rx_get() +rx_peek() +rx_log() +rx_clear() +rx_len() +rx_set_cb() +prot_set_timeout_cb() + + + +A5 + +AsyncIfcImpl + +fwd_fifo:ByteFifo +tx_fifo:ByteFifo +rx_fifo:ByteFifo +conn_no:Count +node_id +timeout_cb + + + +A4->A5 + + + + + +A6 + +AsyncStream + +reader +writer +addr +r_addr +l_addr + +<async>loop +disc() +close() +healthy() +__async_read() +__async_write() +__async_forward() + + + +A5->A6 + + + + + +A6->A7 + + + + + +A6->A8 + + + + + +A9 + +Talent + +ifc:AsyncIfc +conn_no +addr +await_conn_resp_cnt +id_str +contact_name +contact_mail +db:InfosG3 +mb:Modbus +switch + +msg_contact_info() +msg_ota_update() +msg_get_time() +msg_collector_data() +msg_inverter_data() +msg_unknown() +healthy() +close() + + + +A9->A2 + + +remote + + + +A9->A2 + + + +local + + + +A9->A4 + + +use + + + +A12 + +InfosG3 + + +ha_confs() +parse() + + + +A9->A12 + + + + + +A10 + +SolarmanV5 + +ifc:AsyncIfc +conn_no +addr +control +serial +snr +db:InfosG3P +mb:Modbus +switch + +msg_unknown() +healthy() +close() + + + +A10->A3 + + +remote + + + +A10->A3 + + + +local + + + +A10->A4 + + +use + + + +A13 + +InfosG3P + + +ha_confs() +parse() + + + +A10->A13 + + + + + +A11 + +Infos + +stat +new_stat_data +info_dev + +static_init() +dev_value() +inc_counter() +dec_counter() +ha_proxy_conf +ha_conf +ha_remove +update_db +set_db_def_value +get_db_value +ignore_this_device + + + +A11->A12 + + + + + +A11->A13 + + + + + +A15 + +Message + +node_id + +inc_counter() +dec_counter() + + + +A14->A15 + + + + + +A15->A9 + + + + + +A15->A10 + + + + + +A16 + +Modbus + +que +snd_handler +rsp_handler +timeout +max_retires +last_xxx +err +retry_cnt +req_pend +tim + +build_msg() +recv_req() +recv_resp() +close() + + + +A16->A9 + + +has +1 + + + +A16->A10 + + +has +1 + + + diff --git a/app/proxy_2.yuml b/app/proxy_2.yuml new file mode 100644 index 0000000..39a399e --- /dev/null +++ b/app/proxy_2.yuml @@ 
-0,0 +1,51 @@ +// {type:class} +// {direction:topDown} +// {generate:true} + +[note: You can stick notes on diagrams too!{bg:cornsilk}] +[<>||__iter__()] + +[InverterG3|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] +[InverterG3P|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] + +[<>||set_node_id();get_conn_no();;tx_add();tx_flush();tx_get();tx_peek();tx_log();tx_clear();tx_len();;fwd_add();fwd_log();rx_get();rx_peek();rx_log();rx_clear();rx_len();rx_set_cb();;prot_set_timeout_cb()] +[AsyncIfcImpl|fwd_fifo:ByteFifo;tx_fifo:ByteFifo;rx_fifo:ByteFifo;conn_no:Count;node_id;timeout_cb] +[AsyncStream|reader;writer;addr;r_addr;l_addr|;loop;disc();close();healthy();;__async_read();__async_write();__async_forward()] +[AsyncStreamServer|create_remote|server_loop();_async_forward();publish_outstanding_mqtt();close()] +[AsyncStreamClient||client_loop();_async_forward())] +[<>]^-.-[AsyncIfcImpl] +[AsyncIfcImpl]^[AsyncStream] +[AsyncStream]^[AsyncStreamServer] +[AsyncStream]^[AsyncStreamClient] + + +[Talent|ifc:AsyncIfc;conn_no;addr;;await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;healthy();close()] +[Talent][AsyncStreamClient] +[Talent]<-local++[InverterG3] +[InverterG3]++local->[AsyncStreamServer] + +[SolarmanV5|ifc:AsyncIfc;conn_no;addr;;control;serial;snr;db:InfosG3P;mb:Modbus;switch|msg_unknown();;healthy();close()] +[SolarmanV5][AsyncStreamClient] +[SolarmanV5]<-local++[InverterG3P] +[InverterG3P]++local->[AsyncStreamServer] + +[Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device] +[Infos]^[InfosG3||ha_confs();parse()] +[Infos]^[InfosG3P||ha_confs();parse()] + +[Talent]use->[<>] +[Talent]->[InfosG3] +[SolarmanV5]use->[<>] +[SolarmanV5]->[InfosG3P] + +[<>|_registry|close()] +[<>]^-.-[<>] +[<>]^-.-[Message|node_id|inc_counter();dec_counter()] +[Message]^[Talent] +[Message]^[SolarmanV5] + +[Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()] +[Modbus]<1-has[SolarmanV5] +[Modbus]<1-has[Talent] diff --git a/app/src/async_ifc.py b/app/src/async_ifc.py new file mode 100644 index 0000000..80af383 --- /dev/null +++ b/app/src/async_ifc.py @@ -0,0 +1,104 @@ +from abc import ABC, abstractmethod + + +class AsyncIfc(ABC): + @abstractmethod + def get_conn_no(self): + pass # pragma: no cover + + @abstractmethod + def set_node_id(self, value: str): + pass # pragma: no cover + + # + # TX - QUEUE + # + @abstractmethod + def tx_add(self, data: bytearray): + ''' add data to transmit queue''' + pass # pragma: no cover + + @abstractmethod + def tx_flush(self): + ''' send transmit queue and clears it''' + pass # pragma: no cover + + @abstractmethod + def tx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + pass # pragma: no cover + + @abstractmethod + def tx_log(self, level, info): + ''' log the transmit queue''' + pass # pragma: no cover + + @abstractmethod + def tx_clear(self): + ''' clear transmit queue''' + pass # pragma: no cover + + @abstractmethod + def tx_len(self): + ''' get numner of bytes in the transmit queue''' + pass # pragma: no cover + + # + # FORWARD - QUEUE + # + @abstractmethod + def fwd_add(self, data: bytearray): + ''' add data to forward queue''' + pass # pragma: no 
cover + + @abstractmethod + def fwd_log(self, level, info): + ''' log the forward queue''' + pass # pragma: no cover + + # + # RX - QUEUE + # + @abstractmethod + def rx_get(self, size: int = None) -> bytearray: + '''removes size numbers of bytes and return them''' + pass # pragma: no cover + + @abstractmethod + def rx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + pass # pragma: no cover + + @abstractmethod + def rx_log(self, level, info): + ''' logs the receive queue''' + pass # pragma: no cover + + @abstractmethod + def rx_clear(self): + ''' clear receive queue''' + pass # pragma: no cover + + @abstractmethod + def rx_len(self): + ''' get numner of bytes in the receive queue''' + pass # pragma: no cover + + @abstractmethod + def rx_set_cb(self, callback): + pass # pragma: no cover + + # + # Protocol Callbacks + # + @abstractmethod + def prot_set_timeout_cb(self, callback): + pass # pragma: no cover + + @abstractmethod + def prot_set_init_new_client_conn_cb(self, callback): + pass # pragma: no cover + + @abstractmethod + def prot_set_update_header_cb(self, callback): + pass # pragma: no cover diff --git a/app/src/async_stream.py b/app/src/async_stream.py index ae7e584..2650235 100644 --- a/app/src/async_stream.py +++ b/app/src/async_stream.py @@ -7,17 +7,136 @@ from typing import Self from itertools import count if __name__ == "app.src.async_stream": - from app.src.messages import hex_dump_memory, State + from app.src.proxy import Proxy + from app.src.byte_fifo import ByteFifo + from app.src.async_ifc import AsyncIfc + from app.src.infos import Infos else: # pragma: no cover - from messages import hex_dump_memory, State + from proxy import Proxy + from byte_fifo import ByteFifo + from async_ifc import AsyncIfc + from infos import Infos import gc logger = logging.getLogger('conn') -class AsyncStream(): +class AsyncIfcImpl(AsyncIfc): _ids = count(0) + + def __init__(self) -> None: + logger.debug('AsyncIfcImpl.__init__') + self.fwd_fifo = ByteFifo() + self.tx_fifo = ByteFifo() + self.rx_fifo = ByteFifo() + self.conn_no = next(self._ids) + self.node_id = '' + self.timeout_cb = None + self.init_new_client_conn_cb = None + self.update_header_cb = None + + def close(self): + self.timeout_cb = None + self.fwd_fifo.reg_trigger(None) + self.tx_fifo.reg_trigger(None) + self.rx_fifo.reg_trigger(None) + + def set_node_id(self, value: str): + self.node_id = value + + def get_conn_no(self): + return self.conn_no + + def tx_add(self, data: bytearray): + ''' add data to transmit queue''' + self.tx_fifo += data + + def tx_flush(self): + ''' send transmit queue and clears it''' + self.tx_fifo() + + def tx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + return self.tx_fifo.peek(size) + + def tx_log(self, level, info): + ''' log the transmit queue''' + self.tx_fifo.logging(level, info) + + def tx_clear(self): + ''' clear transmit queue''' + self.tx_fifo.clear() + + def tx_len(self): + ''' get numner of bytes in the transmit queue''' + return len(self.tx_fifo) + + def fwd_add(self, data: bytearray): + ''' add data to forward queue''' + self.fwd_fifo += data + + def fwd_log(self, level, info): + ''' log the forward queue''' + self.fwd_fifo.logging(level, info) + + def rx_get(self, size: int = None) -> bytearray: + '''removes size numbers of bytes and return them''' + return self.rx_fifo.get(size) + + def rx_peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing 
them''' + return self.rx_fifo.peek(size) + + def rx_log(self, level, info): + ''' logs the receive queue''' + self.rx_fifo.logging(level, info) + + def rx_clear(self): + ''' clear receive queue''' + self.rx_fifo.clear() + + def rx_len(self): + ''' get numner of bytes in the receive queue''' + return len(self.rx_fifo) + + def rx_set_cb(self, callback): + self.rx_fifo.reg_trigger(callback) + + def prot_set_timeout_cb(self, callback): + self.timeout_cb = callback + + def prot_set_init_new_client_conn_cb(self, callback): + self.init_new_client_conn_cb = callback + + def prot_set_update_header_cb(self, callback): + self.update_header_cb = callback + + +class StreamPtr(): + '''Descr StreamPtr''' + def __init__(self, _stream, _ifc=None): + self.stream = _stream + self.ifc = _ifc + + @property + def ifc(self): + return self._ifc + + @ifc.setter + def ifc(self, value): + self._ifc = value + + @property + def stream(self): + return self._stream + + @stream.setter + def stream(self, value): + self._stream = value + + +class AsyncStream(AsyncIfcImpl): MAX_PROC_TIME = 2 '''maximum processing time for a received msg in sec''' MAX_START_TIME = 400 @@ -28,95 +147,42 @@ class AsyncStream(): '''maximum default time without a received msg in sec''' def __init__(self, reader: StreamReader, writer: StreamWriter, - addr) -> None: + rstream: "StreamPtr") -> None: + AsyncIfcImpl.__init__(self) + logger.debug('AsyncStream.__init__') - self.reader = reader - self.writer = writer - self.addr = addr - self.r_addr = '' - self.l_addr = '' - self.conn_no = next(self._ids) + + self.remote = rstream + self.tx_fifo.reg_trigger(self.__write_cb) + self._reader = reader + self._writer = writer + self.r_addr = writer.get_extra_info('peername') + self.l_addr = writer.get_extra_info('sockname') self.proc_start = None # start processing start timestamp self.proc_max = 0 + self.async_publ_mqtt = None # will be set AsyncStreamServer only + + def __write_cb(self): + self._writer.write(self.tx_fifo.get()) def __timeout(self) -> int: - if self.state == State.init or self.state == State.received: - to = self.MAX_START_TIME - elif self.state == State.up and \ - self.server_side and self.modbus_polling: - to = self.MAX_INV_IDLE_TIME - else: - to = self.MAX_DEF_IDLE_TIME - return to - - async def publish_outstanding_mqtt(self): - '''Publish all outstanding MQTT topics''' - try: - if self.unique_id: - await self.async_publ_mqtt() - await self._async_publ_mqtt_proxy_stat('proxy') - except Exception: - pass - - async def server_loop(self, addr: str) -> None: - '''Loop for receiving messages from the inverter (server-side)''' - logger.info(f'[{self.node_id}:{self.conn_no}] ' - f'Accept connection from {addr}') - self.inc_counter('Inverter_Cnt') - await self.publish_outstanding_mqtt() - await self.loop() - self.dec_counter('Inverter_Cnt') - await self.publish_outstanding_mqtt() - logger.info(f'[{self.node_id}:{self.conn_no}] Server loop stopped for' - f' r{self.r_addr}') - - # if the server connection closes, we also have to disconnect - # the connection to te TSUN cloud - if self.remote_stream: - logger.info(f'[{self.node_id}:{self.conn_no}] disc client ' - f'connection: [{self.remote_stream.node_id}:' - f'{self.remote_stream.conn_no}]') - await self.remote_stream.disc() - - async def client_loop(self, _: str) -> None: - '''Loop for receiving messages from the TSUN cloud (client-side)''' - client_stream = await self.remote_stream.loop() - logger.info(f'[{client_stream.node_id}:{client_stream.conn_no}] ' - 'Client loop stopped for' - f' 
l{client_stream.l_addr}') - - # if the client connection closes, we don't touch the server - # connection. Instead we erase the client connection stream, - # thus on the next received packet from the inverter, we can - # establish a new connection to the TSUN cloud - - # erase backlink to inverter - client_stream.remote_stream = None - - if self.remote_stream == client_stream: - # logging.debug(f'Client l{client_stream.l_addr} refs:' - # f' {gc.get_referrers(client_stream)}') - # than erase client connection - self.remote_stream = None + if self.timeout_cb: + return self.timeout_cb() + return 360 async def loop(self) -> Self: """Async loop handler for precessing all received messages""" - self.r_addr = self.writer.get_extra_info('peername') - self.l_addr = self.writer.get_extra_info('sockname') self.proc_start = time.time() while True: try: - proc = time.time() - self.proc_start - if proc > self.proc_max: - self.proc_max = proc - self.proc_start = None + self.__calc_proc_time() dead_conn_to = self.__timeout() await asyncio.wait_for(self.__async_read(), dead_conn_to) - if self.unique_id: - await self.async_write() - await self.__async_forward() + await self.__async_write() + await self.__async_forward() + if self.async_publ_mqtt: await self.async_publ_mqtt() except asyncio.TimeoutError: @@ -124,7 +190,6 @@ class AsyncStream(): f'connection timeout ({dead_conn_to}s) ' f'for {self.l_addr}') await self.disc() - self.close() return self except OSError as error: @@ -132,56 +197,54 @@ class AsyncStream(): f'{error} for l{self.l_addr} | ' f'r{self.r_addr}') await self.disc() - self.close() return self except RuntimeError as error: logger.info(f'[{self.node_id}:{self.conn_no}] ' f'{error} for {self.l_addr}') await self.disc() - self.close() return self except Exception: - self.inc_counter('SW_Exception') + Infos.inc_counter('SW_Exception') logger.error( - f"Exception for {self.addr}:\n" + f"Exception for {self.r_addr}:\n" f"{traceback.format_exc()}") await asyncio.sleep(0) # be cooperative to other task - async def async_write(self, headline: str = 'Transmit to ') -> None: - """Async write handler to transmit the send_buffer""" - if self._send_buffer: - hex_dump_memory(logging.INFO, f'{headline}{self.addr}:', - self._send_buffer, len(self._send_buffer)) - self.writer.write(self._send_buffer) - await self.writer.drain() - self._send_buffer = bytearray(0) # self._send_buffer[sent:] + def __calc_proc_time(self): + if self.proc_start: + proc = time.time() - self.proc_start + if proc > self.proc_max: + self.proc_max = proc + self.proc_start = None async def disc(self) -> None: """Async disc handler for graceful disconnect""" - if self.writer.is_closing(): + self.remote = None + if self._writer.is_closing(): return logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}') - self.writer.close() - await self.writer.wait_closed() + self._writer.close() + await self._writer.wait_closed() def close(self) -> None: + logging.debug(f'AsyncStream.close() l{self.l_addr} | r{self.r_addr}') """close handler for a no waiting disconnect hint: must be called before releasing the connection instance """ - self.reader.feed_eof() # abort awaited read - if self.writer.is_closing(): + super().close() + self._reader.feed_eof() # abort awaited read + if self._writer.is_closing(): return - logger.debug(f'AsyncStream.close() l{self.l_addr} | r{self.r_addr}') - self.writer.close() + self._writer.close() def healthy(self) -> bool: elapsed = 0 if self.proc_start is not None: elapsed = time.time() - self.proc_start - if 
self.state == State.closed or elapsed > self.MAX_PROC_TIME: + if elapsed > self.MAX_PROC_TIME: logging.debug(f'[{self.node_id}:{self.conn_no}:' f'{type(self).__name__}]' f' act:{round(1000*elapsed)}ms' @@ -194,61 +257,139 @@ class AsyncStream(): ''' async def __async_read(self) -> None: """Async read handler to read received data from TCP stream""" - data = await self.reader.read(4096) + data = await self._reader.read(4096) if data: self.proc_start = time.time() - self._recv_buffer += data - wait = self.read() # call read in parent class - if wait > 0: + self.rx_fifo += data + wait = self.rx_fifo() # call read in parent class + if wait and wait > 0: await asyncio.sleep(wait) else: raise RuntimeError("Peer closed.") + async def __async_write(self, headline: str = 'Transmit to ') -> None: + """Async write handler to transmit the send_buffer""" + if len(self.tx_fifo) > 0: + self.tx_fifo.logging(logging.INFO, f'{headline}{self.r_addr}:') + self._writer.write(self.tx_fifo.get()) + await self._writer.drain() + async def __async_forward(self) -> None: """forward handler transmits data over the remote connection""" - if not self._forward_buffer: + if len(self.fwd_fifo) == 0: return try: - if not self.remote_stream: - await self.async_create_remote() - if self.remote_stream: - if self.remote_stream._init_new_client_conn(): - await self.remote_stream.async_write() - - if self.remote_stream: - self.remote_stream._update_header(self._forward_buffer) - hex_dump_memory(logging.INFO, - f'Forward to {self.remote_stream.addr}:', - self._forward_buffer, - len(self._forward_buffer)) - self.remote_stream.writer.write(self._forward_buffer) - await self.remote_stream.writer.drain() - self._forward_buffer = bytearray(0) + await self._async_forward() except OSError as error: - if self.remote_stream: - rmt = self.remote_stream - self.remote_stream = None - logger.error(f'[{rmt.node_id}:{rmt.conn_no}] Fwd: {error} for ' - f'l{rmt.l_addr} | r{rmt.r_addr}') - await rmt.disc() - rmt.close() + if self.remote.stream: + rmt = self.remote + logger.error(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] ' + f'Fwd: {error} for ' + f'l{rmt.ifc.l_addr} | r{rmt.ifc.r_addr}') + await rmt.ifc.disc() + if rmt.ifc.close_cb: + rmt.ifc.close_cb() except RuntimeError as error: - if self.remote_stream: - rmt = self.remote_stream - self.remote_stream = None - logger.info(f'[{rmt.node_id}:{rmt.conn_no}] ' - f'Fwd: {error} for {rmt.l_addr}') - await rmt.disc() - rmt.close() + if self.remote.stream: + rmt = self.remote + logger.info(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] ' + f'Fwd: {error} for {rmt.ifc.l_addr}') + await rmt.ifc.disc() + if rmt.ifc.close_cb: + rmt.ifc.close_cb() except Exception: - self.inc_counter('SW_Exception') + Infos.inc_counter('SW_Exception') logger.error( - f"Fwd Exception for {self.addr}:\n" + f"Fwd Exception for {self.r_addr}:\n" f"{traceback.format_exc()}") - def __del__(self): - logger.debug( - f"AsyncStream.__del__ l{self.l_addr} | r{self.r_addr}") + +class AsyncStreamServer(AsyncStream): + def __init__(self, reader: StreamReader, writer: StreamWriter, + async_publ_mqtt, create_remote, + rstream: "StreamPtr") -> None: + AsyncStream.__init__(self, reader, writer, rstream) + self.create_remote = create_remote + self.async_publ_mqtt = async_publ_mqtt + + def close(self) -> None: + logging.debug('AsyncStreamServer.close()') + self.create_remote = None + self.async_publ_mqtt = None + super().close() + + async def server_loop(self) -> None: + '''Loop for receiving messages from the inverter (server-side)''' + 
logger.info(f'[{self.node_id}:{self.conn_no}] ' + f'Accept connection from {self.r_addr}') + Infos.inc_counter('Inverter_Cnt') + await self.publish_outstanding_mqtt() + await self.loop() + Infos.dec_counter('Inverter_Cnt') + await self.publish_outstanding_mqtt() + logger.info(f'[{self.node_id}:{self.conn_no}] Server loop stopped for' + f' r{self.r_addr}') + + # if the server connection closes, we also have to disconnect + # the connection to te TSUN cloud + if self.remote and self.remote.stream: + logger.info(f'[{self.node_id}:{self.conn_no}] disc client ' + f'connection: [{self.remote.ifc.node_id}:' + f'{self.remote.ifc.conn_no}]') + await self.remote.ifc.disc() + + async def _async_forward(self) -> None: + """forward handler transmits data over the remote connection""" + if not self.remote.stream: + await self.create_remote() + if self.remote.stream and \ + self.remote.ifc.init_new_client_conn_cb(): + await self.remote.ifc._AsyncStream__async_write() + if self.remote.stream: + self.remote.ifc.update_header_cb(self.fwd_fifo.peek()) + self.fwd_fifo.logging(logging.INFO, 'Forward to ' + f'{self.remote.ifc.r_addr}:') + self.remote.ifc._writer.write(self.fwd_fifo.get()) + await self.remote.ifc._writer.drain() + + async def publish_outstanding_mqtt(self): + '''Publish all outstanding MQTT topics''' + try: + await self.async_publ_mqtt() + await Proxy._async_publ_mqtt_proxy_stat('proxy') + except Exception: + pass + + +class AsyncStreamClient(AsyncStream): + def __init__(self, reader: StreamReader, writer: StreamWriter, + rstream: "StreamPtr", close_cb) -> None: + AsyncStream.__init__(self, reader, writer, rstream) + self.close_cb = close_cb + + def close(self) -> None: + logging.debug('AsyncStreamClient.close()') + self.close_cb = None + super().close() + + async def client_loop(self, _: str) -> None: + '''Loop for receiving messages from the TSUN cloud (client-side)''' + await self.loop() + logger.info(f'[{self.node_id}:{self.conn_no}] ' + 'Client loop stopped for' + f' l{self.l_addr}') + + if self.close_cb: + self.close_cb() + + async def _async_forward(self) -> None: + """forward handler transmits data over the remote connection""" + if self.remote.stream: + self.remote.ifc.update_header_cb(self.fwd_fifo.peek()) + self.fwd_fifo.logging(logging.INFO, 'Forward to ' + f'{self.remote.ifc.r_addr}:') + self.remote.ifc._writer.write(self.fwd_fifo.get()) + await self.remote.ifc._writer.drain() diff --git a/app/src/byte_fifo.py b/app/src/byte_fifo.py new file mode 100644 index 0000000..af9cb09 --- /dev/null +++ b/app/src/byte_fifo.py @@ -0,0 +1,54 @@ + +if __name__ == "app.src.byte_fifo": + from app.src.messages import hex_dump_str, hex_dump_memory +else: # pragma: no cover + from messages import hex_dump_str, hex_dump_memory + + +class ByteFifo: + """ a byte FIFO buffer with trigger callback """ + def __init__(self): + self.__buf = bytearray() + self.__trigger_cb = None + + def reg_trigger(self, cb) -> None: + self.__trigger_cb = cb + + def __iadd__(self, data): + self.__buf.extend(data) + return self + + def __call__(self): + '''triggers the observer''' + if callable(self.__trigger_cb): + return self.__trigger_cb() + return None + + def get(self, size: int = None) -> bytearray: + '''removes size numbers of byte and return them''' + if not size: + data = self.__buf + self.clear() + else: + data = self.__buf[:size] + # The fast delete syntax + self.__buf[:size] = b'' + return data + + def peek(self, size: int = None) -> bytearray: + '''returns size numbers of byte without removing them''' + if not 
size: + return self.__buf + return self.__buf[:size] + + def clear(self): + self.__buf = bytearray() + + def __len__(self) -> int: + return len(self.__buf) + + def __str__(self) -> str: + return hex_dump_str(self.__buf, self.__len__()) + + def logging(self, level, info): + hex_dump_memory(level, info, self.__buf, self.__len__()) diff --git a/app/src/gen3/connection_g3.py b/app/src/gen3/connection_g3.py deleted file mode 100644 index b7e246b..0000000 --- a/app/src/gen3/connection_g3.py +++ /dev/null @@ -1,46 +0,0 @@ -import logging -from asyncio import StreamReader, StreamWriter - -if __name__ == "app.src.gen3.connection_g3": - from app.src.async_stream import AsyncStream - from app.src.gen3.talent import Talent -else: # pragma: no cover - from async_stream import AsyncStream - from gen3.talent import Talent - -logger = logging.getLogger('conn') - - -class ConnectionG3(AsyncStream, Talent): - - def __init__(self, reader: StreamReader, writer: StreamWriter, - addr, remote_stream: 'ConnectionG3', server_side: bool, - id_str=b'') -> None: - AsyncStream.__init__(self, reader, writer, addr) - Talent.__init__(self, server_side, id_str) - - self.remote_stream: 'ConnectionG3' = remote_stream - - ''' - Our puplic methods - ''' - def close(self): - AsyncStream.close(self) - Talent.close(self) - # logger.info(f'AsyncStream refs: {gc.get_referrers(self)}') - - async def async_create_remote(self) -> None: - pass # virtual interface # pragma: no cover - - async def async_publ_mqtt(self) -> None: - pass # virtual interface # pragma: no cover - - def healthy(self) -> bool: - logger.debug('ConnectionG3 healthy()') - return AsyncStream.healthy(self) - - ''' - Our private methods - ''' - def __del__(self): - super().__del__() diff --git a/app/src/gen3/inverter_g3.py b/app/src/gen3/inverter_g3.py index c365286..de9c519 100644 --- a/app/src/gen3/inverter_g3.py +++ b/app/src/gen3/inverter_g3.py @@ -1,137 +1,13 @@ -import logging -import traceback -import json -import asyncio from asyncio import StreamReader, StreamWriter -from aiomqtt import MqttCodeError if __name__ == "app.src.gen3.inverter_g3": - from app.src.config import Config - from app.src.inverter import Inverter - from app.src.gen3.connection_g3 import ConnectionG3 - from app.src.infos import Infos + from app.src.inverter_base import InverterBase + from app.src.gen3.talent import Talent else: # pragma: no cover - from config import Config - from inverter import Inverter - from gen3.connection_g3 import ConnectionG3 - from infos import Infos + from inverter_base import InverterBase + from gen3.talent import Talent -logger_mqtt = logging.getLogger('mqtt') - - -class InverterG3(Inverter, ConnectionG3): - '''class Inverter is a derivation of an Async_Stream - - The class has some class method for managing common resources like a - connection to the MQTT broker or proxy error counter which are common - for all inverter connection - - Instances of the class are connections to an inverter and can have an - optional link to an remote connection to the TSUN cloud. A remote - connection dies with the inverter connection. - - class methods: - class_init(): initialize the common resources of the proxy (MQTT - broker, Proxy DB, etc). Must be called before the - first inverter instance can be created - class_close(): release the common resources of the proxy. 
Should not - be called before any instances of the class are - destroyed - - methods: - server_loop(addr): Async loop method for receiving messages from the - inverter (server-side) - client_loop(addr): Async loop method for receiving messages from the - TSUN cloud (client-side) - async_create_remote(): Establish a client connection to the TSUN cloud - async_publ_mqtt(): Publish data to MQTT broker - close(): Release method which must be called before a instance can be - destroyed - ''' - - def __init__(self, reader: StreamReader, writer: StreamWriter, addr): - super().__init__(reader, writer, addr, None, True) - self.__ha_restarts = -1 - - async def async_create_remote(self) -> None: - '''Establish a client connection to the TSUN cloud''' - tsun = Config.get('tsun') - host = tsun['host'] - port = tsun['port'] - addr = (host, port) - - try: - logging.info(f'[{self.node_id}] Connect to {addr}') - connect = asyncio.open_connection(host, port) - reader, writer = await connect - self.remote_stream = ConnectionG3(reader, writer, addr, self, - False, self.id_str) - logging.info(f'[{self.remote_stream.node_id}:' - f'{self.remote_stream.conn_no}] ' - f'Connected to {addr}') - asyncio.create_task(self.client_loop(addr)) - - except (ConnectionRefusedError, TimeoutError) as error: - logging.info(f'{error}') - except Exception: - self.inc_counter('SW_Exception') - logging.error( - f"Inverter: Exception for {addr}:\n" - f"{traceback.format_exc()}") - - async def async_publ_mqtt(self) -> None: - '''publish data to MQTT broker''' - # check if new inverter or collector infos are available or when the - # home assistant has changed the status back to online - try: - if (('inverter' in self.new_data and self.new_data['inverter']) - or ('collector' in self.new_data and - self.new_data['collector']) - or self.mqtt.ha_restarts != self.__ha_restarts): - await self._register_proxy_stat_home_assistant() - await self.__register_home_assistant() - self.__ha_restarts = self.mqtt.ha_restarts - - for key in self.new_data: - await self.__async_publ_mqtt_packet(key) - for key in Infos.new_stat_data: - await self._async_publ_mqtt_proxy_stat(key) - - except MqttCodeError as error: - logging.error(f'Mqtt except: {error}') - except Exception: - self.inc_counter('SW_Exception') - logging.error( - f"Inverter: Exception:\n" - f"{traceback.format_exc()}") - - async def __async_publ_mqtt_packet(self, key): - db = self.db.db - if key in db and self.new_data[key]: - data_json = json.dumps(db[key]) - node_id = self.node_id - logger_mqtt.debug(f'{key}: {data_json}') - await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 - self.new_data[key] = False - - async def __register_home_assistant(self) -> None: - '''register all our topics at home assistant''' - for data_json, component, node_id, id in self.db.ha_confs( - self.entity_prfx, self.node_id, self.unique_id, - self.sug_area): - logger_mqtt.debug(f"MQTT Register: cmp:'{component}'" - f" node_id:'{node_id}' {data_json}") - await self.mqtt.publish(f"{self.discovery_prfx}{component}" - f"/{node_id}{id}/config", data_json) - - self.db.reg_clr_at_midnight(f'{self.entity_prfx}{self.node_id}') - - def close(self) -> None: - logging.debug(f'InverterG3.close() l{self.l_addr} | r{self.r_addr}') - super().close() # call close handler in the parent class -# logging.info(f'Inverter refs: {gc.get_referrers(self)}') - - def __del__(self): - logging.debug("InverterG3.__del__") - super().__del__() +class InverterG3(InverterBase): + def __init__(self, reader: 
StreamReader, writer: StreamWriter): + super().__init__(reader, writer, 'tsun', Talent) diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index bb86357..522f4d0 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -5,14 +5,16 @@ from datetime import datetime from tzlocal import get_localzone if __name__ == "app.src.gen3.talent": - from app.src.messages import hex_dump_memory, Message, State + from app.src.async_ifc import AsyncIfc + from app.src.messages import Message, State from app.src.modbus import Modbus from app.src.my_timer import Timer from app.src.config import Config from app.src.gen3.infos_g3 import InfosG3 from app.src.infos import Register else: # pragma: no cover - from messages import hex_dump_memory, Message, State + from async_ifc import AsyncIfc + from messages import Message, State from modbus import Modbus from my_timer import Timer from config import Config @@ -44,8 +46,16 @@ class Talent(Message): MB_REGULAR_TIMEOUT = 60 TXT_UNKNOWN_CTRL = 'Unknown Ctrl' - def __init__(self, server_side: bool, id_str=b''): + def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, + client_mode: bool = False, id_str=b''): super().__init__(server_side, self.send_modbus_cb, mb_timeout=15) + ifc.rx_set_cb(self.read) + ifc.prot_set_timeout_cb(self._timeout) + ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn) + ifc.prot_set_update_header_cb(self._update_header) + self.addr = addr + self.ifc = ifc + self.conn_no = ifc.get_conn_no() self.await_conn_resp_cnt = 0 self.id_str = id_str self.contact_name = b'' @@ -56,7 +66,7 @@ class Talent(Message): 0x00: self.msg_contact_info, 0x13: self.msg_ota_update, 0x22: self.msg_get_time, - 0x99: self.msg_act_time, + 0x99: self.msg_heartbeat, 0x71: self.msg_collector_data, # 0x76: 0x77: self.msg_modbus, @@ -103,6 +113,11 @@ class Talent(Message): self.log_lvl.clear() self.state = State.closed self.mb_timer.close() + self.ifc.rx_set_cb(None) + self.ifc.prot_set_timeout_cb(None) + self.ifc.prot_set_init_new_client_conn_cb(None) + self.ifc.prot_set_update_header_cb(None) + self.ifc = None super().close() def __set_serial_no(self, serial_no: str): @@ -138,10 +153,10 @@ class Talent(Message): self._read() while True: if not self.header_valid: - self.__parse_header(self._recv_buffer, len(self._recv_buffer)) + self.__parse_header(self.ifc.rx_peek(), self.ifc.rx_len()) if self.header_valid and \ - len(self._recv_buffer) >= (self.header_len + self.data_len): + self.ifc.rx_len() >= (self.header_len + self.data_len): if self.state == State.init: self.state = State.received # received 1st package @@ -149,11 +164,10 @@ class Talent(Message): if callable(log_lvl): log_lvl = log_lvl() - hex_dump_memory(log_lvl, f'Received from {self.addr}:' - f' BufLen: {len(self._recv_buffer)}' + self.ifc.rx_log(log_lvl, f'Received from {self.addr}:' + f' BufLen: {self.ifc.rx_len()}' f' HdrLen: {self.header_len}' - f' DtaLen: {self.data_len}', - self._recv_buffer, len(self._recv_buffer)) + f' DtaLen: {self.data_len}') self.__set_serial_no(self.id_str.decode("utf-8")) self.__dispatch_msg() @@ -165,35 +179,15 @@ class Talent(Message): '''add the actual receive msg to the forwarding queue''' tsun = Config.get('tsun') if tsun['enabled']: - buffer = self._recv_buffer buflen = self.header_len+self.data_len - self._forward_buffer += buffer[:buflen] - hex_dump_memory(logging.DEBUG, 'Store for forwarding:', - buffer, buflen) + buffer = self.ifc.rx_peek(buflen) + self.ifc.fwd_add(buffer) + self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') fnc = 
self.switch.get(self.msg_id, self.msg_unknown) logger.info(self.__flow_str(self.server_side, 'forwrd') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') - def forward_snd(self) -> None: - '''add the build send msg to the forwarding queue''' - tsun = Config.get('tsun') - if tsun['enabled']: - _len = len(self._send_buffer) - self.send_msg_ofs - struct.pack_into('!l', self._send_buffer, self.send_msg_ofs, - _len-4) - - buffer = self._send_buffer[self.send_msg_ofs:] - buflen = _len - self._forward_buffer += buffer[:buflen] - hex_dump_memory(logging.INFO, 'Store for forwarding:', - buffer, buflen) - - fnc = self.switch.get(self.msg_id, self.msg_unknown) - logger.info(self.__flow_str(self.server_side, 'forwrd') + - f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') - self._send_buffer = self._send_buffer[:self.send_msg_ofs] - def send_modbus_cb(self, modbus_pdu: bytearray, log_lvl: int, state: str): if self.state != State.up: logger.warning(f'[{self.node_id}] ignore MODBUS cmd,' @@ -201,15 +195,13 @@ class Talent(Message): return self.__build_header(0x70, 0x77) - self._send_buffer += b'\x00\x01\xa3\x28' # magic ? - self._send_buffer += struct.pack('!B', len(modbus_pdu)) - self._send_buffer += modbus_pdu + self.ifc.tx_add(b'\x00\x01\xa3\x28') # magic ? + self.ifc.tx_add(struct.pack('!B', len(modbus_pdu))) + self.ifc.tx_add(modbus_pdu) self.__finish_send_msg() - hex_dump_memory(log_lvl, f'Send Modbus {state}:{self.addr}:', - self._send_buffer, len(self._send_buffer)) - self.writer.write(self._send_buffer) - self._send_buffer = bytearray(0) # self._send_buffer[sent:] + self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:') + self.ifc.tx_flush() def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: if self.state != State.up: @@ -237,9 +229,9 @@ class Talent(Message): self.msg_id = 0 self.await_conn_resp_cnt += 1 self.__build_header(0x91) - self._send_buffer += struct.pack(f'!{len(contact_name)+1}p' - f'{len(contact_mail)+1}p', - contact_name, contact_mail) + self.ifc.tx_add(struct.pack(f'!{len(contact_name)+1}p' + f'{len(contact_mail)+1}p', + contact_name, contact_mail)) self.__finish_send_msg() return True @@ -323,7 +315,7 @@ class Talent(Message): self.inc_counter('Invalid_Msg_Format') # erase broken recv buffer - self._recv_buffer = bytearray() + self.ifc.rx_clear() return hdr_len = 5+id_len+2 @@ -344,16 +336,17 @@ class Talent(Message): def __build_header(self, ctrl, msg_id=None) -> None: if not msg_id: msg_id = self.msg_id - self.send_msg_ofs = len(self._send_buffer) - self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB', - 0, self.id_str, ctrl, msg_id) + self.send_msg_ofs = self.ifc.tx_len() + self.ifc.tx_add(struct.pack(f'!l{len(self.id_str)+1}pBB', + 0, self.id_str, ctrl, msg_id)) fnc = self.switch.get(msg_id, self.msg_unknown) logger.info(self.__flow_str(self.server_side, 'tx') + f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}') def __finish_send_msg(self) -> None: - _len = len(self._send_buffer) - self.send_msg_ofs - struct.pack_into('!l', self._send_buffer, self.send_msg_ofs, _len-4) + _len = self.ifc.tx_len() - self.send_msg_ofs + struct.pack_into('!l', self.ifc.tx_peek(), self.send_msg_ofs, + _len-4) def __dispatch_msg(self) -> None: fnc = self.switch.get(self.msg_id, self.msg_unknown) @@ -367,7 +360,7 @@ class Talent(Message): f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}') def __flush_recv_msg(self) -> None: - self._recv_buffer = self._recv_buffer[(self.header_len+self.data_len):] + self.ifc.rx_get(self.header_len+self.data_len) self.header_valid 
= False ''' @@ -377,7 +370,7 @@ class Talent(Message): if self.ctrl.is_ind(): if self.server_side and self.__process_contact_info(): self.__build_header(0x91) - self._send_buffer += b'\x01' + self.ifc.tx_add(b'\x01') self.__finish_send_msg() # don't forward this contact info here, we will build one # when the remote connection is established @@ -391,18 +384,19 @@ class Talent(Message): self.forward() def __process_contact_info(self) -> bool: - result = struct.unpack_from('!B', self._recv_buffer, self.header_len) + buf = self.ifc.rx_peek() + result = struct.unpack_from('!B', buf, self.header_len) name_len = result[0] if self.data_len == 1: # this is a response withone status byte return False if self.data_len >= name_len+2: - result = struct.unpack_from(f'!{name_len+1}pB', self._recv_buffer, + result = struct.unpack_from(f'!{name_len+1}pB', buf, self.header_len) self.contact_name = result[0] mail_len = result[1] logger.info(f'name: {self.contact_name}') - result = struct.unpack_from(f'!{mail_len+1}p', self._recv_buffer, + result = struct.unpack_from(f'!{mail_len+1}p', buf, self.header_len+name_len+1) self.contact_mail = result[0] logger.info(f'mail: {self.contact_mail}') @@ -417,16 +411,16 @@ class Talent(Message): ts = self._timestamp() logger.debug(f'time: {ts:08x}') self.__build_header(0x91) - self._send_buffer += struct.pack('!q', ts) + self.ifc.tx_add(struct.pack('!q', ts)) self.__finish_send_msg() elif self.data_len >= 8: ts = self._timestamp() - result = struct.unpack_from('!q', self._recv_buffer, + result = struct.unpack_from('!q', self.ifc.rx_peek(), self.header_len) self.ts_offset = result[0]-ts - if self.remote_stream: - self.remote_stream.ts_offset = self.ts_offset + if self.ifc.remote.stream: + self.ifc.remote.stream.ts_offset = self.ts_offset logger.debug(f'tsun-time: {int(result[0]):08x}' f' proxy-time: {ts:08x}' f' offset: {self.ts_offset}') @@ -437,7 +431,7 @@ class Talent(Message): self.forward() - def msg_act_time(self): + def msg_heartbeat(self): if self.ctrl.is_ind(): if self.data_len == 9: self.state = State.up # allow MODBUS cmds @@ -446,25 +440,23 @@ class Talent(Message): self.db.set_db_def_value(Register.POLLING_INTERVAL, self.mb_timeout) self.__build_header(0x99) - self._send_buffer += b'\x02' + self.ifc.tx_add(b'\x02') self.__finish_send_msg() - result = struct.unpack_from('!Bq', self._recv_buffer, + result = struct.unpack_from('!Bq', self.ifc.rx_peek(), self.header_len) resp_code = result[0] ts = result[1]+self.ts_offset logger.debug(f'inv-time: {int(result[1]):08x}' f' tsun-time: {ts:08x}' f' offset: {self.ts_offset}') - self.__build_header(0x91) - self._send_buffer += struct.pack('!Bq', resp_code, ts) - self.forward_snd() - return + struct.pack_into('!Bq', self.ifc.rx_peek(), + self.header_len, resp_code, ts) elif self.ctrl.is_resp(): - result = struct.unpack_from('!B', self._recv_buffer, + result = struct.unpack_from('!B', self.ifc.rx_peek(), self.header_len) resp_code = result[0] - logging.debug(f'TimeActRespCode: {resp_code}') + logging.debug(f'Heartbeat-RespCode: {resp_code}') return else: logger.warning(self.TXT_UNKNOWN_CTRL) @@ -473,7 +465,8 @@ class Talent(Message): self.forward() def parse_msg_header(self): - result = struct.unpack_from('!lB', self._recv_buffer, self.header_len) + result = struct.unpack_from('!lB', self.ifc.rx_peek(), + self.header_len) data_id = result[0] # len of complete message id_len = result[1] # len of variable id string @@ -481,7 +474,7 @@ class Talent(Message): msg_hdr_len = 5+id_len+9 - result = 
struct.unpack_from(f'!{id_len+1}pBq', self._recv_buffer, + result = struct.unpack_from(f'!{id_len+1}pBq', self.ifc.rx_peek(), self.header_len + 4) timestamp = result[2] @@ -494,7 +487,7 @@ class Talent(Message): def msg_collector_data(self): if self.ctrl.is_ind(): self.__build_header(0x99) - self._send_buffer += b'\x01' + self.ifc.tx_add(b'\x01') self.__finish_send_msg() self.__process_data() @@ -509,7 +502,7 @@ class Talent(Message): def msg_inverter_data(self): if self.ctrl.is_ind(): self.__build_header(0x99) - self._send_buffer += b'\x01' + self.ifc.tx_add(b'\x01') self.__finish_send_msg() self.__process_data() self.state = State.up # allow MODBUS cmds @@ -529,7 +522,7 @@ class Talent(Message): def __process_data(self): msg_hdr_len, ts = self.parse_msg_header() - for key, update in self.db.parse(self._recv_buffer, self.header_len + for key, update in self.db.parse(self.ifc.rx_peek(), self.header_len + msg_hdr_len, self.node_id): if update: self._set_mqtt_timestamp(key, self._utcfromts(ts)) @@ -549,7 +542,7 @@ class Talent(Message): msg_hdr_len = 5 - result = struct.unpack_from('!lBB', self._recv_buffer, + result = struct.unpack_from('!lBB', self.ifc.rx_peek(), self.header_len) modbus_len = result[1] return msg_hdr_len, modbus_len @@ -558,7 +551,7 @@ class Talent(Message): msg_hdr_len = 6 - result = struct.unpack_from('!lBBB', self._recv_buffer, + result = struct.unpack_from('!lBBB', self.ifc.rx_peek(), self.header_len) modbus_len = result[2] return msg_hdr_len, modbus_len @@ -579,13 +572,12 @@ class Talent(Message): self.__msg_modbus(hdr_len) def __msg_modbus(self, hdr_len): - data = self._recv_buffer[self.header_len: - self.header_len+self.data_len] + data = self.ifc.rx_peek()[self.header_len: + self.header_len+self.data_len] if self.ctrl.is_req(): - if self.remote_stream.mb.recv_req(data[hdr_len:], - self.remote_stream. 
- msg_forward): + rstream = self.ifc.remote.stream + if rstream.mb.recv_req(data[hdr_len:], rstream.msg_forward): self.inc_counter('Modbus_Command') else: self.inc_counter('Invalid_Msg_Format') diff --git a/app/src/gen3plus/connection_g3p.py b/app/src/gen3plus/connection_g3p.py deleted file mode 100644 index 89dfc1a..0000000 --- a/app/src/gen3plus/connection_g3p.py +++ /dev/null @@ -1,47 +0,0 @@ -import logging -from asyncio import StreamReader, StreamWriter - -if __name__ == "app.src.gen3plus.connection_g3p": - from app.src.async_stream import AsyncStream - from app.src.gen3plus.solarman_v5 import SolarmanV5 -else: # pragma: no cover - from async_stream import AsyncStream - from gen3plus.solarman_v5 import SolarmanV5 - -logger = logging.getLogger('conn') - - -class ConnectionG3P(AsyncStream, SolarmanV5): - - def __init__(self, reader: StreamReader, writer: StreamWriter, - addr, remote_stream: 'ConnectionG3P', - server_side: bool, - client_mode: bool) -> None: - AsyncStream.__init__(self, reader, writer, addr) - SolarmanV5.__init__(self, server_side, client_mode) - - self.remote_stream: 'ConnectionG3P' = remote_stream - - ''' - Our puplic methods - ''' - def close(self): - AsyncStream.close(self) - SolarmanV5.close(self) - # logger.info(f'AsyncStream refs: {gc.get_referrers(self)}') - - async def async_create_remote(self) -> None: - pass # virtual interface # pragma: no cover - - async def async_publ_mqtt(self) -> None: - pass # virtual interface # pragma: no cover - - def healthy(self) -> bool: - logger.debug('ConnectionG3P healthy()') - return AsyncStream.healthy(self) - - ''' - Our private methods - ''' - def __del__(self): - super().__del__() diff --git a/app/src/gen3plus/inverter_g3p.py b/app/src/gen3plus/inverter_g3p.py index d9bf0f2..cc27bb4 100644 --- a/app/src/gen3plus/inverter_g3p.py +++ b/app/src/gen3plus/inverter_g3p.py @@ -1,140 +1,15 @@ -import logging -import traceback -import json -import asyncio from asyncio import StreamReader, StreamWriter -from aiomqtt import MqttCodeError if __name__ == "app.src.gen3plus.inverter_g3p": - from app.src.config import Config - from app.src.inverter import Inverter - from app.src.gen3plus.connection_g3p import ConnectionG3P - from app.src.infos import Infos + from app.src.inverter_base import InverterBase + from app.src.gen3plus.solarman_v5 import SolarmanV5 else: # pragma: no cover - from config import Config - from inverter import Inverter - from gen3plus.connection_g3p import ConnectionG3P - from infos import Infos + from inverter_base import InverterBase + from gen3plus.solarman_v5 import SolarmanV5 -logger_mqtt = logging.getLogger('mqtt') - - -class InverterG3P(Inverter, ConnectionG3P): - '''class Inverter is a derivation of an Async_Stream - - The class has some class method for managing common resources like a - connection to the MQTT broker or proxy error counter which are common - for all inverter connection - - Instances of the class are connections to an inverter and can have an - optional link to an remote connection to the TSUN cloud. A remote - connection dies with the inverter connection. - - class methods: - class_init(): initialize the common resources of the proxy (MQTT - broker, Proxy DB, etc). Must be called before the - first inverter instance can be created - class_close(): release the common resources of the proxy. 
Should not - be called before any instances of the class are - destroyed - - methods: - server_loop(addr): Async loop method for receiving messages from the - inverter (server-side) - client_loop(addr): Async loop method for receiving messages from the - TSUN cloud (client-side) - async_create_remote(): Establish a client connection to the TSUN cloud - async_publ_mqtt(): Publish data to MQTT broker - close(): Release method which must be called before a instance can be - destroyed - ''' - - def __init__(self, reader: StreamReader, writer: StreamWriter, addr, +class InverterG3P(InverterBase): + def __init__(self, reader: StreamReader, writer: StreamWriter, client_mode: bool = False): - super().__init__(reader, writer, addr, None, - server_side=True, client_mode=client_mode) - self.__ha_restarts = -1 - - async def async_create_remote(self) -> None: - '''Establish a client connection to the TSUN cloud''' - tsun = Config.get('solarman') - host = tsun['host'] - port = tsun['port'] - addr = (host, port) - - try: - logging.info(f'[{self.node_id}] Connect to {addr}') - connect = asyncio.open_connection(host, port) - reader, writer = await connect - self.remote_stream = ConnectionG3P(reader, writer, addr, self, - server_side=False, - client_mode=False) - logging.info(f'[{self.remote_stream.node_id}:' - f'{self.remote_stream.conn_no}] ' - f'Connected to {addr}') - asyncio.create_task(self.client_loop(addr)) - - except (ConnectionRefusedError, TimeoutError) as error: - logging.info(f'{error}') - except Exception: - self.inc_counter('SW_Exception') - logging.error( - f"Inverter: Exception for {addr}:\n" - f"{traceback.format_exc()}") - - async def async_publ_mqtt(self) -> None: - '''publish data to MQTT broker''' - # check if new inverter or collector infos are available or when the - # home assistant has changed the status back to online - try: - if (('inverter' in self.new_data and self.new_data['inverter']) - or ('collector' in self.new_data and - self.new_data['collector']) - or self.mqtt.ha_restarts != self.__ha_restarts): - await self._register_proxy_stat_home_assistant() - await self.__register_home_assistant() - self.__ha_restarts = self.mqtt.ha_restarts - - for key in self.new_data: - await self.__async_publ_mqtt_packet(key) - for key in Infos.new_stat_data: - await self._async_publ_mqtt_proxy_stat(key) - - except MqttCodeError as error: - logging.error(f'Mqtt except: {error}') - except Exception: - self.inc_counter('SW_Exception') - logging.error( - f"Inverter: Exception:\n" - f"{traceback.format_exc()}") - - async def __async_publ_mqtt_packet(self, key): - db = self.db.db - if key in db and self.new_data[key]: - data_json = json.dumps(db[key]) - node_id = self.node_id - logger_mqtt.debug(f'{key}: {data_json}') - await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 - self.new_data[key] = False - - async def __register_home_assistant(self) -> None: - '''register all our topics at home assistant''' - for data_json, component, node_id, id in self.db.ha_confs( - self.entity_prfx, self.node_id, self.unique_id, - self.sug_area): - logger_mqtt.debug(f"MQTT Register: cmp:'{component}'" - f" node_id:'{node_id}' {data_json}") - await self.mqtt.publish(f"{self.discovery_prfx}{component}" - f"/{node_id}{id}/config", data_json) - - self.db.reg_clr_at_midnight(f'{self.entity_prfx}{self.node_id}') - - def close(self) -> None: - logging.debug(f'InverterG3P.close() l{self.l_addr} | r{self.r_addr}') - super().close() # call close handler in the parent class -# logger.debug 
(f'Inverter refs: {gc.get_referrers(self)}') - - def __del__(self): - logging.debug("InverterG3P.__del__") - super().__del__() + super().__init__(reader, writer, 'solarman', + SolarmanV5, client_mode) diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py index e1b6e3e..51eb3a1 100644 --- a/app/src/gen3plus/solarman_v5.py +++ b/app/src/gen3plus/solarman_v5.py @@ -5,6 +5,7 @@ import asyncio from datetime import datetime if __name__ == "app.src.gen3plus.solarman_v5": + from app.src.async_ifc import AsyncIfc from app.src.messages import hex_dump_memory, Message, State from app.src.modbus import Modbus from app.src.my_timer import Timer @@ -12,6 +13,7 @@ if __name__ == "app.src.gen3plus.solarman_v5": from app.src.gen3plus.infos_g3p import InfosG3P from app.src.infos import Register else: # pragma: no cover + from async_ifc import AsyncIfc from messages import hex_dump_memory, Message, State from config import Config from modbus import Modbus @@ -60,9 +62,17 @@ class SolarmanV5(Message): HDR_FMT = '= \ + if self.header_valid and self.ifc.rx_len() >= \ (self.header_len + self.data_len+2): self.__process_complete_received_msg() self.__flush_recv_msg() @@ -243,10 +259,10 @@ class SolarmanV5(Message): log_lvl = self.log_lvl.get(self.control, logging.WARNING) if callable(log_lvl): log_lvl = log_lvl() - hex_dump_memory(log_lvl, f'Received from {self.addr}:', - self._recv_buffer, self.header_len + - self.data_len+2) - if self.__trailer_is_ok(self._recv_buffer, self.header_len + self.ifc.rx_log(log_lvl, f'Received from {self.addr}:') + # self._recv_buffer, self.header_len + + # self.data_len+2) + if self.__trailer_is_ok(self.ifc.rx_peek(), self.header_len + self.data_len + 2): if self.state == State.init: self.state = State.received @@ -259,9 +275,8 @@ class SolarmanV5(Message): return tsun = Config.get('solarman') if tsun['enabled']: - self._forward_buffer += buffer[:buflen] - hex_dump_memory(logging.DEBUG, 'Store for forwarding:', - buffer, buflen) + self.ifc.fwd_add(buffer[:buflen]) + self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') fnc = self.switch.get(self.control, self.msg_unknown) logger.info(self.__flow_str(self.server_side, 'forwrd') + @@ -317,7 +332,7 @@ class SolarmanV5(Message): self.inc_counter('Invalid_Msg_Format') # erase broken recv buffer - self._recv_buffer = bytearray() + self.ifc.rx_clear() return self.header_valid = True @@ -329,11 +344,11 @@ class SolarmanV5(Message): 'Drop packet w invalid stop byte from ' f'{self.addr}:', buf, buf_len) self.inc_counter('Invalid_Msg_Format') - if len(self._recv_buffer) > (self.data_len+13): + if self.ifc.rx_len() > (self.data_len+13): next_start = buf[self.data_len+13] if next_start != 0xa5: # erase broken recv buffer - self._recv_buffer = bytearray() + self.ifc.rx_clear() return False @@ -349,21 +364,22 @@ class SolarmanV5(Message): def __build_header(self, ctrl) -> None: '''build header for new transmit message''' - self.send_msg_ofs = len(self._send_buffer) + self.send_msg_ofs = self.ifc.tx_len() - self._send_buffer += struct.pack( - ' None: '''finish the transmit message, set lenght and checksum''' - _len = len(self._send_buffer) - self.send_msg_ofs - struct.pack_into(' None: - self._recv_buffer = self._recv_buffer[(self.header_len + - self.data_len+2):] + self.ifc.rx_get(self.header_len + self.data_len+2) self.header_valid = False def __send_ack_rsp(self, msgtype, ftype, ack=1): self.__build_header(msgtype) - self._send_buffer += struct.pack(' None: if self.state != State.up: @@ -460,17 +473,18 @@ class 
SolarmanV5(Message): self.forward_at_cmd_resp = False self.__build_header(0x4510) - self._send_buffer += struct.pack(f'> 8 - for key, update in self.db.parse(self._recv_buffer, msg_type, ftype, + for key, update in self.db.parse(self.ifc.rx_peek(), msg_type, ftype, self.node_id): if update: if key == 'inverter': @@ -510,7 +524,7 @@ class SolarmanV5(Message): self.__forward_msg() def msg_dev_ind(self): - data = self._recv_buffer[self.header_len:] + data = self.ifc.rx_peek()[self.header_len:] result = struct.unpack_from(self.HDR_FMT, data, 0) ftype = result[0] # always 2 total = result[1] @@ -531,7 +545,7 @@ class SolarmanV5(Message): self.__send_ack_rsp(0x1110, ftype) def msg_data_ind(self): - data = self._recv_buffer + data = self.ifc.rx_peek() result = struct.unpack_from(' int: - ftype = self._recv_buffer[self.header_len] + ftype = self.ifc.rx_peek()[self.header_len] if ftype == self.AT_CMD: if self.forward_at_cmd_resp: return logging.INFO @@ -613,8 +627,8 @@ class SolarmanV5(Message): return logging.WARNING def msg_command_rsp(self): - data = self._recv_buffer[self.header_len: - self.header_len+self.data_len] + data = self.ifc.rx_peek()[self.header_len: + self.header_len+self.data_len] ftype = data[0] if ftype == self.AT_CMD: if not self.forward_at_cmd_resp: @@ -650,7 +664,7 @@ class SolarmanV5(Message): self.__build_model_name() def msg_hbeat_ind(self): - data = self._recv_buffer[self.header_len:] + data = self.ifc.rx_peek()[self.header_len:] result = struct.unpack_from(' None: + @classmethod + def inc_counter(cls, counter: str) -> None: '''inc proxy statistic counter''' - db_dict = self.stat['proxy'] + db_dict = cls.stat['proxy'] db_dict[counter] += 1 + cls.new_stat_data['proxy'] = True - def dec_counter(self, counter: str) -> None: + @classmethod + def dec_counter(cls, counter: str) -> None: '''dec proxy statistic counter''' - db_dict = self.stat['proxy'] + db_dict = cls.stat['proxy'] db_dict[counter] -= 1 + cls.new_stat_data['proxy'] = True def ha_proxy_confs(self, ha_prfx: str, node_id: str, snr: str) \ -> Generator[tuple[str, str, str, str], None, None]: diff --git a/app/src/inverter_base.py b/app/src/inverter_base.py new file mode 100644 index 0000000..8acb01e --- /dev/null +++ b/app/src/inverter_base.py @@ -0,0 +1,183 @@ +import weakref +import asyncio +import logging +import traceback +import json +import gc +from aiomqtt import MqttCodeError +from asyncio import StreamReader, StreamWriter + +if __name__ == "app.src.inverter_base": + from app.src.inverter_ifc import InverterIfc + from app.src.proxy import Proxy + from app.src.async_stream import StreamPtr + from app.src.async_stream import AsyncStreamClient + from app.src.async_stream import AsyncStreamServer + from app.src.config import Config + from app.src.infos import Infos +else: # pragma: no cover + from inverter_ifc import InverterIfc + from proxy import Proxy + from async_stream import StreamPtr + from async_stream import AsyncStreamClient + from async_stream import AsyncStreamServer + from config import Config + from infos import Infos + +logger_mqtt = logging.getLogger('mqtt') + + +class InverterBase(InverterIfc, Proxy): + + def __init__(self, reader: StreamReader, writer: StreamWriter, + config_id: str, prot_class, + client_mode: bool = False): + Proxy.__init__(self) + self._registry.append(weakref.ref(self)) + self.addr = writer.get_extra_info('peername') + self.config_id = config_id + self.prot_class = prot_class + self.__ha_restarts = -1 + self.remote = StreamPtr(None) + ifc = AsyncStreamServer(reader, writer, + 
self.async_publ_mqtt, + self.create_remote, + self.remote) + + self.local = StreamPtr( + self.prot_class(self.addr, ifc, True, client_mode), ifc + ) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb) -> None: + logging.debug(f'InverterBase.__exit__() {self.addr}') + self.__del_remote() + + self.local.stream.close() + self.local.stream = None + self.local.ifc.close() + self.local.ifc = None + + # now explicitly call garbage collector to release unreachable objects + unreachable_obj = gc.collect() + logging.info( + f'InverterBase.__exit: freed unreachable obj: {unreachable_obj}') + + def __del_remote(self): + if self.remote.stream: + self.remote.stream.close() + self.remote.stream = None + + if self.remote.ifc: + self.remote.ifc.close() + self.remote.ifc = None + + async def disc(self, shutdown_started=False) -> None: + if self.remote.stream: + self.remote.stream.shutdown_started = shutdown_started + if self.remote.ifc: + await self.remote.ifc.disc() + if self.local.stream: + self.local.stream.shutdown_started = shutdown_started + if self.local.ifc: + await self.local.ifc.disc() + + def healthy(self) -> bool: + logging.debug('InverterBase healthy()') + + if self.local.ifc and not self.local.ifc.healthy(): + return False + if self.remote.ifc and not self.remote.ifc.healthy(): + return False + return True + + async def create_remote(self) -> None: + '''Establish a client connection to the TSUN cloud''' + + tsun = Config.get(self.config_id) + host = tsun['host'] + port = tsun['port'] + addr = (host, port) + stream = self.local.stream + + try: + logging.info(f'[{stream.node_id}] Connect to {addr}') + connect = asyncio.open_connection(host, port) + reader, writer = await connect + ifc = AsyncStreamClient( + reader, writer, self.local, self.__del_remote) + + self.remote.ifc = ifc + if hasattr(stream, 'id_str'): + self.remote.stream = self.prot_class( + addr, ifc, server_side=False, + client_mode=False, id_str=stream.id_str) + else: + self.remote.stream = self.prot_class( + addr, ifc, server_side=False, + client_mode=False) + + logging.info(f'[{self.remote.stream.node_id}:' + f'{self.remote.stream.conn_no}] ' + f'Connected to {addr}') + asyncio.create_task(self.remote.ifc.client_loop(addr)) + + except (ConnectionRefusedError, TimeoutError) as error: + logging.info(f'{error}') + except Exception: + Infos.inc_counter('SW_Exception') + logging.error( + f"Inverter: Exception for {addr}:\n" + f"{traceback.format_exc()}") + + async def async_publ_mqtt(self) -> None: + '''publish data to MQTT broker''' + stream = self.local.stream + if not stream or not stream.unique_id: + return + # check if new inverter or collector infos are available or when the + # home assistant has changed the status back to online + try: + if (('inverter' in stream.new_data and stream.new_data['inverter']) + or ('collector' in stream.new_data and + stream.new_data['collector']) + or self.mqtt.ha_restarts != self.__ha_restarts): + await self._register_proxy_stat_home_assistant() + await self.__register_home_assistant(stream) + self.__ha_restarts = self.mqtt.ha_restarts + + for key in stream.new_data: + await self.__async_publ_mqtt_packet(stream, key) + for key in Infos.new_stat_data: + await Proxy._async_publ_mqtt_proxy_stat(key) + + except MqttCodeError as error: + logging.error(f'Mqtt except: {error}') + except Exception: + Infos.inc_counter('SW_Exception') + logging.error( + f"Inverter: Exception:\n" + f"{traceback.format_exc()}") + + async def __async_publ_mqtt_packet(self, stream, key): + db = 
stream.db.db + if key in db and stream.new_data[key]: + data_json = json.dumps(db[key]) + node_id = stream.node_id + logger_mqtt.debug(f'{key}: {data_json}') + await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501 + stream.new_data[key] = False + + async def __register_home_assistant(self, stream) -> None: + '''register all our topics at home assistant''' + for data_json, component, node_id, id in stream.db.ha_confs( + self.entity_prfx, stream.node_id, stream.unique_id, + stream.sug_area): + logger_mqtt.debug(f"MQTT Register: cmp:'{component}'" + f" node_id:'{node_id}' {data_json}") + await self.mqtt.publish(f"{self.discovery_prfx}{component}" + f"/{node_id}{id}/config", data_json) + + stream.db.reg_clr_at_midnight(f'{self.entity_prfx}{stream.node_id}') diff --git a/app/src/inverter_ifc.py b/app/src/inverter_ifc.py new file mode 100644 index 0000000..55fc1b9 --- /dev/null +++ b/app/src/inverter_ifc.py @@ -0,0 +1,40 @@ +from abc import abstractmethod +import logging +from asyncio import StreamReader, StreamWriter + +if __name__ == "app.src.inverter_ifc": + from app.src.iter_registry import AbstractIterMeta +else: # pragma: no cover + from iter_registry import AbstractIterMeta + +logger_mqtt = logging.getLogger('mqtt') + + +class InverterIfc(metaclass=AbstractIterMeta): + _registry = [] + + @abstractmethod + def __init__(self, reader: StreamReader, writer: StreamWriter, + config_id: str, prot_class, + client_mode: bool): + pass # pragma: no cover + + @abstractmethod + def __enter__(self): + pass # pragma: no cover + + @abstractmethod + def __exit__(self, exc_type, exc, tb): + pass # pragma: no cover + + @abstractmethod + def healthy(self) -> bool: + pass # pragma: no cover + + @abstractmethod + async def disc(self, shutdown_started=False) -> None: + pass # pragma: no cover + + @abstractmethod + async def create_remote(self) -> None: + pass # pragma: no cover diff --git a/app/src/iter_registry.py b/app/src/iter_registry.py new file mode 100644 index 0000000..ea0cd73 --- /dev/null +++ b/app/src/iter_registry.py @@ -0,0 +1,9 @@ +from abc import ABCMeta + + +class AbstractIterMeta(ABCMeta): + def __iter__(cls): + for ref in cls._registry: + obj = ref() + if obj is not None: + yield obj diff --git a/app/src/messages.py b/app/src/messages.py index 731945b..bbff315 100644 --- a/app/src/messages.py +++ b/app/src/messages.py @@ -1,13 +1,15 @@ import logging import weakref -from typing import Callable, Generator +from typing import Callable from enum import Enum if __name__ == "app.src.messages": + from app.src.protocol_ifc import ProtocolIfc from app.src.infos import Infos, Register from app.src.modbus import Modbus else: # pragma: no cover + from protocol_ifc import ProtocolIfc from infos import Infos, Register from modbus import Modbus @@ -33,13 +35,9 @@ def __asc_val(n, data, data_len): return line -def hex_dump_memory(level, info, data, data_len): +def hex_dump(data, data_len) -> list: n = 0 lines = [] - lines.append(info) - tracer = logging.getLogger('tracer') - if not tracer.isEnabledFor(level): - return for i in range(0, data_len, 16): line = ' ' @@ -50,17 +48,26 @@ def hex_dump_memory(level, info, data, data_len): line += __asc_val(n, data, data_len) lines.append(line) + return lines + + +def hex_dump_str(data, data_len): + lines = hex_dump(data, data_len) + return '\n'.join(lines) + + +def hex_dump_memory(level, info, data, data_len): + lines = [] + lines.append(info) + tracer = logging.getLogger('tracer') + if not tracer.isEnabledFor(level): + return + + lines 
+= hex_dump(data, data_len) + tracer.log(level, '\n'.join(lines)) -class IterRegistry(type): - def __iter__(cls) -> Generator['Message', None, None]: - for ref in cls._registry: - obj = ref() - if obj is not None: - yield obj - - class State(Enum): '''state of the logical connection''' init = 0 @@ -75,8 +82,13 @@ class State(Enum): '''connection closed''' -class Message(metaclass=IterRegistry): - _registry = [] +class Message(ProtocolIfc): + MAX_START_TIME = 400 + '''maximum time without a received msg in sec''' + MAX_INV_IDLE_TIME = 120 + '''maximum time without a received msg from the inverter in sec''' + MAX_DEF_IDLE_TIME = 360 + '''maximum default time without a received msg in sec''' def __init__(self, server_side: bool, send_modbus_cb: Callable[[bytes, int, str], None], mb_timeout: int): @@ -92,15 +104,21 @@ class Message(metaclass=IterRegistry): self.header_len = 0 self.data_len = 0 self.unique_id = 0 - self.node_id = '' # will be overwritten in the child class's __init__ + self._node_id = '' self.sug_area = '' - self._recv_buffer = bytearray(0) - self._send_buffer = bytearray(0) - self._forward_buffer = bytearray(0) self.new_data = {} self.state = State.init self.shutdown_started = False + @property + def node_id(self): + return self._node_id + + @node_id.setter + def node_id(self, value): + self._node_id = value + self.ifc.set_node_id(value) + ''' Empty methods, that have to be implemented in any child class which don't use asyncio @@ -109,10 +127,6 @@ class Message(metaclass=IterRegistry): # to our _recv_buffer return # pragma: no cover - def _update_header(self, _forward_buffer): - '''callback for updating the header of the forward buffer''' - pass # pragma: no cover - def _set_mqtt_timestamp(self, key, ts: float | None): if key not in self.new_data or \ not self.new_data[key]: @@ -128,6 +142,16 @@ class Message(metaclass=IterRegistry): # logger.info(f'update: key: {key} ts:{tstr}' self.db.set_db_def_value(info_id, round(ts)) + def _timeout(self) -> int: + if self.state == State.init or self.state == State.received: + to = self.MAX_START_TIME + elif self.state == State.up and \ + self.server_side and self.modbus_polling: + to = self.MAX_INV_IDLE_TIME + else: + to = self.MAX_DEF_IDLE_TIME + return to + ''' Our puplic methods ''' diff --git a/app/src/modbus.py b/app/src/modbus.py index 9a0c918..028699d 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -117,10 +117,6 @@ class Modbus(): while not self.que.empty(): self.que.get_nowait() - def __del__(self): - """log statistics on the deleting of a MODBUS instance""" - logging.debug(f'Modbus __del__:\n {self.counter}') - def build_msg(self, addr: int, func: int, reg: int, val: int, log_lvl=logging.DEBUG) -> None: """Build MODBUS RTU request frame and add it to the tx queue diff --git a/app/src/modbus_tcp.py b/app/src/modbus_tcp.py index 429710b..a0082ee 100644 --- a/app/src/modbus_tcp.py +++ b/app/src/modbus_tcp.py @@ -5,9 +5,11 @@ import asyncio if __name__ == "app.src.modbus_tcp": from app.src.config import Config from app.src.gen3plus.inverter_g3p import InverterG3P + from app.src.infos import Infos else: # pragma: no cover from config import Config from gen3plus.inverter_g3p import InverterG3P + from infos import Infos logger = logging.getLogger('conn') @@ -17,23 +19,26 @@ class ModbusConn(): self.host = host self.port = port self.addr = (host, port) - self.stream = None + self.inverter = None async def __aenter__(self) -> 'InverterG3P': '''Establish a client connection to the TSUN cloud''' connection = 
asyncio.open_connection(self.host, self.port) reader, writer = await connection - self.stream = InverterG3P(reader, writer, self.addr, - client_mode=True) - logging.info(f'[{self.stream.node_id}:{self.stream.conn_no}] ' + self.inverter = InverterG3P(reader, writer, + client_mode=True) + self.inverter.__enter__() + stream = self.inverter.local.stream + logging.info(f'[{stream.node_id}:{stream.conn_no}] ' f'Connected to {self.addr}') - self.stream.inc_counter('Inverter_Cnt') - await self.stream.publish_outstanding_mqtt() - return self.stream + Infos.inc_counter('Inverter_Cnt') + await self.inverter.local.ifc.publish_outstanding_mqtt() + return self.inverter async def __aexit__(self, exc_type, exc, tb): - self.stream.dec_counter('Inverter_Cnt') - await self.stream.publish_outstanding_mqtt() + Infos.dec_counter('Inverter_Cnt') + await self.inverter.local.ifc.publish_outstanding_mqtt() + self.inverter.__exit__(exc_type, exc, tb) class ModbusTcp(): @@ -58,20 +63,22 @@ class ModbusTcp(): '''Loop for receiving messages from the TSUN cloud (client-side)''' while True: try: - async with ModbusConn(host, port) as stream: + async with ModbusConn(host, port) as inverter: + stream = inverter.local.stream await stream.send_start_cmd(snr, host) - await stream.loop() + await stream.ifc.loop() logger.info(f'[{stream.node_id}:{stream.conn_no}] ' f'Connection closed - Shutdown: ' f'{stream.shutdown_started}') if stream.shutdown_started: return + del inverter # decrease ref counter after the with block except (ConnectionRefusedError, TimeoutError) as error: logging.debug(f'Inv-conn:{error}') except OSError as error: - if error.errno == 113: + if error.errno == 113: # pragma: no cover logging.debug(f'os-error:{error}') else: logging.info(f'os-error: {error}') diff --git a/app/src/mqtt.py b/app/src/mqtt.py index a51f039..83e0fd4 100644 --- a/app/src/mqtt.py +++ b/app/src/mqtt.py @@ -44,9 +44,6 @@ class Mqtt(metaclass=Singleton): def ha_restarts(self, value): self._ha_restarts = value - def __del__(self): - logger_mqtt.debug('MQTT: __del__') - async def close(self) -> None: logger_mqtt.debug('MQTT: close') self.task.cancel() diff --git a/app/src/protocol_ifc.py b/app/src/protocol_ifc.py new file mode 100644 index 0000000..46795e4 --- /dev/null +++ b/app/src/protocol_ifc.py @@ -0,0 +1,21 @@ +from abc import abstractmethod + +if __name__ == "app.src.protocol_ifc": + from app.src.iter_registry import AbstractIterMeta + from app.src.async_ifc import AsyncIfc +else: # pragma: no cover + from iter_registry import AbstractIterMeta + from async_ifc import AsyncIfc + + +class ProtocolIfc(metaclass=AbstractIterMeta): + _registry = [] + + @abstractmethod + def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, + client_mode: bool = False, id_str=b''): + pass # pragma: no cover + + @abstractmethod + def close(self): + pass # pragma: no cover diff --git a/app/src/inverter.py b/app/src/proxy.py similarity index 73% rename from app/src/inverter.py rename to app/src/proxy.py index 996fa0f..9b75c37 100644 --- a/app/src/inverter.py +++ b/app/src/proxy.py @@ -1,7 +1,8 @@ import asyncio import logging import json -if __name__ == "app.src.inverter": + +if __name__ == "app.src.proxy": from app.src.config import Config from app.src.mqtt import Mqtt from app.src.infos import Infos @@ -13,10 +14,32 @@ else: # pragma: no cover logger_mqtt = logging.getLogger('mqtt') -class Inverter(): +class Proxy(): + '''class Proxy is a baseclass + + The class has some class method for managing common resources like a + connection to the MQTT 
broker or proxy error counter which are common + for all inverter connection + + Instances of the class are connections to an inverter and can have an + optional link to an remote connection to the TSUN cloud. A remote + connection dies with the inverter connection. + + class methods: + class_init(): initialize the common resources of the proxy (MQTT + broker, Proxy DB, etc). Must be called before the + first inverter instance can be created + class_close(): release the common resources of the proxy. Should not + be called before any instances of the class are + destroyed + + methods: + create_remote(): Establish a client connection to the TSUN cloud + async_publ_mqtt(): Publish data to MQTT broker + ''' @classmethod def class_init(cls) -> None: - logging.debug('Inverter.class_init') + logging.debug('Proxy.class_init') # initialize the proxy statistics Infos.static_init() cls.db_stat = Infos() @@ -38,7 +61,7 @@ class Inverter(): # reset at midnight when you restart the proxy just before # midnight! inverters = Config.get('inverters') - # logger.debug(f'Inverters: {inverters}') + # logger.debug(f'Proxys: {inverters}') for inv in inverters.values(): if (type(inv) is dict): node_id = inv['node_id'] @@ -77,7 +100,7 @@ class Inverter(): @classmethod def class_close(cls, loop) -> None: # pragma: no cover - logging.debug('Inverter.class_close') + logging.debug('Proxy.class_close') logging.info('Close MQTT Task') loop.run_until_complete(cls.mqtt.close()) cls.mqtt = None diff --git a/app/src/server.py b/app/src/server.py index 95cc715..cda8501 100644 --- a/app/src/server.py +++ b/app/src/server.py @@ -5,8 +5,8 @@ import os from asyncio import StreamReader, StreamWriter from aiohttp import web from logging import config # noqa F401 -from messages import Message -from inverter import Inverter +from proxy import Proxy +from inverter_ifc import InverterIfc from gen3.inverter_g3 import InverterG3 from gen3plus.inverter_g3p import InverterG3P from scheduler import Schedule @@ -38,9 +38,9 @@ async def healthy(request): if proxy_is_up: # logging.info('web reqeust healthy()') - for stream in Message: + for inverter in InverterIfc: try: - res = stream.healthy() + res = inverter.healthy() if not res: return web.Response(status=503, text="I have a problem") except Exception as err: @@ -70,18 +70,11 @@ async def webserver(addr, port): logging.debug('HTTP cleanup done') -async def handle_client(reader: StreamReader, writer: StreamWriter): +async def handle_client(reader: StreamReader, writer: StreamWriter, inv_class): '''Handles a new incoming connection and starts an async loop''' - addr = writer.get_extra_info('peername') - await InverterG3(reader, writer, addr).server_loop(addr) - - -async def handle_client_v2(reader: StreamReader, writer: StreamWriter): - '''Handles a new incoming connection and starts an async loop''' - - addr = writer.get_extra_info('peername') - await InverterG3P(reader, writer, addr).server_loop(addr) + with inv_class(reader, writer) as inv: + await inv.local.ifc.server_loop() async def handle_shutdown(web_task): @@ -94,25 +87,13 @@ async def handle_shutdown(web_task): # # first, disc all open TCP connections gracefully # - for stream in Message: - stream.shutdown_started = True - try: - await asyncio.wait_for(stream.disc(), 2) - except Exception: - pass + for inverter in InverterIfc: + await inverter.disc(True) + logging.info('Proxy disconnecting done') # - # second, close all open TCP connections - # - for stream in Message: - stream.close() - - await asyncio.sleep(0.1) # give time for 
closing - logging.info('Proxy closing done') - - # - # third, cancel the web server + # second, cancel the web server # web_task.cancel() await web_task @@ -171,17 +152,19 @@ if __name__ == "__main__": ConfigErr = Config.class_init() if ConfigErr is not None: logging.info(f'ConfigErr: {ConfigErr}') - Inverter.class_init() + Proxy.class_init() Schedule.start() - mb_tcp = ModbusTcp(loop) + ModbusTcp(loop) # # Create tasks for our listening servers. These must be tasks! If we call # start_server directly out of our main task, the eventloop will be blocked # and we can't receive and handle the UNIX signals! # - loop.create_task(asyncio.start_server(handle_client, '0.0.0.0', 5005)) - loop.create_task(asyncio.start_server(handle_client_v2, '0.0.0.0', 10000)) + for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]: + loop.create_task(asyncio.start_server(lambda r, w, i=inv_class: + handle_client(r, w, i), + '0.0.0.0', port)) web_task = loop.create_task(webserver('0.0.0.0', 8127)) # @@ -202,7 +185,7 @@ if __name__ == "__main__": pass finally: logging.info("Event loop is stopped") - Inverter.class_close(loop) + Proxy.class_close(loop) logging.debug('Close event loop') loop.close() logging.info(f'Finally, exit Server "{serv_name}"') diff --git a/app/tests/test_async_stream.py b/app/tests/test_async_stream.py new file mode 100644 index 0000000..d7dcf12 --- /dev/null +++ b/app/tests/test_async_stream.py @@ -0,0 +1,532 @@ +# test_with_pytest.py +import pytest +import asyncio +import gc +import time + +from app.src.infos import Infos +from app.src.inverter_base import InverterBase +from app.src.async_stream import AsyncStreamServer, AsyncStreamClient, StreamPtr +from app.src.messages import Message +from app.tests.test_modbus_tcp import FakeReader, FakeWriter +from app.tests.test_inverter_base import config_conn, patch_open_connection + +pytest_plugins = ('pytest_asyncio',) + +# initialize the proxy statistics +Infos.static_init() + +class FakeProto(Message): + def __init__(self, server_side): + super().__init__(server_side, None, 10) + self.conn_no = 0 + +def fake_reader_fwd(): + reader = FakeReader() + reader.test = FakeReader.RD_TEST_13_BYTES + reader.on_recv.set() + return reader + +def test_timeout_cb(): + reader = FakeReader() + writer = FakeWriter() + def timeout(): + return 13 + + ifc = AsyncStreamClient(reader, writer, None, None) + assert 360 == ifc._AsyncStream__timeout() + ifc.prot_set_timeout_cb(timeout) + assert 13 == ifc._AsyncStream__timeout() + ifc.prot_set_timeout_cb(None) + assert 360 == ifc._AsyncStream__timeout() + + # call healthy outside the contexter manager (__exit__() was called) + assert ifc.healthy() + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +def test_health(): + reader = FakeReader() + writer = FakeWriter() + + ifc = AsyncStreamClient(reader, writer, None, None) + ifc.proc_start = time.time() + assert ifc.healthy() + ifc.proc_start = time.time() -10 + assert not ifc.healthy() + ifc.proc_start = None + assert ifc.healthy() + + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_close_cb(): + assert asyncio.get_running_loop() + reader = FakeReader() + writer = FakeWriter() + cnt = 0 + def timeout(): + return 0.1 + def closed(): + nonlocal cnt + nonlocal ifc + ifc.close() # clears the closed callback + cnt += 1 + + cnt = 0 + ifc = AsyncStreamClient(reader, 
writer, None, closed) + ifc.prot_set_timeout_cb(timeout) + await ifc.client_loop('') + assert cnt == 1 + ifc.prot_set_timeout_cb(timeout) + await ifc.client_loop('') + assert cnt == 1 # check that the closed method would not be called + del ifc + + cnt = 0 + ifc = AsyncStreamClient(reader, writer, None, None) + ifc.prot_set_timeout_cb(timeout) + await ifc.client_loop('') + assert cnt == 0 + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_read(): + global test + assert asyncio.get_running_loop() + reader = FakeReader() + reader.test = FakeReader.RD_TEST_13_BYTES + reader.on_recv.set() + writer = FakeWriter() + cnt = 0 + def timeout(): + return 1 + def closed(): + nonlocal cnt + nonlocal ifc + ifc.close() # clears the closed callback + cnt += 1 + def app_read(): + nonlocal ifc + ifc.proc_start -= 3 + return 0.01 # async wait of 0.01 + cnt = 0 + ifc = AsyncStreamClient(reader, writer, None, closed) + ifc.proc_max = 0 + ifc.prot_set_timeout_cb(timeout) + ifc.rx_set_cb(app_read) + await ifc.client_loop('') + print('End loop') + assert ifc.proc_max >= 3 + assert 13 == ifc.rx_len() + assert cnt == 1 + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_write(): + global test + assert asyncio.get_running_loop() + reader = FakeReader() + reader.test = FakeReader.RD_TEST_13_BYTES + reader.on_recv.set() + writer = FakeWriter() + cnt = 0 + def timeout(): + return 1 + def closed(): + nonlocal cnt + nonlocal ifc + ifc.close() # clears the closed callback + cnt += 1 + def app_read(): + nonlocal ifc + ifc.proc_start -= 3 + return 0.01 # async wait of 0.01 + + cnt = 0 + ifc = AsyncStreamClient(reader, writer, None, closed) + ifc.proc_max = 10 + ifc.prot_set_timeout_cb(timeout) + ifc.rx_set_cb(app_read) + ifc.tx_add(b'test-data-resp') + assert 14 == ifc.tx_len() + await ifc.client_loop('') + print('End loop') + assert ifc.proc_max >= 3 + assert 13 == ifc.rx_len() + assert 0 == ifc.tx_len() + assert cnt == 1 + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_publ_mqtt_cb(): + assert asyncio.get_running_loop() + reader = FakeReader() + reader.test = FakeReader.RD_TEST_13_BYTES + reader.on_recv.set() + writer = FakeWriter() + cnt = 0 + def timeout(): + return 0.1 + async def publ_mqtt(): + nonlocal cnt + nonlocal ifc + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(reader, writer, publ_mqtt, None, None) + assert ifc.async_publ_mqtt + ifc.prot_set_timeout_cb(timeout) + await ifc.server_loop() + assert cnt == 3 # 2 calls in server_loop() and 1 in loop() + assert ifc.async_publ_mqtt + ifc.close() # clears the closed callback + assert not ifc.async_publ_mqtt + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_create_remote_cb(): + assert asyncio.get_running_loop() + reader = FakeReader() + writer = FakeWriter() + cnt = 0 + def timeout(): + return 0.1 + async def create_remote(): + nonlocal cnt + nonlocal ifc + ifc.close() # clears the closed callback + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(reader, writer, None, create_remote, None) + assert ifc.create_remote + await ifc.create_remote() + assert cnt == 1 + 
ifc.prot_set_timeout_cb(timeout) + await ifc.server_loop() + assert not ifc.create_remote + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_sw_exception(): + global test + assert asyncio.get_running_loop() + reader = FakeReader() + reader.test = FakeReader.RD_TEST_SW_EXCEPT + reader.on_recv.set() + writer = FakeWriter() + cnt = 0 + def timeout(): + return 1 + def closed(): + nonlocal cnt + nonlocal ifc + ifc.close() # clears the closed callback + cnt += 1 + cnt = 0 + ifc = AsyncStreamClient(reader, writer, None, closed) + ifc.prot_set_timeout_cb(timeout) + await ifc.client_loop('') + print('End loop') + assert cnt == 1 + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_os_error(): + global test + assert asyncio.get_running_loop() + reader = FakeReader() + reader.test = FakeReader.RD_TEST_OS_ERROR + + reader.on_recv.set() + writer = FakeWriter() + cnt = 0 + def timeout(): + return 1 + def closed(): + nonlocal cnt + nonlocal ifc + ifc.close() # clears the closed callback + cnt += 1 + cnt = 0 + ifc = AsyncStreamClient(reader, writer, None, closed) + ifc.prot_set_timeout_cb(timeout) + await ifc.client_loop('') + print('End loop') + assert cnt == 1 + del ifc + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +class TestType(): + FWD_NO_EXCPT = 1 + FWD_SW_EXCPT = 2 + FWD_TIMEOUT = 3 + FWD_OS_ERROR = 4 + FWD_OS_ERROR_NO_STREAM = 5 + FWD_RUNTIME_ERROR = 6 + FWD_RUNTIME_ERROR_NO_STREAM = 7 + +def create_remote(remote, test_type, with_close_hdr:bool = False): + def update_hdr(buf): + return + def callback(): + if test_type == TestType.FWD_SW_EXCPT: + remote.unknown_var += 1 + elif test_type == TestType.FWD_TIMEOUT: + raise TimeoutError + elif test_type == TestType.FWD_OS_ERROR: + raise ConnectionRefusedError + elif test_type == TestType.FWD_OS_ERROR_NO_STREAM: + remote.stream = None + raise ConnectionRefusedError + elif test_type == TestType.FWD_RUNTIME_ERROR: + raise RuntimeError("Peer closed") + elif test_type == TestType.FWD_RUNTIME_ERROR_NO_STREAM: + remote.stream = None + raise RuntimeError("Peer closed") + + def close(): + return + if with_close_hdr: + close_hndl = close + else: + close_hndl = None + + remote.ifc = AsyncStreamClient( + FakeReader(), FakeWriter(), StreamPtr(None), close_hndl) + remote.ifc.prot_set_update_header_cb(update_hdr) + remote.ifc.prot_set_init_new_client_conn_cb(callback) + remote.stream = FakeProto(False) + +@pytest.mark.asyncio +async def test_forward(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote, ifc + create_remote(remote, TestType.FWD_NO_EXCPT) + ifc.fwd_add(b'test-forward_msg2 ') + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_with_conn(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote, ifc + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + create_remote(remote, TestType.FWD_NO_EXCPT) + ifc.fwd_add(b'test-forward_msg') + await 
ifc.server_loop() + assert cnt == 0 + del ifc + +@pytest.mark.asyncio +async def test_forward_no_conn(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_sw_except(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote + create_remote(remote, TestType.FWD_SW_EXCPT) + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_os_error(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote + create_remote(remote, TestType.FWD_OS_ERROR) + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_os_error2(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote + create_remote(remote, TestType.FWD_OS_ERROR, True) + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_os_error3(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote + create_remote(remote, TestType.FWD_OS_ERROR_NO_STREAM) + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_runtime_error(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote + create_remote(remote, TestType.FWD_RUNTIME_ERROR) + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_runtime_error2(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote + create_remote(remote, TestType.FWD_RUNTIME_ERROR, True) + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_runtime_error3(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + async def _create_remote(): + nonlocal cnt, remote + create_remote(remote, TestType.FWD_RUNTIME_ERROR_NO_STREAM, True) + cnt += 1 + + cnt = 0 + ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote) + ifc.fwd_add(b'test-forward_msg') + await ifc.server_loop() + assert cnt == 1 + del ifc 
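Note on the leak checks above: the repeated pattern `for inv in InverterBase: ... assert cnt == 0` at the end of each test relies on the weakref registry this patch introduces in iter_registry.py (AbstractIterMeta) together with the `self._registry.append(weakref.ref(self))` call in InverterBase.__init__. Iterating the class yields only instances that are still strongly referenced, so a closed connection must become unreachable for the test to pass. The following is a minimal standalone sketch of that pattern; `Tracked` is a hypothetical example class, not part of the proxy code, and the gc.collect() mirrors the explicit collection done in InverterBase.__exit__:

    import gc
    import weakref
    from abc import ABCMeta

    class AbstractIterMeta(ABCMeta):
        def __iter__(cls):
            # yield only instances whose weakref is still alive
            for ref in cls._registry:
                obj = ref()
                if obj is not None:
                    yield obj

    class Tracked(metaclass=AbstractIterMeta):   # hypothetical example class
        _registry = []

        def __init__(self):
            # register a weak reference so the registry never keeps
            # instances alive on its own
            self._registry.append(weakref.ref(self))

    t = Tracked()
    assert sum(1 for _ in Tracked) == 1

    del t            # drop the last strong reference
    gc.collect()     # make sure cyclic garbage is released as well
    assert sum(1 for _ in Tracked) == 0   # registry now yields nothing

Dead weakrefs simply remain in `_registry` and are skipped during iteration, which is the same behaviour the proxy code shows; the registry is a leak detector, not an owner of the connections.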
diff --git a/app/tests/test_byte_fifo.py b/app/tests/test_byte_fifo.py new file mode 100644 index 0000000..1544cc0 --- /dev/null +++ b/app/tests/test_byte_fifo.py @@ -0,0 +1,43 @@ +# test_with_pytest.py + +from app.src.byte_fifo import ByteFifo + +def test_fifo(): + read = ByteFifo() + assert 0 == len(read) + read += b'12' + assert 2 == len(read) + read += bytearray("34", encoding='UTF8') + assert 4 == len(read) + assert b'12' == read.peek(2) + assert 4 == len(read) + assert b'1234' == read.peek() + assert 4 == len(read) + assert b'12' == read.get(2) + assert 2 == len(read) + assert b'34' == read.get() + assert 0 == len(read) + +def test_fifo_fmt(): + read = ByteFifo() + read += b'1234' + assert b'1234' == read.peek() + assert " 0000 | 31 32 33 34 | 1234" == f'{read}' + +def test_fifo_observer(): + read = ByteFifo() + + def _read(): + assert b'1234' == read.get(4) + + read += b'12' + assert 2 == len(read) + read() + read.reg_trigger(_read) + read += b'34' + assert 4 == len(read) + read() + assert 0 == len(read) + assert b'' == read.peek(2) + assert b'' == read.get(2) + assert 0 == len(read) diff --git a/app/tests/test_connection_g3.py b/app/tests/test_connection_g3.py deleted file mode 100644 index 452bf18..0000000 --- a/app/tests/test_connection_g3.py +++ /dev/null @@ -1,84 +0,0 @@ -# test_with_pytest.py -import pytest -import asyncio - -from mock import patch -from app.src.async_stream import AsyncStream -from app.src.gen3.connection_g3 import ConnectionG3 -from app.src.gen3.talent import Talent - -@pytest.fixture -def patch_async_init(): - with patch.object(AsyncStream, '__init__') as conn: - yield conn - -@pytest.fixture -def patch_talent_init(): - with patch.object(Talent, '__init__') as conn: - yield conn - -@pytest.fixture -def patch_healthy(): - with patch.object(AsyncStream, 'healthy') as conn: - yield conn - -@pytest.fixture -def patch_async_close(): - with patch.object(AsyncStream, 'close') as conn: - yield conn - -@pytest.fixture -def patch_talent_close(): - with patch.object(Talent, 'close') as conn: - yield conn - -class FakeReader(): - def __init__(self): - self.on_recv = asyncio.Event() - async def read(self, max_len: int): - await self.on_recv.wait() - return b'' - def feed_eof(self): - return - - -class FakeWriter(): - def write(self, buf: bytes): - return - def get_extra_info(self, sel: str): - if sel == 'peername': - return 'remote.intern' - elif sel == 'sockname': - return 'sock:1234' - assert False - def is_closing(self): - return False - def close(self): - return - async def wait_closed(self): - return - - - -def test_method_calls(patch_async_init, patch_talent_init, patch_healthy, patch_async_close, patch_talent_close): - spy1 = patch_async_init - spy2 = patch_talent_init - spy3 = patch_healthy - spy4 = patch_async_close - spy5 = patch_talent_close - reader = FakeReader() - writer = FakeWriter() - id_str = "id_string" - addr = ('proxy.local', 10000) - conn = ConnectionG3(reader, writer, addr, - remote_stream= None, server_side=True, id_str=id_str) - spy1.assert_called_once_with(conn, reader, writer, addr) - spy2.assert_called_once_with(conn, True, id_str) - conn.healthy() - - spy3.assert_called_once() - - conn.close() - spy4.assert_called_once() - spy5.assert_called_once() - diff --git a/app/tests/test_connection_g3p.py b/app/tests/test_connection_g3p.py deleted file mode 100644 index 67607f1..0000000 --- a/app/tests/test_connection_g3p.py +++ /dev/null @@ -1,89 +0,0 @@ -# test_with_pytest.py -import pytest -import asyncio - -from mock import patch -from 
app.src.singleton import Singleton -from app.src.async_stream import AsyncStream -from app.src.gen3plus.connection_g3p import ConnectionG3P -from app.src.gen3plus.solarman_v5 import SolarmanV5 - -@pytest.fixture -def patch_async_init(): - with patch.object(AsyncStream, '__init__') as conn: - yield conn - -@pytest.fixture -def patch_solarman_init(): - with patch.object(SolarmanV5, '__init__') as conn: - yield conn - -@pytest.fixture(scope="module", autouse=True) -def module_init(): - Singleton._instances.clear() - yield - -@pytest.fixture -def patch_healthy(): - with patch.object(AsyncStream, 'healthy') as conn: - yield conn - -@pytest.fixture -def patch_async_close(): - with patch.object(AsyncStream, 'close') as conn: - yield conn - -@pytest.fixture -def patch_solarman_close(): - with patch.object(SolarmanV5, 'close') as conn: - yield conn - -class FakeReader(): - def __init__(self): - self.on_recv = asyncio.Event() - async def read(self, max_len: int): - await self.on_recv.wait() - return b'' - def feed_eof(self): - return - - -class FakeWriter(): - def write(self, buf: bytes): - return - def get_extra_info(self, sel: str): - if sel == 'peername': - return 'remote.intern' - elif sel == 'sockname': - return 'sock:1234' - assert False - def is_closing(self): - return False - def close(self): - return - async def wait_closed(self): - return - - - -def test_method_calls(patch_async_init, patch_solarman_init, patch_healthy, patch_async_close, patch_solarman_close): - spy1 = patch_async_init - spy2 = patch_solarman_init - spy3 = patch_healthy - spy4 = patch_async_close - spy5 = patch_solarman_close - reader = FakeReader() - writer = FakeWriter() - addr = ('proxy.local', 10000) - conn = ConnectionG3P(reader, writer, addr, - remote_stream= None, server_side=True, client_mode=False) - spy1.assert_called_once_with(conn, reader, writer, addr) - spy2.assert_called_once_with(conn, True, False) - conn.healthy() - - spy3.assert_called_once() - - conn.close() - spy4.assert_called_once() - spy5.assert_called_once() - diff --git a/app/tests/test_inverter_base.py b/app/tests/test_inverter_base.py new file mode 100644 index 0000000..054d729 --- /dev/null +++ b/app/tests/test_inverter_base.py @@ -0,0 +1,304 @@ +# test_with_pytest.py +import pytest +import asyncio +import gc + +from mock import patch +from enum import Enum +from app.src.infos import Infos +from app.src.config import Config +from app.src.gen3.talent import Talent +from app.src.inverter_base import InverterBase +from app.src.singleton import Singleton +from app.src.async_stream import AsyncStream, AsyncStreamClient + +from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname + +pytest_plugins = ('pytest_asyncio',) + +# initialize the proxy statistics +Infos.static_init() + +@pytest.fixture +def config_conn(): + Config.act_config = { + 'mqtt':{ + 'host': test_hostname, + 'port': test_port, + 'user': '', + 'passwd': '' + }, + 'ha':{ + 'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'test_1', + 'proxy_unique_id': '' + }, + 'tsun':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234}, 'inverters':{'allow_all':True} + } + +@pytest.fixture(scope="module", autouse=True) +def module_init(): + Singleton._instances.clear() + yield + +class FakeReader(): + def __init__(self): + self.on_recv = asyncio.Event() + async def read(self, max_len: int): + await self.on_recv.wait() + return b'' + def feed_eof(self): + return + + +class 
FakeWriter(): + def write(self, buf: bytes): + return + def get_extra_info(self, sel: str): + if sel == 'peername': + return 'remote.intern' + elif sel == 'sockname': + return 'sock:1234' + assert False + def is_closing(self): + return False + def close(self): + return + async def wait_closed(self): + return + +class TestType(Enum): + RD_TEST_0_BYTES = 1 + RD_TEST_TIMEOUT = 2 + RD_TEST_EXCEPT = 3 + + +test = TestType.RD_TEST_0_BYTES + +@pytest.fixture +def patch_open_connection(): + async def new_conn(conn): + await asyncio.sleep(0) + return FakeReader(), FakeWriter() + + def new_open(host: str, port: int): + global test + if test == TestType.RD_TEST_TIMEOUT: + raise ConnectionRefusedError + elif test == TestType.RD_TEST_EXCEPT: + raise ValueError("Value cannot be negative") # Compliant + return new_conn(None) + + with patch.object(asyncio, 'open_connection', new_open) as conn: + yield conn + + +@pytest.fixture +def patch_healthy(): + with patch.object(AsyncStream, 'healthy') as conn: + yield conn + +@pytest.fixture +def patch_unhealthy(): + def new_healthy(self): + return False + with patch.object(AsyncStream, 'healthy', new_healthy) as conn: + yield conn +@pytest.fixture +def patch_unhealthy_remote(): + def new_healthy(self): + return False + with patch.object(AsyncStreamClient, 'healthy', new_healthy) as conn: + yield conn + +def test_inverter_iter(): + InverterBase._registry.clear() + cnt = 0 + reader = FakeReader() + writer = FakeWriter() + + with InverterBase(reader, writer, 'tsun', Talent) as inverter: + for inv in InverterBase: + assert inv == inverter + cnt += 1 + del inv + del inverter + assert cnt == 1 + + for inv in InverterBase: + assert False + +def test_method_calls(patch_healthy): + spy = patch_healthy + InverterBase._registry.clear() + reader = FakeReader() + writer = FakeWriter() + + with InverterBase(reader, writer, 'tsun', Talent) as inverter: + assert inverter.local.stream + assert inverter.local.ifc + # call healthy inside the context manager + for inv in InverterBase: + assert inv.healthy() + del inv + spy.assert_called_once() + + # outside the context manager the healthy() method of AsyncStream is not reachable + cnt = 0 + for inv in InverterBase: + assert inv.healthy() + cnt += 1 + del inv + assert cnt == 1 + spy.assert_called_once() # counter doesn't increase and stays at one!
+ + del inverter + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +def test_unhealthy(patch_unhealthy): + _ = patch_unhealthy + InverterBase._registry.clear() + reader = FakeReader() + writer = FakeWriter() + + with InverterBase(reader, writer, 'tsun', Talent) as inverter: + assert inverter.local.stream + assert inverter.local.ifc + # call healthy inside the context manager + assert not inverter.healthy() + + # outside the context manager the unhealthy AsyncStream is released + cnt = 0 + for inv in InverterBase: + assert inv.healthy() # inverter is healthy again (without the unhealthy AsyncStream) + cnt += 1 + del inv + assert cnt == 1 + + del inverter + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +def test_unhealthy_remote(patch_unhealthy_remote): + _ = patch_unhealthy_remote + InverterBase._registry.clear() + reader = FakeReader() + writer = FakeWriter() + + with InverterBase(reader, writer, 'tsun', Talent) as inverter: + assert inverter.local.stream + assert inverter.local.ifc + # call healthy inside the context manager + assert not inverter.healthy() + + # outside the context manager the unhealthy AsyncStream is released + cnt = 0 + for inv in InverterBase: + assert inv.healthy() # inverter is healthy again (without the unhealthy AsyncStream) + cnt += 1 + del inv + assert cnt == 1 + + del inverter + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_remote_conn(config_conn, patch_open_connection): + _ = config_conn + _ = patch_open_connection + assert asyncio.get_running_loop() + reader = FakeReader() + writer = FakeWriter() + + with InverterBase(reader, writer, 'tsun', Talent) as inverter: + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream + assert inverter.remote.ifc + # call healthy inside the context manager + assert inverter.healthy() + + # call healthy outside the context manager (__exit__() was called) + assert inverter.healthy() + del inverter + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_unhealthy_remote(config_conn, patch_open_connection, patch_unhealthy_remote): + _ = config_conn + _ = patch_open_connection + _ = patch_unhealthy_remote + assert asyncio.get_running_loop() + InverterBase._registry.clear() + reader = FakeReader() + writer = FakeWriter() + + with InverterBase(reader, writer, 'tsun', Talent) as inverter: + assert inverter.local.stream + assert inverter.local.ifc + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream + assert inverter.remote.ifc + assert inverter.local.ifc.healthy() + assert not inverter.remote.ifc.healthy() + # call healthy inside the context manager + assert not inverter.healthy() + + # outside the context manager the unhealthy AsyncStream is released + cnt = 0 + for inv in InverterBase: + assert inv.healthy() # inverter is healthy again (without the unhealthy AsyncStream) + cnt += 1 + del inv + assert cnt == 1 + + del inverter + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 + +@pytest.mark.asyncio +async def test_remote_disc(config_conn, patch_open_connection): + _ = config_conn + _ = patch_open_connection + assert asyncio.get_running_loop() + reader
= FakeReader() + writer = FakeWriter() + + with InverterBase(reader, writer, 'tsun', Talent) as inverter: + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream + # call disc inside the contexter manager + await inverter.disc() + + # call disc outside the contexter manager (__exit__() was called) + await inverter.disc() + del inverter + + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 diff --git a/app/tests/test_inverter_g3.py b/app/tests/test_inverter_g3.py index 017e897..45438bb 100644 --- a/app/tests/test_inverter_g3.py +++ b/app/tests/test_inverter_g3.py @@ -1,15 +1,17 @@ # test_with_pytest.py import pytest import asyncio +import sys,gc from mock import patch from enum import Enum from app.src.infos import Infos from app.src.config import Config -from app.src.inverter import Inverter +from app.src.proxy import Proxy +from app.src.inverter_base import InverterBase from app.src.singleton import Singleton -from app.src.gen3.connection_g3 import ConnectionG3 from app.src.gen3.inverter_g3 import InverterG3 +from app.src.async_stream import AsyncStream from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname @@ -42,16 +44,6 @@ def module_init(): Singleton._instances.clear() yield -@pytest.fixture -def patch_conn_init(): - with patch.object(ConnectionG3, '__init__', return_value= None) as conn: - yield conn - -@pytest.fixture -def patch_conn_close(): - with patch.object(ConnectionG3, 'close') as conn: - yield conn - class FakeReader(): def __init__(self): self.on_recv = asyncio.Event() @@ -103,133 +95,132 @@ def patch_open_connection(): with patch.object(asyncio, 'open_connection', new_open) as conn: yield conn +@pytest.fixture +def patch_healthy(): + with patch.object(AsyncStream, 'healthy') as conn: + yield conn -def test_method_calls(patch_conn_init, patch_conn_close): - spy1 = patch_conn_init - spy2 = patch_conn_close +def test_method_calls(patch_healthy): + spy = patch_healthy reader = FakeReader() writer = FakeWriter() - addr = ('proxy.local', 10000) - inverter = InverterG3(reader, writer, addr) - inverter.l_addr = '' - inverter.r_addr = '' + InverterBase._registry.clear() - spy1.assert_called_once() - spy1.assert_called_once_with(reader, writer, addr, None, True) - - inverter.close() - spy2.assert_called_once() + with InverterG3(reader, writer) as inverter: + assert inverter.local.stream + assert inverter.local.ifc + for inv in InverterBase: + inv.healthy() + del inv + spy.assert_called_once() + del inverter + cnt = 0 + for inv in InverterBase: + cnt += 1 + assert cnt == 0 @pytest.mark.asyncio -async def test_remote_conn(config_conn, patch_open_connection, patch_conn_close): +async def test_remote_conn(config_conn, patch_open_connection): _ = config_conn _ = patch_open_connection assert asyncio.get_running_loop() - spy1 = patch_conn_close + with InverterG3(FakeReader(), FakeWriter()) as inverter: + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream + del inverter - inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) - - await inverter.async_create_remote() - await asyncio.sleep(0) - assert inverter.remote_stream - inverter.close() - spy1.assert_called_once() + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 @pytest.mark.asyncio -async def test_remote_except(config_conn, patch_open_connection, patch_conn_close): 
+async def test_remote_except(config_conn, patch_open_connection): _ = config_conn _ = patch_open_connection assert asyncio.get_running_loop() - spy1 = patch_conn_close - global test test = TestType.RD_TEST_TIMEOUT - inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) + with InverterG3(FakeReader(), FakeWriter()) as inverter: + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream==None - await inverter.async_create_remote() - await asyncio.sleep(0) - assert inverter.remote_stream==None + test = TestType.RD_TEST_EXCEPT + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream==None + del inverter - test = TestType.RD_TEST_EXCEPT - await inverter.async_create_remote() - await asyncio.sleep(0) - assert inverter.remote_stream==None - inverter.close() - spy1.assert_called_once() + cnt = 0 + for inv in InverterBase: + print(f'InverterBase refs:{gc.get_referrers(inv)}') + cnt += 1 + assert cnt == 0 @pytest.mark.asyncio -async def test_mqtt_publish(config_conn, patch_open_connection, patch_conn_close): +async def test_mqtt_publish(config_conn, patch_open_connection): _ = config_conn _ = patch_open_connection assert asyncio.get_running_loop() - spy1 = patch_conn_close - - Inverter.class_init() + Proxy.class_init() - inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) - inverter._Talent__set_serial_no(serial_no= "123344") - - inverter.new_data['inverter'] = True - inverter.db.db['inverter'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['inverter'] == False + with InverterG3(FakeReader(), FakeWriter()) as inverter: + stream = inverter.local.stream + await inverter.async_publ_mqtt() # check call with invalid unique_id + stream._Talent__set_serial_no(serial_no= "123344") - inverter.new_data['env'] = True - inverter.db.db['env'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['env'] == False + stream.new_data['inverter'] = True + stream.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert stream.new_data['inverter'] == False - Infos.new_stat_data['proxy'] = True - await inverter.async_publ_mqtt() - assert Infos.new_stat_data['proxy'] == False + stream.new_data['env'] = True + stream.db.db['env'] = {} + await inverter.async_publ_mqtt() + assert stream.new_data['env'] == False - inverter.close() - spy1.assert_called_once() + Infos.new_stat_data['proxy'] = True + await inverter.async_publ_mqtt() + assert Infos.new_stat_data['proxy'] == False @pytest.mark.asyncio -async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err, patch_conn_close): +async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err): _ = config_conn _ = patch_open_connection _ = patch_mqtt_err assert asyncio.get_running_loop() - spy1 = patch_conn_close - - Inverter.class_init() + Proxy.class_init() - inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) - inverter._Talent__set_serial_no(serial_no= "123344") - - inverter.new_data['inverter'] = True - inverter.db.db['inverter'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['inverter'] == True - - inverter.close() - spy1.assert_called_once() + with InverterG3(FakeReader(), FakeWriter()) as inverter: + stream = inverter.local.stream + stream._Talent__set_serial_no(serial_no= "123344") + stream.new_data['inverter'] = True + stream.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert stream.new_data['inverter'] == True @pytest.mark.asyncio -async 
def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except, patch_conn_close): +async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except): _ = config_conn _ = patch_open_connection _ = patch_mqtt_except assert asyncio.get_running_loop() - spy1 = patch_conn_close - - Inverter.class_init() + Proxy.class_init() - inverter = InverterG3(FakeReader(), FakeWriter(), ('proxy.local', 10000)) - inverter._Talent__set_serial_no(serial_no= "123344") - - inverter.new_data['inverter'] = True - inverter.db.db['inverter'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['inverter'] == True + with InverterG3(FakeReader(), FakeWriter()) as inverter: + stream = inverter.local.stream + stream._Talent__set_serial_no(serial_no= "123344") - inverter.close() - spy1.assert_called_once() + stream.new_data['inverter'] = True + stream.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert stream.new_data['inverter'] == True diff --git a/app/tests/test_inverter_g3p.py b/app/tests/test_inverter_g3p.py index 07d1160..0f47cbe 100644 --- a/app/tests/test_inverter_g3p.py +++ b/app/tests/test_inverter_g3p.py @@ -6,9 +6,9 @@ from mock import patch from enum import Enum from app.src.infos import Infos from app.src.config import Config -from app.src.inverter import Inverter +from app.src.proxy import Proxy +from app.src.inverter_base import InverterBase from app.src.singleton import Singleton -from app.src.gen3plus.connection_g3p import ConnectionG3P from app.src.gen3plus.inverter_g3p import InverterG3P from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname @@ -43,16 +43,6 @@ def module_init(): Singleton._instances.clear() yield -@pytest.fixture -def patch_conn_init(): - with patch.object(ConnectionG3P, '__init__', return_value= None) as conn: - yield conn - -@pytest.fixture -def patch_conn_close(): - with patch.object(ConnectionG3P, 'close') as conn: - yield conn - class FakeReader(): def __init__(self): self.on_recv = asyncio.Event() @@ -104,133 +94,103 @@ def patch_open_connection(): with patch.object(asyncio, 'open_connection', new_open) as conn: yield conn - -def test_method_calls(patch_conn_init, patch_conn_close): - spy1 = patch_conn_init - spy2 = patch_conn_close +def test_method_calls(): reader = FakeReader() writer = FakeWriter() - addr = ('proxy.local', 10000) - inverter = InverterG3P(reader, writer, addr, client_mode=False) - inverter.l_addr = '' - inverter.r_addr = '' + InverterBase._registry.clear() - spy1.assert_called_once() - spy1.assert_called_once_with(reader, writer, addr, None, server_side=True, client_mode=False) - - inverter.close() - spy2.assert_called_once() + with InverterG3P(reader, writer, client_mode=False) as inverter: + assert inverter.local.stream + assert inverter.local.ifc @pytest.mark.asyncio -async def test_remote_conn(config_conn, patch_open_connection, patch_conn_close): +async def test_remote_conn(config_conn, patch_open_connection): _ = config_conn _ = patch_open_connection assert asyncio.get_running_loop() - spy1 = patch_conn_close - - inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) - - await inverter.async_create_remote() - await asyncio.sleep(0) - assert inverter.remote_stream - inverter.close() - spy1.assert_called_once() + with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream @pytest.mark.asyncio -async def 
test_remote_except(config_conn, patch_open_connection, patch_conn_close): +async def test_remote_except(config_conn, patch_open_connection): _ = config_conn _ = patch_open_connection assert asyncio.get_running_loop() - - spy1 = patch_conn_close global test test = TestType.RD_TEST_TIMEOUT - inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) + with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream==None - await inverter.async_create_remote() - await asyncio.sleep(0) - assert inverter.remote_stream==None - - test = TestType.RD_TEST_EXCEPT - await inverter.async_create_remote() - await asyncio.sleep(0) - assert inverter.remote_stream==None - inverter.close() - spy1.assert_called_once() + test = TestType.RD_TEST_EXCEPT + await inverter.create_remote() + await asyncio.sleep(0) + assert inverter.remote.stream==None @pytest.mark.asyncio -async def test_mqtt_publish(config_conn, patch_open_connection, patch_conn_close): +async def test_mqtt_publish(config_conn, patch_open_connection): _ = config_conn _ = patch_open_connection assert asyncio.get_running_loop() - spy1 = patch_conn_close - - Inverter.class_init() + Proxy.class_init() - inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) - inverter._SolarmanV5__set_serial_no(snr= 123344) - - inverter.new_data['inverter'] = True - inverter.db.db['inverter'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['inverter'] == False + with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: + stream = inverter.local.stream + await inverter.async_publ_mqtt() # check call with invalid unique_id + stream._SolarmanV5__set_serial_no(snr= 123344) - inverter.new_data['env'] = True - inverter.db.db['env'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['env'] == False + stream.new_data['inverter'] = True + stream.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert stream.new_data['inverter'] == False - Infos.new_stat_data['proxy'] = True - await inverter.async_publ_mqtt() - assert Infos.new_stat_data['proxy'] == False + stream.new_data['env'] = True + stream.db.db['env'] = {} + await inverter.async_publ_mqtt() + assert stream.new_data['env'] == False - inverter.close() - spy1.assert_called_once() + Infos.new_stat_data['proxy'] = True + await inverter.async_publ_mqtt() + assert Infos.new_stat_data['proxy'] == False @pytest.mark.asyncio -async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err, patch_conn_close): +async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err): _ = config_conn _ = patch_open_connection _ = patch_mqtt_err assert asyncio.get_running_loop() - spy1 = patch_conn_close - - Inverter.class_init() + Proxy.class_init() - inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) - inverter._SolarmanV5__set_serial_no(snr= 123344) - - inverter.new_data['inverter'] = True - inverter.db.db['inverter'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['inverter'] == True - - inverter.close() - spy1.assert_called_once() + with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: + stream = inverter.local.stream + stream._SolarmanV5__set_serial_no(snr= 123344) + stream.new_data['inverter'] = True + stream.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert 
stream.new_data['inverter'] == True @pytest.mark.asyncio -async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except, patch_conn_close): +async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except): _ = config_conn _ = patch_open_connection _ = patch_mqtt_except assert asyncio.get_running_loop() - spy1 = patch_conn_close - - Inverter.class_init() + Proxy.class_init() - inverter = InverterG3P(FakeReader(), FakeWriter(), ('proxy.local', 10000), client_mode=False) - inverter._SolarmanV5__set_serial_no(snr= 123344) - - inverter.new_data['inverter'] = True - inverter.db.db['inverter'] = {} - await inverter.async_publ_mqtt() - assert inverter.new_data['inverter'] == True + with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: + stream = inverter.local.stream + stream._SolarmanV5__set_serial_no(snr= 123344) - inverter.close() - spy1.assert_called_once() + stream.new_data['inverter'] = True + stream.db.db['inverter'] = {} + await inverter.async_publ_mqtt() + assert stream.new_data['inverter'] == True diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index f68e031..93ecfa0 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -9,12 +9,9 @@ from app.src.singleton import Singleton from app.src.config import Config from app.src.infos import Infos from app.src.mqtt import Mqtt +from app.src.inverter_base import InverterBase from app.src.messages import Message, State -from app.src.inverter import Inverter -from app.src.modbus_tcp import ModbusConn, ModbusTcp -from app.src.mqtt import Mqtt -from app.src.messages import Message, State -from app.src.inverter import Inverter +from app.src.proxy import Proxy from app.src.modbus_tcp import ModbusConn, ModbusTcp @@ -75,55 +72,93 @@ def config_conn(test_hostname, test_port): } -class TestType(Enum): +class FakeReader(): RD_TEST_0_BYTES = 1 RD_TEST_TIMEOUT = 2 + RD_TEST_13_BYTES = 3 + RD_TEST_SW_EXCEPT = 4 + RD_TEST_OS_ERROR = 5 - -test = TestType.RD_TEST_0_BYTES - - -class FakeReader(): def __init__(self): self.on_recv = asyncio.Event() + self.test = self.RD_TEST_0_BYTES + async def read(self, max_len: int): + print(f'fakeReader test: {self.test}') await self.on_recv.wait() - if test == TestType.RD_TEST_0_BYTES: + if self.test == self.RD_TEST_0_BYTES: return b'' - elif test == TestType.RD_TEST_TIMEOUT: + elif self.test == self.RD_TEST_13_BYTES: + print('fakeReader return 13 bytes') + self.test = self.RD_TEST_0_BYTES + return b'test-data-req' + elif self.test == self.RD_TEST_TIMEOUT: raise TimeoutError + elif self.test == self.RD_TEST_SW_EXCEPT: + self.test = self.RD_TEST_0_BYTES + self.unknown_var += 1 + elif self.test == self.RD_TEST_OS_ERROR: + self.test = self.RD_TEST_0_BYTES + raise ConnectionRefusedError + def feed_eof(self): return class FakeWriter(): + def __init__(self, conn='remote.intern'): + self.conn = conn + self.closing = False def write(self, buf: bytes): return + async def drain(self): + await asyncio.sleep(0) def get_extra_info(self, sel: str): if sel == 'peername': - return 'remote.intern' + return self.conn elif sel == 'sockname': return 'sock:1234' assert False def is_closing(self): - return False + return self.closing def close(self): - return + self.closing = True async def wait_closed(self): - return + await asyncio.sleep(0) @pytest.fixture def patch_open(): async def new_conn(conn): await asyncio.sleep(0) - return FakeReader(), FakeWriter() + return FakeReader(), FakeWriter(conn) def new_open(host: str, port: int): - 
global test - if test == TestType.RD_TEST_TIMEOUT: - raise TimeoutError - return new_conn(None) + return new_conn(f'{host}:{port}') + + with patch.object(asyncio, 'open_connection', new_open) as conn: + yield conn + +@pytest.fixture +def patch_open_timeout(): + def new_open(host: str, port: int): + raise TimeoutError + + with patch.object(asyncio, 'open_connection', new_open) as conn: + yield conn + +@pytest.fixture +def patch_open_value_error(): + def new_open(host: str, port: int): + raise ValueError + + with patch.object(asyncio, 'open_connection', new_open) as conn: + yield conn + +@pytest.fixture +def patch_open_conn_abort(): + def new_open(host: str, port: int): + raise ConnectionAbortedError with patch.object(asyncio, 'open_connection', new_open) as conn: yield conn @@ -154,13 +189,18 @@ async def test_modbus_conn(patch_open): _ = patch_open assert Infos.stat['proxy']['Inverter_Cnt'] == 0 - async with ModbusConn('test.local', 1234) as stream: + async with ModbusConn('test.local', 1234) as inverter: + stream = inverter.local.stream assert stream.node_id == 'G3P' - assert stream.addr == ('test.local', 1234) - assert type(stream.reader) is FakeReader - assert type(stream.writer) is FakeWriter + assert stream.addr == ('test.local:1234') + assert type(stream.ifc._reader) is FakeReader + assert type(stream.ifc._writer) is FakeWriter assert Infos.stat['proxy']['Inverter_Cnt'] == 1 - + del inverter + + for _ in InverterBase: + assert False + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 @pytest.mark.asyncio @@ -171,13 +211,47 @@ async def test_modbus_no_cnf(): assert Infos.stat['proxy']['Inverter_Cnt'] == 0 @pytest.mark.asyncio -async def test_modbus_cnf1(config_conn, patch_open): +async def test_modbus_timeout(config_conn, patch_open_timeout): _ = config_conn - _ = patch_open - global test + _ = patch_open_timeout assert asyncio.get_running_loop() - Inverter.class_init() - test = TestType.RD_TEST_TIMEOUT + Proxy.class_init() + + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + loop = asyncio.get_event_loop() + ModbusTcp(loop) + await asyncio.sleep(0.01) + for m in Message: + if (m.node_id == 'inv_2'): + assert False + + await asyncio.sleep(0.01) + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + +@pytest.mark.asyncio +async def test_modbus_value_err(config_conn, patch_open_value_error): + _ = config_conn + _ = patch_open_value_error + assert asyncio.get_running_loop() + Proxy.class_init() + + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + loop = asyncio.get_event_loop() + ModbusTcp(loop) + await asyncio.sleep(0.01) + for m in Message: + if (m.node_id == 'inv_2'): + assert False + + await asyncio.sleep(0.01) + assert Infos.stat['proxy']['Inverter_Cnt'] == 0 + +@pytest.mark.asyncio +async def test_modbus_conn_abort(config_conn, patch_open_conn_abort): + _ = config_conn + _ = patch_open_conn_abort + assert asyncio.get_running_loop() + Proxy.class_init() assert Infos.stat['proxy']['Inverter_Cnt'] == 0 loop = asyncio.get_event_loop() @@ -195,10 +269,8 @@ async def test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open): _ = config_conn _ = patch_open _ = patch_no_mqtt - global test assert asyncio.get_running_loop() - Inverter.class_init() - test = TestType.RD_TEST_0_BYTES + Proxy.class_init() assert Infos.stat['proxy']['Inverter_Cnt'] == 0 ModbusTcp(asyncio.get_event_loop()) @@ -209,7 +281,7 @@ async def test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open): test += 1 assert Infos.stat['proxy']['Inverter_Cnt'] == 1 m.shutdown_started = True - m.reader.on_recv.set() + 
m.ifc._reader.on_recv.set() del m assert 1 == test @@ -221,10 +293,8 @@ async def test_modbus_cnf3(config_conn, patch_no_mqtt, patch_open): _ = config_conn _ = patch_open _ = patch_no_mqtt - global test assert asyncio.get_running_loop() - Inverter.class_init() - test = TestType.RD_TEST_0_BYTES + Proxy.class_init() assert Infos.stat['proxy']['Inverter_Cnt'] == 0 ModbusTcp(asyncio.get_event_loop(), tim_restart= 0) @@ -236,13 +306,13 @@ async def test_modbus_cnf3(config_conn, patch_no_mqtt, patch_open): test += 1 if test == 1: m.shutdown_started = False - m.reader.on_recv.set() + m.ifc._reader.on_recv.set() await asyncio.sleep(0.1) assert m.state == State.closed await asyncio.sleep(0.1) else: m.shutdown_started = True - m.reader.on_recv.set() + m.ifc._reader.on_recv.set() del m assert 2 == test @@ -254,10 +324,8 @@ async def test_mqtt_err(config_conn, patch_mqtt_err, patch_open): _ = config_conn _ = patch_open _ = patch_mqtt_err - global test assert asyncio.get_running_loop() - Inverter.class_init() - test = TestType.RD_TEST_0_BYTES + Proxy.class_init() assert Infos.stat['proxy']['Inverter_Cnt'] == 0 ModbusTcp(asyncio.get_event_loop(), tim_restart= 0) @@ -269,13 +337,14 @@ async def test_mqtt_err(config_conn, patch_mqtt_err, patch_open): test += 1 if test == 1: m.shutdown_started = False - m.reader.on_recv.set() + m.ifc._reader.on_recv.set() await asyncio.sleep(0.1) assert m.state == State.closed await asyncio.sleep(0.1) + await asyncio.sleep(0.1) else: m.shutdown_started = True - m.reader.on_recv.set() + m.ifc._reader.on_recv.set() del m await asyncio.sleep(0.01) @@ -286,10 +355,8 @@ async def test_mqtt_except(config_conn, patch_mqtt_except, patch_open): _ = config_conn _ = patch_open _ = patch_mqtt_except - global test assert asyncio.get_running_loop() - Inverter.class_init() - test = TestType.RD_TEST_0_BYTES + Proxy.class_init() assert Infos.stat['proxy']['Inverter_Cnt'] == 0 ModbusTcp(asyncio.get_event_loop(), tim_restart= 0) @@ -301,13 +368,13 @@ async def test_mqtt_except(config_conn, patch_mqtt_except, patch_open): test += 1 if test == 1: m.shutdown_started = False - m.reader.on_recv.set() + m.ifc._reader.on_recv.set() await asyncio.sleep(0.1) assert m.state == State.closed await asyncio.sleep(0.1) else: m.shutdown_started = True - m.reader.on_recv.set() + m.ifc._reader.on_recv.set() del m await asyncio.sleep(0.01) diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 3072627..9fb857e 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -5,6 +5,7 @@ import aiomqtt import logging from mock import patch, Mock +from app.src.async_stream import AsyncIfcImpl from app.src.singleton import Singleton from app.src.mqtt import Mqtt from app.src.modbus import Modbus @@ -44,7 +45,7 @@ def config_no_conn(test_port): @pytest.fixture def spy_at_cmd(): - conn = SolarmanV5(server_side=True, client_mode= False) + conn = SolarmanV5(('test.local', 1234), server_side=True, client_mode= False, ifc=AsyncIfcImpl()) conn.node_id = 'inv_2/' with patch.object(conn, 'send_at_cmd', wraps=conn.send_at_cmd) as wrapped_conn: yield wrapped_conn @@ -52,7 +53,7 @@ def spy_at_cmd(): @pytest.fixture def spy_modbus_cmd(): - conn = SolarmanV5(server_side=True, client_mode= False) + conn = SolarmanV5(('test.local', 1234), server_side=True, client_mode= False, ifc=AsyncIfcImpl()) conn.node_id = 'inv_1/' with patch.object(conn, 'send_modbus_cmd', wraps=conn.send_modbus_cmd) as wrapped_conn: yield wrapped_conn @@ -60,7 +61,7 @@ def spy_modbus_cmd(): @pytest.fixture def spy_modbus_cmd_client(): - conn 
= SolarmanV5(server_side=False, client_mode= False) + conn = SolarmanV5(('test.local', 1234), server_side=False, client_mode= False, ifc=AsyncIfcImpl()) conn.node_id = 'inv_1/' with patch.object(conn, 'send_modbus_cmd', wraps=conn.send_modbus_cmd) as wrapped_conn: yield wrapped_conn diff --git a/app/tests/test_inverter.py b/app/tests/test_proxy.py similarity index 79% rename from app/tests/test_inverter.py rename to app/tests/test_proxy.py index 40b23bf..bee01fd 100644 --- a/app/tests/test_inverter.py +++ b/app/tests/test_proxy.py @@ -6,7 +6,7 @@ import logging from mock import patch, Mock from app.src.singleton import Singleton -from app.src.inverter import Inverter +from app.src.proxy import Proxy from app.src.mqtt import Mqtt from app.src.gen3plus.solarman_v5 import SolarmanV5 from app.src.config import Config @@ -18,7 +18,7 @@ pytest_plugins = ('pytest_asyncio',) @pytest.fixture(scope="module", autouse=True) def module_init(): def new_init(cls, cb_mqtt_is_up): - cb_mqtt_is_up() + pass # empty test methos Singleton._instances.clear() with patch.object(Mqtt, '__init__', new_init): @@ -63,12 +63,13 @@ def config_conn(test_hostname, test_port): async def test_inverter_cb(config_conn): _ = config_conn - with patch.object(Inverter, '_cb_mqtt_is_up', wraps=Inverter._cb_mqtt_is_up) as spy: - print('call Inverter.class_init') - Inverter.class_init() - assert 'homeassistant/' == Inverter.discovery_prfx - assert 'tsun/' == Inverter.entity_prfx - assert 'test_1/' == Inverter.proxy_node_id + with patch.object(Proxy, '_cb_mqtt_is_up', wraps=Proxy._cb_mqtt_is_up) as spy: + print('call Proxy.class_init') + Proxy.class_init() + assert 'homeassistant/' == Proxy.discovery_prfx + assert 'tsun/' == Proxy.entity_prfx + assert 'test_1/' == Proxy.proxy_node_id + await Proxy._cb_mqtt_is_up() spy.assert_called_once() @pytest.mark.asyncio @@ -76,8 +77,8 @@ async def test_mqtt_is_up(config_conn): _ = config_conn with patch.object(Mqtt, 'publish') as spy: - Inverter.class_init() - await Inverter._cb_mqtt_is_up() + Proxy.class_init() + await Proxy._cb_mqtt_is_up() spy.assert_called() @pytest.mark.asyncio @@ -85,6 +86,6 @@ async def test_mqtt_proxy_statt_invalid(config_conn): _ = config_conn with patch.object(Mqtt, 'publish') as spy: - Inverter.class_init() - await Inverter._async_publ_mqtt_proxy_stat('InValId_kEy') + Proxy.class_init() + await Proxy._async_publ_mqtt_proxy_stat('InValId_kEy') spy.assert_not_called() diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index 80778a2..a980744 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -5,6 +5,7 @@ import asyncio import logging import random from math import isclose +from app.src.async_stream import AsyncIfcImpl, StreamPtr from app.src.gen3plus.solarman_v5 import SolarmanV5 from app.src.config import Config from app.src.infos import Infos, Register @@ -20,13 +21,6 @@ Infos.static_init() timestamp = int(time.time()) # 1712861197 heartbeat = 60 -class Writer(): - def __init__(self): - self.sent_pdu = b'' - - def write(self, pdu: bytearray): - self.sent_pdu = pdu - class Mqtt(): def __init__(self): @@ -38,14 +32,21 @@ class Mqtt(): self.data = data +class FakeIfc(AsyncIfcImpl): + def __init__(self): + super().__init__() + self.remote = StreamPtr(None) + class MemoryStream(SolarmanV5): def __init__(self, msg, chunks = (0,), server_side: bool = True): - super().__init__(server_side, client_mode=False) + _ifc = FakeIfc() + super().__init__(('test.local', 1234), _ifc, server_side, client_mode=False) if server_side: 
self.mb.timeout = 0.4 # overwrite for faster testing self.mb_first_timeout = 0.5 self.mb_timeout = 0.5 - self.writer = Writer() + self.sent_pdu = b'' + self.ifc.tx_fifo.reg_trigger(self.write_cb) self.mqtt = Mqtt() self.__msg = msg self.__msg_len = len(msg) @@ -64,6 +65,11 @@ class MemoryStream(SolarmanV5): self.data = '' self.msg_recvd = [] + def write_cb(self): + if self.test_exception_async_write: + raise RuntimeError("Peer closed.") + self.sent_pdu = self.ifc.tx_fifo.get() + def _timestamp(self): return timestamp @@ -86,25 +92,21 @@ class MemoryStream(SolarmanV5): chunk_len = self.__chunks[self.__chunk_idx] self.__chunk_idx += 1 if chunk_len!=0: - self._recv_buffer += self.__msg[self.__offs:chunk_len] + self.ifc.rx_fifo += self.__msg[self.__offs:chunk_len] copied_bytes = chunk_len - self.__offs self.__offs = chunk_len else: - self._recv_buffer += self.__msg[self.__offs:] + self.ifc.rx_fifo += self.__msg[self.__offs:] copied_bytes = self.__msg_len - self.__offs self.__offs = self.__msg_len except Exception: pass # ignore exceptions here return copied_bytes - async def async_write(self, headline=''): - if self.test_exception_async_write: - raise RuntimeError("Peer closed.") - def createClientStream(self, msg, chunks = (0,)): c = MemoryStream(msg, chunks, False) - self.remote_stream = c - c. remote_stream = self + self.ifc.remote.stream = c + c.ifc.remote.stream = self return c def _SolarmanV5__flush_recv_msg(self) -> None: @@ -680,6 +682,7 @@ def config_tsun_inv1(): Config.act_config = {'solarman':{'enabled': True},'inverters':{'Y170000000000001':{'monitor_sn': 2070233889, 'node_id':'inv1', 'modbus_polling': True, 'suggested_area':'roof', 'sensor_list': 688}}} def test_read_message(device_ind_msg): + Config.act_config = {'solarman':{'enabled': True}} m = MemoryStream(device_ind_msg, (0,)) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -690,9 +693,9 @@ def test_read_message(device_ind_msg): assert m.control == 0x4110 assert str(m.seq) == '01:00' assert m.data_len == 0xd4 - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -711,9 +714,9 @@ def test_invalid_start_byte(invalid_start_byte, device_ind_msg): assert m.control == 0x4110 assert str(m.seq) == '01:00' assert m.data_len == 0xd4 - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1 m.close() @@ -731,9 +734,9 @@ def test_invalid_stop_byte(invalid_stop_byte): assert m.control == 0x4110 assert str(m.seq) == '01:00' assert m.data_len == 0xd4 - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1 m.close() @@ -756,9 +759,9 @@ def test_invalid_stop_byte2(invalid_stop_byte, device_ind_msg): assert m.msg_recvd[1]['data_len']==0xd4 assert m.unique_id == None - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 
1 m.close() @@ -778,9 +781,9 @@ def test_invalid_stop_start_byte(invalid_stop_byte, invalid_start_byte): assert m.control == 0x4110 assert str(m.seq) == '01:00' assert m.data_len == 0xd4 - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1 m.close() @@ -802,9 +805,9 @@ def test_invalid_checksum(invalid_checksum, device_ind_msg): assert m.msg_recvd[1]['control']==0x4110 assert m.msg_recvd[1]['seq']=='01:00' assert m.msg_recvd[1]['data_len']==0xd4 - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1 m.close() @@ -824,8 +827,8 @@ def test_read_message_twice(config_no_tsun_inv1, device_ind_msg, device_rsp_msg) assert m.msg_recvd[1]['control']==0x4110 assert m.msg_recvd[1]['seq']=='01:01' assert m.msg_recvd[1]['data_len']==0xd4 - assert m._send_buffer==device_rsp_msg+device_rsp_msg - assert m._forward_buffer==b'' + assert m.ifc.tx_fifo.get()==device_rsp_msg+device_rsp_msg + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -896,12 +899,11 @@ def test_read_two_messages(config_tsun_allow_all, device_ind_msg, device_rsp_msg assert m.msg_recvd[1]['data_len']==0x199 assert '02b0' == m.db.get_db_value(Register.SENSOR_LIST, None) assert 0x02b0 == m.sensor_list - assert m._forward_buffer==device_ind_msg+inverter_ind_msg - assert m._send_buffer==device_rsp_msg+inverter_rsp_msg + assert m.ifc.fwd_fifo.get()==device_ind_msg+inverter_ind_msg + assert m.ifc.tx_fifo.get()==device_rsp_msg+inverter_rsp_msg - m._send_buffer = bytearray(0) # clear send buffer for next test m._init_new_client_conn() - assert m._send_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' m.close() def test_read_two_messages2(config_tsun_allow_all, inverter_ind_msg, inverter_ind_msg_81, inverter_rsp_msg, inverter_rsp_msg_81): @@ -922,12 +924,11 @@ def test_read_two_messages2(config_tsun_allow_all, inverter_ind_msg, inverter_in assert m.msg_recvd[1]['seq']=='03:03' assert m.msg_recvd[1]['data_len']==0x199 assert m.time_ofs == 0x33e447a0 - assert m._forward_buffer==inverter_ind_msg+inverter_ind_msg_81 - assert m._send_buffer==inverter_rsp_msg+inverter_rsp_msg_81 + assert m.ifc.fwd_fifo.get()==inverter_ind_msg+inverter_ind_msg_81 + assert m.ifc.tx_fifo.get()==inverter_rsp_msg+inverter_rsp_msg_81 - m._send_buffer = bytearray(0) # clear send buffer for next test m._init_new_client_conn() - assert m._send_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' m.close() def test_read_two_messages3(config_tsun_allow_all, device_ind_msg2, device_rsp_msg2, inverter_ind_msg, inverter_rsp_msg): @@ -952,12 +953,11 @@ def test_read_two_messages3(config_tsun_allow_all, device_ind_msg2, device_rsp_m assert m.msg_recvd[1]['data_len']==0xd4 assert '02b0' == m.db.get_db_value(Register.SENSOR_LIST, None) assert 0x02b0 == m.sensor_list - assert m._forward_buffer==inverter_ind_msg+device_ind_msg2 - assert m._send_buffer==inverter_rsp_msg+device_rsp_msg2 + assert m.ifc.fwd_fifo.get()==inverter_ind_msg+device_ind_msg2 + assert m.ifc.tx_fifo.get()==inverter_rsp_msg+device_rsp_msg2 - m._send_buffer = bytearray(0) # clear send buffer for next test m._init_new_client_conn() - assert m._send_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' m.close() def 
test_unkown_frame_code(config_tsun_inv1, inverter_ind_msg_81, inverter_rsp_msg_81): @@ -972,9 +972,9 @@ def test_unkown_frame_code(config_tsun_inv1, inverter_ind_msg_81, inverter_rsp_m assert m.control == 0x4210 assert str(m.seq) == '03:03' assert m.data_len == 0x199 - assert m._recv_buffer==b'' - assert m._send_buffer==inverter_rsp_msg_81 - assert m._forward_buffer==inverter_ind_msg_81 + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==inverter_rsp_msg_81 + assert m.ifc.fwd_fifo.get()==inverter_ind_msg_81 assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -990,9 +990,9 @@ def test_unkown_message(config_tsun_inv1, unknown_msg): assert m.control == 0x5110 assert str(m.seq) == '84:10' assert m.data_len == 0x0a - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==unknown_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==unknown_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1008,9 +1008,9 @@ def test_device_rsp(config_tsun_inv1, device_rsp_msg): assert m.control == 0x1110 assert str(m.seq) == '01:01' assert m.data_len == 0x0a - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1026,9 +1026,9 @@ def test_inverter_rsp(config_tsun_inv1, inverter_rsp_msg): assert m.control == 0x1210 assert str(m.seq) == '02:02' assert m.data_len == 0x0a - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1043,9 +1043,9 @@ def test_heartbeat_ind(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg): assert m.control == 0x4710 assert str(m.seq) == '84:11' # value after sending response assert m.data_len == 0x01 - assert m._recv_buffer==b'' - assert m._send_buffer==heartbeat_rsp_msg - assert m._forward_buffer==heartbeat_ind_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==heartbeat_rsp_msg + assert m.ifc.fwd_fifo.get()==heartbeat_ind_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1061,9 +1061,9 @@ def test_heartbeat_ind2(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg): assert m.control == 0x4710 assert str(m.seq) == '84:11' # value after sending response assert m.data_len == 0x01 - assert m._recv_buffer==b'' - assert m._send_buffer==heartbeat_rsp_msg - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==heartbeat_rsp_msg + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1079,9 +1079,9 @@ def test_heartbeat_rsp(config_tsun_inv1, heartbeat_rsp_msg): assert m.control == 0x1710 assert str(m.seq) == '11:84' # value after sending response assert m.data_len == 0x0a - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1096,15 +1096,15 @@ def test_sync_start_ind(config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg assert m.control == 0x4310 assert str(m.seq) == '0d:0d' # value after sending response assert m.data_len == 47 - assert m._recv_buffer==b'' - assert 
m._send_buffer==sync_start_rsp_msg - assert m._forward_buffer==sync_start_ind_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==sync_start_rsp_msg + assert m.ifc.fwd_fifo.peek()==sync_start_ind_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.seq.server_side = False # simulate forawding to TSUN cloud - m._update_header(m._forward_buffer) + m._update_header(m.ifc.fwd_fifo.peek()) assert str(m.seq) == '0d:0e' # value after forwarding indication - assert m._forward_buffer==sync_start_fwd_msg + assert m.ifc.fwd_fifo.get()==sync_start_fwd_msg m.close() @@ -1120,9 +1120,9 @@ def test_sync_start_rsp(config_tsun_inv1, sync_start_rsp_msg): assert m.control == 0x1310 assert str(m.seq) == '0d:0d' # value after sending response assert m.data_len == 0x0a - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1137,9 +1137,9 @@ def test_sync_end_ind(config_tsun_inv1, sync_end_ind_msg, sync_end_rsp_msg): assert m.control == 0x4810 assert str(m.seq) == '07:07' # value after sending response assert m.data_len == 60 - assert m._recv_buffer==b'' - assert m._send_buffer==sync_end_rsp_msg - assert m._forward_buffer==sync_end_ind_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==sync_end_rsp_msg + assert m.ifc.fwd_fifo.get()==sync_end_ind_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1155,9 +1155,9 @@ def test_sync_end_rsp(config_tsun_inv1, sync_end_rsp_msg): assert m.control == 0x1810 assert str(m.seq) == '07:07' # value after sending response assert m.data_len == 0x0a - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.close() @@ -1175,9 +1175,9 @@ def test_build_modell_600(config_tsun_allow_all, inverter_ind_msg): assert '02b0' == m.db.get_db_value(Register.SENSOR_LIST, None) assert 0 == m.sensor_list # must not been set by an inverter data ind - m._send_buffer = bytearray(0) # clear send buffer for next test + m.ifc.tx_clear() # clear send buffer for next test m._init_new_client_conn() - assert m._send_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' m.close() def test_build_modell_1600(config_tsun_allow_all, inverter_ind_msg1600): @@ -1241,9 +1241,9 @@ def test_build_logger_modell(config_tsun_allow_all, device_ind_msg): def test_msg_iterator(): Message._registry.clear() - m1 = SolarmanV5(server_side=True, client_mode=False) - m2 = SolarmanV5(server_side=True, client_mode=False) - m3 = SolarmanV5(server_side=True, client_mode=False) + m1 = SolarmanV5(('test1.local', 1234), ifc=AsyncIfcImpl(), server_side=True, client_mode=False) + m2 = SolarmanV5(('test2.local', 1234), ifc=AsyncIfcImpl(), server_side=True, client_mode=False) + m3 = SolarmanV5(('test3.local', 1234), ifc=AsyncIfcImpl(), server_side=True, client_mode=False) m3.close() del m3 test1 = 0 @@ -1261,7 +1261,7 @@ def test_msg_iterator(): assert test2 == 1 def test_proxy_counter(): - m = SolarmanV5(server_side=True, client_mode=False) + m = SolarmanV5(('test.local', 1234), ifc=AsyncIfcImpl(), server_side=True, client_mode=False) assert m.new_data == {} m.db.stat['proxy']['Unknown_Msg'] = 0 Infos.new_stat_data['proxy'] = False @@ -1285,16 +1285,14 @@ async def test_msg_build_modbus_req(config_tsun_inv1, 
device_ind_msg, device_rsp m.read() assert m.control == 0x4110 assert str(m.seq) == '01:01' - assert m._send_buffer==device_rsp_msg - assert m._forward_buffer==device_ind_msg + assert m.ifc.tx_fifo.get()==device_rsp_msg + assert m.ifc.fwd_fifo.get()==device_ind_msg - m._send_buffer = bytearray(0) # clear send buffer for next test - m._forward_buffer = bytearray(0) # clear send buffer for next test await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG) assert 0 == m.send_msg_ofs - assert m._forward_buffer == b'' - assert m.writer.sent_pdu == b'' # modbus command must be ignore, cause connection is still not up - assert m._send_buffer == b'' # modbus command must be ignore, cause connection is still not up + assert m.ifc.fwd_fifo.get() == b'' + assert m.sent_pdu == b'' # modbus command must be ignore, cause connection is still not up + assert m.ifc.tx_fifo.get() == b'' # modbus command must be ignore, cause connection is still not up m.append_msg(inverter_ind_msg) m.read() @@ -1304,24 +1302,15 @@ async def test_msg_build_modbus_req(config_tsun_inv1, device_ind_msg, device_rsp assert m.msg_recvd[0]['seq']=='01:01' assert m.msg_recvd[1]['control']==0x4210 assert m.msg_recvd[1]['seq']=='02:02' - assert m._recv_buffer==b'' - assert m._send_buffer==inverter_rsp_msg - assert m._forward_buffer==inverter_ind_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==inverter_rsp_msg + assert m.ifc.fwd_fifo.get()==inverter_ind_msg - m._send_buffer = bytearray(0) # clear send buffer for next test - m._forward_buffer = bytearray(0) # clear send buffer for next test await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG) assert 0 == m.send_msg_ofs - assert m._forward_buffer == b'' - assert m.writer.sent_pdu == msg_modbus_cmd - assert m._send_buffer == b'' - - m._send_buffer = bytearray(0) # clear send buffer for next test - m.test_exception_async_write = True - await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG) - assert 0 == m.send_msg_ofs - assert m._forward_buffer == b'' - assert m._send_buffer == b'' + assert m.ifc.fwd_fifo.get() == b'' + assert m.sent_pdu == msg_modbus_cmd + assert m.ifc.tx_fifo.get()== b'' m.close() @pytest.mark.asyncio @@ -1331,14 +1320,13 @@ async def test_at_cmd(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inv m.read() # read device ind assert m.control == 0x4110 assert str(m.seq) == '01:01' - assert m._send_buffer==device_rsp_msg - assert m._forward_buffer==device_ind_msg + assert m.ifc.tx_fifo.get()==device_rsp_msg + assert m.ifc.fwd_fifo.get()==device_ind_msg - m._send_buffer = bytearray(0) # clear send buffer for next test - m._forward_buffer = bytearray(0) # clear send buffer for next test await m.send_at_cmd('AT+TIME=214028,1,60,120') - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.sent_pdu == b'' assert str(m.seq) == '01:01' assert m.mqtt.key == '' assert m.mqtt.data == "" @@ -1347,34 +1335,37 @@ async def test_at_cmd(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inv m.read() # read inverter ind assert m.control == 0x4210 assert str(m.seq) == '02:02' - assert m._send_buffer==inverter_rsp_msg - assert m._forward_buffer==inverter_ind_msg + assert m.ifc.tx_fifo.get()==inverter_rsp_msg + assert m.ifc.fwd_fifo.get()==inverter_ind_msg - m._send_buffer = bytearray(0) # clear send buffer for next test - m._forward_buffer = bytearray(0) # clear send buffer for next test await 
m.send_at_cmd('AT+TIME=214028,1,60,120') - assert m._send_buffer==at_command_ind_msg - assert m._forward_buffer==b'' + assert m.ifc.fwd_fifo.get() == b'' + assert m.ifc.tx_fifo.get()== b'' + assert m.sent_pdu == at_command_ind_msg + m.sent_pdu = bytearray() + assert str(m.seq) == '02:03' assert m.mqtt.key == '' assert m.mqtt.data == "" - m._send_buffer = bytearray(0) # clear send buffer for next test m.append_msg(at_command_rsp_msg) m.read() # read at resp assert m.control == 0x1510 assert str(m.seq) == '03:03' - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.key == 'at_resp' assert m.data == "+ok" + m.sent_pdu = bytearray() m.test_exception_async_write = True await m.send_at_cmd('AT+TIME=214028,1,60,120') - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.sent_pdu == b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.sent_pdu == b'' assert str(m.seq) == '03:04' assert m.forward_at_cmd_resp == False assert m.mqtt.key == '' @@ -1388,14 +1379,12 @@ async def test_at_cmd_blocked(config_tsun_allow_all, device_ind_msg, device_rsp_ m.read() assert m.control == 0x4110 assert str(m.seq) == '01:01' - assert m._send_buffer==device_rsp_msg - assert m._forward_buffer==device_ind_msg + assert m.ifc.tx_fifo.get()==device_rsp_msg + assert m.ifc.fwd_fifo.get()==device_ind_msg - m._send_buffer = bytearray(0) # clear send buffer for next test - m._forward_buffer = bytearray(0) # clear send buffer for next test await m.send_at_cmd('AT+WEBU') - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert str(m.seq) == '01:01' assert m.mqtt.key == '' assert m.mqtt.data == "" @@ -1404,16 +1393,14 @@ async def test_at_cmd_blocked(config_tsun_allow_all, device_ind_msg, device_rsp_ m.read() assert m.control == 0x4210 assert str(m.seq) == '02:02' - assert m._recv_buffer==b'' - assert m._send_buffer==inverter_rsp_msg - assert m._forward_buffer==inverter_ind_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==inverter_rsp_msg + assert m.ifc.fwd_fifo.get()==inverter_ind_msg - m._send_buffer = bytearray(0) # clear send buffer for next test - m._forward_buffer = bytearray(0) # clear send buffer for next test await m.send_at_cmd('AT+WEBU') - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert str(m.seq) == '02:02' assert m.forward_at_cmd_resp == False assert m.mqtt.key == 'at_resp' @@ -1435,9 +1422,9 @@ def test_at_cmd_ind(config_tsun_inv1, at_command_ind_msg): assert m.control == 0x4510 assert str(m.seq) == '03:02' assert m.data_len == 39 - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert m._forward_buffer==at_command_ind_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==at_command_ind_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 assert m.db.stat['proxy']['AT_Command'] == 1 assert m.db.stat['proxy']['AT_Command_Blocked'] == 0 @@ -1459,9 +1446,9 @@ def test_at_cmd_ind_block(config_tsun_inv1, at_command_ind_msg_block): assert m.control == 0x4510 assert str(m.seq) == '03:02' assert m.data_len == 23 - assert m._recv_buffer==b'' - assert m._send_buffer==b'' - assert 
m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 assert m.db.stat['proxy']['AT_Command'] == 0 assert m.db.stat['proxy']['AT_Command_Blocked'] == 1 @@ -1481,8 +1468,8 @@ def test_msg_at_command_rsp1(config_tsun_inv1, at_command_rsp_msg): assert str(m.seq) == '03:03' assert m.header_len==11 assert m.data_len==17 - assert m._forward_buffer==at_command_rsp_msg - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==at_command_rsp_msg + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1500,8 +1487,8 @@ def test_msg_at_command_rsp2(config_tsun_inv1, at_command_rsp_msg): assert str(m.seq) == '03:03' assert m.header_len==11 assert m.data_len==17 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1525,9 +1512,9 @@ def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd, msg_modbus_cmd_fwd): assert str(c.seq) == '03:02' assert c.header_len==11 assert c.data_len==23 - assert c._forward_buffer==b'' - assert c._send_buffer==b'' - assert m.writer.sent_pdu == msg_modbus_cmd_fwd + assert c.ifc.fwd_fifo.get()==b'' + assert c.ifc.tx_fifo.get()==b'' + assert m.sent_pdu == msg_modbus_cmd_fwd assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['AT_Command'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 1 @@ -1552,9 +1539,9 @@ def test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd_crc_err): assert str(c.seq) == '03:02' assert c.header_len==11 assert c.data_len==23 - assert c._forward_buffer==b'' - assert c._send_buffer==b'' - assert m.writer.sent_pdu==b'' + assert c.ifc.fwd_fifo.get()==b'' + assert c.ifc.tx_fifo.get()==b'' + assert m.sent_pdu==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['AT_Command'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 @@ -1575,8 +1562,8 @@ def test_msg_unknown_cmd_req(config_tsun_inv1, msg_unknown_cmd): assert str(m.seq) == '03:02' assert m.header_len==11 assert m.data_len==23 - assert m._forward_buffer==msg_unknown_cmd - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_unknown_cmd + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['AT_Command'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 @@ -1596,8 +1583,8 @@ def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp): assert str(m.seq) == '03:03' assert m.header_len==11 assert m.data_len==59 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1620,21 +1607,20 @@ def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp): assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.mb.err == 0 assert m.msg_count == 1 - assert m._forward_buffer==msg_modbus_rsp - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_rsp + assert m.ifc.tx_fifo.get()==b'' assert m.db.get_db_value(Register.VERSION) == 'V4.0.10' assert m.new_data['inverter'] == True m.new_data['inverter'] = False m.mb.req_pend = True - m._forward_buffer = bytearray() 
m.append_msg(msg_modbus_rsp) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.mb.err == 0 assert m.msg_count == 2 - assert m._forward_buffer==msg_modbus_rsp - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_rsp + assert m.ifc.tx_fifo.get()==b'' assert m.db.get_db_value(Register.VERSION) == 'V4.0.10' assert m.new_data['inverter'] == False @@ -1658,20 +1644,19 @@ def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp): assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.mb.err == 0 assert m.msg_count == 1 - assert m._forward_buffer==msg_modbus_rsp - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_rsp + assert m.ifc.tx_fifo.get()==b'' assert m.db.get_db_value(Register.VERSION) == 'V4.0.10' assert m.new_data['inverter'] == True m.new_data['inverter'] = False - m._forward_buffer = bytearray() m.append_msg(msg_modbus_rsp) m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.mb.err == 5 assert m.msg_count == 2 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.get_db_value(Register.VERSION) == 'V4.0.10' assert m.new_data['inverter'] == False @@ -1689,8 +1674,8 @@ def test_msg_unknown_rsp(config_tsun_inv1, msg_unknown_cmd_rsp): assert str(m.seq) == '03:03' assert m.header_len==11 assert m.data_len==59 - assert m._forward_buffer==msg_unknown_cmd_rsp - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_unknown_cmd_rsp + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1703,8 +1688,8 @@ def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_invalid): m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.msg_count == 1 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1726,8 +1711,8 @@ def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp): m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.msg_count == 1 - assert m._forward_buffer==msg_modbus_rsp - assert m._send_buffer == b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_rsp + assert m.ifc.tx_fifo.get()== b'' assert m.mb.err == 0 assert m.modbus_elms == 20-1 # register 0x300d is unknown, so one value can't be mapped assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 @@ -1750,28 +1735,27 @@ async def test_modbus_polling(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp assert m.control == 0x4710 assert str(m.seq) == '84:11' # value after sending response assert m.data_len == 0x01 - assert m._recv_buffer==b'' - assert m._send_buffer==heartbeat_rsp_msg - assert m._forward_buffer==heartbeat_ind_msg + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==heartbeat_rsp_msg + assert m.ifc.fwd_fifo.get()==heartbeat_ind_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 - m._send_buffer = bytearray(0) # clear send buffer for next test assert m.state == State.up assert isclose(m.mb_timeout, 0.5) assert next(m.mb_timer.exp_count) 
== 0 await asyncio.sleep(0.5) - assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x12\x84!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x30\x00\x000J\xde\x86\x15') - assert m._send_buffer==b'' + assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x12\x84!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x30\x00\x000J\xde\x86\x15') + assert m.ifc.tx_fifo.get()==b'' await asyncio.sleep(0.5) - assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x13\x84!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x30\x00\x000J\xde\x87\x15') - assert m._send_buffer==b'' + assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x13\x84!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x03\x30\x00\x000J\xde\x87\x15') + assert m.ifc.tx_fifo.get()==b'' m.state = State.closed - m.writer.sent_pdu = bytearray() + m.sent_pdu = bytearray() await asyncio.sleep(0.5) - assert m.writer.sent_pdu==bytearray(b'') - assert m._send_buffer==b'' + assert m.sent_pdu==bytearray(b'') + assert m.ifc.tx_fifo.get()==b'' assert next(m.mb_timer.exp_count) == 4 m.close() @@ -1785,7 +1769,7 @@ async def test_start_client_mode(config_tsun_inv1, str_test_ip): assert m.mb_timer.tim == None assert asyncio.get_running_loop() == m.mb_timer.loop await m.send_start_cmd(get_sn_int(), str_test_ip, m.mb_first_timeout) - assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15') + assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15') assert m.db.get_db_value(Register.IP_ADDRESS) == str_test_ip assert isclose(m.db.get_db_value(Register.POLLING_INTERVAL), 0.5) assert m.db.get_db_value(Register.HEARTBEAT_INTERVAL) == 120 @@ -1793,16 +1777,29 @@ async def test_start_client_mode(config_tsun_inv1, str_test_ip): assert m.state == State.up assert m.no_forwarding == True - assert m._send_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' assert isclose(m.mb_timeout, 0.5) assert next(m.mb_timer.exp_count) == 0 await asyncio.sleep(0.5) - assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x02\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf2\x15') - assert m._send_buffer==b'' + assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x02\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf2\x15') + assert m.ifc.tx_fifo.get()==b'' await asyncio.sleep(0.5) - assert m.writer.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x03\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf3\x15') - assert m._send_buffer==b'' + assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x03\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf3\x15') + assert m.ifc.tx_fifo.get()==b'' assert next(m.mb_timer.exp_count) == 3 m.close() + +def test_timeout(config_tsun_inv1): + _ = config_tsun_inv1 + m = MemoryStream(b'') + assert m.state == State.init + assert SolarmanV5.MAX_START_TIME == m._timeout() + m.state = State.up + m.modbus_polling = True + assert SolarmanV5.MAX_INV_IDLE_TIME == m._timeout() + m.modbus_polling = False + assert SolarmanV5.MAX_DEF_IDLE_TIME == m._timeout() + m.state = State.closed + m.close() diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index ce4cef4..ae156bb 100644 --- a/app/tests/test_talent.py +++ 
b/app/tests/test_talent.py @@ -1,6 +1,7 @@ # test_with_pytest.py import pytest, logging, asyncio from math import isclose +from app.src.async_stream import AsyncIfcImpl, StreamPtr from app.src.gen3.talent import Talent, Control from app.src.config import Config from app.src.infos import Infos, Register @@ -15,22 +16,21 @@ Infos.static_init() tracer = logging.getLogger('tracer') - -class Writer(): +class FakeIfc(AsyncIfcImpl): def __init__(self): - self.sent_pdu = b'' - - def write(self, pdu: bytearray): - self.sent_pdu = pdu + super().__init__() + self.remote = StreamPtr(None) class MemoryStream(Talent): def __init__(self, msg, chunks = (0,), server_side: bool = True): - super().__init__(server_side) + self.ifc = FakeIfc() + super().__init__(('test.local', 1234), self.ifc, server_side) if server_side: self.mb.timeout = 0.4 # overwrite for faster testing self.mb_first_timeout = 0.5 self.mb_timeout = 0.5 - self.writer = Writer() + self.sent_pdu = b'' + self.ifc.tx_fifo.reg_trigger(self.write_cb) self.__msg = msg self.__msg_len = len(msg) self.__chunks = chunks @@ -39,9 +39,11 @@ class MemoryStream(Talent): self.msg_count = 0 self.addr = 'Test: SrvSide' self.send_msg_ofs = 0 - self.test_exception_async_write = False self.msg_recvd = [] - self.remote_stream = None + + def write_cb(self): + self.sent_pdu = self.ifc.tx_fifo.get() + def append_msg(self, msg): self.__msg += msg @@ -54,11 +56,11 @@ class MemoryStream(Talent): chunk_len = self.__chunks[self.__chunk_idx] self.__chunk_idx += 1 if chunk_len!=0: - self._recv_buffer += self.__msg[self.__offs:chunk_len] + self.ifc.rx_fifo += self.__msg[self.__offs:chunk_len] copied_bytes = chunk_len - self.__offs self.__offs = chunk_len else: - self._recv_buffer += self.__msg[self.__offs:] + self.ifc.rx_fifo += self.__msg[self.__offs:] copied_bytes = self.__msg_len - self.__offs self.__offs = self.__msg_len except Exception: @@ -73,8 +75,8 @@ class MemoryStream(Talent): def createClientStream(self, msg, chunks = (0,)): c = MemoryStream(msg, chunks, False) - self.remote_stream = c - c. 
remote_stream = self + self.ifc.remote.stream = c + c.ifc.remote.stream = self return c def _Talent__flush_recv_msg(self) -> None: @@ -91,10 +93,6 @@ class MemoryStream(Talent): self.msg_count += 1 - async def async_write(self, headline=''): - if self.test_exception_async_write: - raise RuntimeError("Peer closed.") - @pytest.fixture @@ -747,7 +745,9 @@ def test_read_message(msg_contact_info): assert m.msg_id==0 assert m.header_len==23 assert m.data_len==25 - assert m._forward_buffer==b'' + assert m.ifc.rx_get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' m.close() def test_read_message_twice(config_no_tsun_inv1, msg_inverter_ind): @@ -767,7 +767,7 @@ def test_read_message_twice(config_no_tsun_inv1, msg_inverter_ind): assert m.msg_recvd[1]['data_len']==120 assert m.id_str == b"R170000000000001" assert m.unique_id == 'R170000000000001' - assert m._forward_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' m.close() def test_read_message_long_id(msg_contact_info_long_id): @@ -852,15 +852,15 @@ def test_read_two_messages(config_tsun_allow_all, msg2_contact_info,msg_contact_ assert m.msg_recvd[1]['msg_id']==0 assert m.msg_recvd[1]['header_len']==23 assert m.msg_recvd[1]['data_len']==25 - assert m._forward_buffer==b'' - assert m._send_buffer==msg_contact_rsp + msg_contact_rsp2 + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==msg_contact_rsp + msg_contact_rsp2 assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 - m._send_buffer = bytearray(0) # clear send buffer for next test + m.ifc.tx_clear() # clear send buffer for next test m.contact_name = b'solarhub' m.contact_mail = b'solarhub@123456' m._init_new_client_conn() - assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456' + assert m.ifc.tx_fifo.get()==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456' m.close() def test_conttact_req(config_tsun_allow_all, msg_contact_info, msg_contact_rsp): @@ -877,8 +877,8 @@ def test_conttact_req(config_tsun_allow_all, msg_contact_info, msg_contact_rsp): assert m.msg_id==0 assert m.header_len==23 assert m.data_len==25 - assert m._forward_buffer==b'' - assert m._send_buffer==msg_contact_rsp + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==msg_contact_rsp m.close() def test_contact_broken_req(config_tsun_allow_all, msg_contact_info_broken, msg_contact_rsp): @@ -895,8 +895,44 @@ def test_contact_broken_req(config_tsun_allow_all, msg_contact_info_broken, msg_ assert m.msg_id==0 assert m.header_len==23 assert m.data_len==23 - assert m._forward_buffer==b'' - assert m._send_buffer==msg_contact_rsp + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==msg_contact_rsp + m.close() + +def test_conttact_req(config_tsun_allow_all, msg_contact_info, msg_contact_rsp): + _ = config_tsun_allow_all + m = MemoryStream(msg_contact_info, (0,)) + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.contact_name == b'solarhub' + assert m.contact_mail == b'solarhub@123456' + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==0 + assert m.header_len==23 + assert m.data_len==25 + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==msg_contact_rsp + m.close() + +def test_contact_broken_req(config_tsun_allow_all, msg_contact_info_broken, msg_contact_rsp): + _ = config_tsun_allow_all + m = 
MemoryStream(msg_contact_info_broken, (0,)) + m.read() # read complete msg, and dispatch msg + assert not m.header_valid # must be invalid, since msg was handled and buffer flushed + assert m.msg_count == 1 + assert m.id_str == b"R170000000000001" + assert m.contact_name == b'' + assert m.contact_mail == b'' + assert m.unique_id == 'R170000000000001' + assert int(m.ctrl)==145 + assert m.msg_id==0 + assert m.header_len==23 + assert m.data_len==23 + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==msg_contact_rsp m.close() def test_msg_contact_resp(config_tsun_inv1, msg_contact_rsp): @@ -914,8 +950,8 @@ def test_msg_contact_resp(config_tsun_inv1, msg_contact_rsp): assert m.msg_id==0 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -934,8 +970,8 @@ def test_msg_contact_resp_2(config_tsun_inv1, msg_contact_rsp): assert m.msg_id==0 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==msg_contact_rsp - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_contact_rsp + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -954,8 +990,8 @@ def test_msg_contact_resp_3(config_tsun_inv1, msg_contact_rsp): assert m.msg_id==0 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==msg_contact_rsp - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_contact_rsp + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -972,8 +1008,8 @@ def test_msg_contact_invalid(config_tsun_inv1, msg_contact_invalid): assert m.msg_id==0 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==msg_contact_invalid - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_contact_invalid + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 m.close() @@ -993,8 +1029,8 @@ def test_msg_get_time(config_tsun_inv1, msg_get_time): assert m.ts_offset==0 assert m.data_len==0 assert m.state==State.pend - assert m._forward_buffer==msg_get_time - assert m._send_buffer==b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00' + assert m.ifc.fwd_fifo.get()==msg_get_time + assert m.ifc.tx_fifo.get()==b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1014,8 +1050,8 @@ def test_msg_get_time_autark(config_no_tsun_inv1, msg_get_time): assert m.ts_offset==0 assert m.data_len==0 assert m.state==State.received - assert m._forward_buffer==b'' - assert m._send_buffer==bytearray(b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00') + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==bytearray(b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00') assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1025,7 +1061,7 @@ def test_msg_time_resp(config_tsun_inv1, msg_time_rsp): m = MemoryStream(msg_time_rsp, (0,), False) s = MemoryStream(b'', (0,), True) assert s.ts_offset==0 - m.remote_stream = s + m.ifc.remote.stream = s m.db.stat['proxy']['Unknown_Ctrl'] = 0 m.read() # read complete msg, and dispatch msg assert not m.header_valid # must be invalid, since msg was handled and buffer flushed @@ -1038,10 +1074,10 @@ def test_msg_time_resp(config_tsun_inv1, msg_time_rsp): assert m.ts_offset==3600000 assert s.ts_offset==3600000 
assert m.data_len==8 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 - m.remote_stream = None + m.ifc.remote.stream = None s.close() m.close() @@ -1059,8 +1095,8 @@ def test_msg_time_resp_autark(config_no_tsun_inv1, msg_time_rsp): assert m.header_len==23 assert m.ts_offset==3600000 assert m.data_len==8 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1078,8 +1114,8 @@ def test_msg_time_inv_resp(config_tsun_inv1, msg_time_rsp_inv): assert m.header_len==23 assert m.ts_offset==0 assert m.data_len==4 - assert m._forward_buffer==msg_time_rsp_inv - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_time_rsp_inv + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1097,8 +1133,8 @@ def test_msg_time_invalid(config_tsun_inv1, msg_time_invalid): assert m.header_len==23 assert m.ts_offset==0 assert m.data_len==0 - assert m._forward_buffer==msg_time_invalid - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_time_invalid + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 m.close() @@ -1116,8 +1152,8 @@ def test_msg_time_invalid_autark(config_no_tsun_inv1, msg_time_invalid): assert m.ts_offset==0 assert m.header_len==23 assert m.data_len==0 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 m.close() @@ -1140,8 +1176,8 @@ def test_msg_act_time(config_no_modbus_poll, msg_act_time, msg_act_time_ack): assert m.header_len==23 assert m.data_len==9 assert m.state == State.up - assert m._forward_buffer==msg_act_time - assert m._send_buffer==msg_act_time_ack + assert m.ifc.fwd_fifo.get()==msg_act_time + assert m.ifc.tx_fifo.get()==msg_act_time_ack assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert 125 == m.db.get_db_value(Register.POLLING_INTERVAL, 0) m.close() @@ -1164,8 +1200,8 @@ def test_msg_act_time2(config_tsun_inv1, msg_act_time, msg_act_time_ack): assert m.ts_offset==0 assert m.header_len==23 assert m.data_len==9 - assert m._forward_buffer==msg_act_time - assert m._send_buffer==msg_act_time_ack + assert m.ifc.fwd_fifo.get()==msg_act_time + assert m.ifc.tx_fifo.get()==msg_act_time_ack assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert 123 == m.db.get_db_value(Register.POLLING_INTERVAL, 0) m.close() @@ -1185,8 +1221,8 @@ def test_msg_act_time_ofs(config_tsun_inv1, msg_act_time, msg_act_time_ofs, msg_ assert m.ts_offset==3600 assert m.header_len==23 assert m.data_len==9 - assert m._forward_buffer==msg_act_time_ofs - assert m._send_buffer==msg_act_time_ack + assert m.ifc.fwd_fifo.get()==msg_act_time_ofs + assert m.ifc.tx_fifo.get()==msg_act_time_ack assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1205,8 +1241,8 @@ def test_msg_act_time_ofs2(config_tsun_inv1, msg_act_time, msg_act_time_ofs, msg assert m.ts_offset==-3600 assert m.header_len==23 assert m.data_len==9 - assert m._forward_buffer==msg_act_time - assert m._send_buffer==msg_act_time_ack + assert m.ifc.fwd_fifo.get()==msg_act_time + assert m.ifc.tx_fifo.get()==msg_act_time_ack assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1225,8 +1261,8 @@ def test_msg_act_time_autark(config_no_tsun_inv1, msg_act_time, msg_act_time_ack assert 
m.ts_offset==0 assert m.header_len==23 assert m.data_len==9 - assert m._forward_buffer==b'' - assert m._send_buffer==msg_act_time_ack + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==msg_act_time_ack assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1243,8 +1279,8 @@ def test_msg_act_time_ack(config_tsun_inv1, msg_act_time_ack): assert m.msg_id==153 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1261,8 +1297,8 @@ def test_msg_act_time_cmd(config_tsun_inv1, msg_act_time_cmd): assert m.msg_id==153 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==msg_act_time_cmd - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_act_time_cmd + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 m.close() @@ -1279,8 +1315,8 @@ def test_msg_act_time_inv(config_tsun_inv1, msg_act_time_inv): assert m.msg_id==153 assert m.header_len==23 assert m.data_len==8 - assert m._forward_buffer==msg_act_time_inv - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_act_time_inv + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1298,12 +1334,12 @@ def test_msg_cntrl_ind(config_tsun_inv1, msg_controller_ind, msg_controller_ind_ assert m.header_len==23 assert m.data_len==284 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_controller_ind + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.peek()==msg_controller_ind m.ts_offset = -4096 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_controller_ind_ts_offs - assert m._send_buffer==msg_controller_ack + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_controller_ind_ts_offs + assert m.ifc.tx_fifo.get()==msg_controller_ack assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1320,8 +1356,8 @@ def test_msg_cntrl_ack(config_tsun_inv1, msg_controller_ack): assert m.msg_id==113 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1339,12 +1375,12 @@ def test_msg_cntrl_invalid(config_tsun_inv1, msg_controller_invalid): assert m.header_len==23 assert m.data_len==1 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_controller_invalid + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.peek()==msg_controller_invalid m.ts_offset = -4096 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_controller_invalid - assert m._send_buffer==b'' + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_controller_invalid + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 m.close() @@ -1363,12 +1399,12 @@ def test_msg_inv_ind(config_tsun_inv1, msg_inverter_ind, msg_inverter_ind_ts_off assert m.header_len==23 assert m.data_len==120 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_inverter_ind + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.peek()==msg_inverter_ind m.ts_offset = +256 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_inverter_ind_ts_offs - assert m._send_buffer==msg_inverter_ack + 
m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_inverter_ind_ts_offs + assert m.ifc.tx_fifo.get()==msg_inverter_ack assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1390,9 +1426,9 @@ def test_msg_inv_ind1(config_tsun_inv1, msg_inverter_ind2, msg_inverter_ind_ts_o assert m.header_len==23 assert m.data_len==1263 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_inverter_ind2 - assert m._send_buffer==msg_inverter_ack + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_inverter_ind2 + assert m.ifc.tx_fifo.get()==msg_inverter_ack assert m.db.get_db_value(Register.TS_GRID) == 1691243349 m.close() @@ -1414,9 +1450,9 @@ def test_msg_inv_ind2(config_tsun_inv1, msg_inverter_ind_new, msg_inverter_ind_t assert m.header_len==23 assert m.data_len==1165 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_inverter_ind_new - assert m._send_buffer==msg_inverter_ack + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_inverter_ind_new + assert m.ifc.tx_fifo.get()==msg_inverter_ack assert m.db.get_db_value(Register.INVERTER_STATUS) == None assert m.db.get_db_value(Register.TS_GRID) == None m.db.db['grid'] = {'Output_Power': 100} @@ -1442,9 +1478,9 @@ def test_msg_inv_ind3(config_tsun_inv1, msg_inverter_ind_0w, msg_inverter_ack): assert m.header_len==23 assert m.data_len==1263 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_inverter_ind_0w - assert m._send_buffer==msg_inverter_ack + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_inverter_ind_0w + assert m.ifc.tx_fifo.get()==msg_inverter_ack assert m.db.get_db_value(Register.INVERTER_STATUS) == 1 assert isclose(m.db.db['grid']['Output_Power'], 0.5) m.close() @@ -1466,8 +1502,8 @@ def test_msg_inv_ack(config_tsun_inv1, msg_inverter_ack): assert m.msg_id==4 assert m.header_len==23 assert m.data_len==1 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -1485,12 +1521,12 @@ def test_msg_inv_invalid(config_tsun_inv1, msg_inverter_invalid): assert m.header_len==23 assert m.data_len==1 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_inverter_invalid + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.peek()==msg_inverter_invalid m.ts_offset = 256 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_inverter_invalid - assert m._send_buffer==b'' + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_inverter_invalid + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 m.close() @@ -1509,12 +1545,12 @@ def test_msg_ota_req(config_tsun_inv1, msg_ota_req): assert m.header_len==23 assert m.data_len==259 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_ota_req + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.peek()==msg_ota_req m.ts_offset = 4096 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_ota_req - assert m._send_buffer==b'' + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_ota_req + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['OTA_Start_Msg'] == 1 m.close() @@ -1536,12 +1572,12 @@ def test_msg_ota_ack(config_tsun_inv1, msg_ota_ack): 
assert m.header_len==23 assert m.data_len==1 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_ota_ack + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.peek()==msg_ota_ack m.ts_offset = 256 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_ota_ack - assert m._send_buffer==b'' + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.get()==msg_ota_ack + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['OTA_Start_Msg'] == 0 m.close() @@ -1561,12 +1597,12 @@ def test_msg_ota_invalid(config_tsun_inv1, msg_ota_invalid): assert m.header_len==23 assert m.data_len==1 m.ts_offset = 0 - m._update_header(m._forward_buffer) - assert m._forward_buffer==msg_ota_invalid + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.fwd_fifo.peek()==msg_ota_invalid m.ts_offset = 4096 - assert m._forward_buffer==msg_ota_invalid - m._update_header(m._forward_buffer) - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_ota_invalid + m._update_header(m.ifc.fwd_fifo.peek()) + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 assert m.db.stat['proxy']['OTA_Start_Msg'] == 0 m.close() @@ -1584,8 +1620,8 @@ def test_msg_unknown(config_tsun_inv1, msg_unknown): assert m.msg_id==23 assert m.header_len==23 assert m.data_len==4 - assert m._forward_buffer==msg_unknown - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_unknown + assert m.ifc.tx_fifo.get()==b'' assert 1 == m.db.stat['proxy']['Unknown_Msg'] m.close() @@ -1605,9 +1641,9 @@ def test_ctrl_byte(): def test_msg_iterator(): - m1 = Talent(server_side=True) - m2 = Talent(server_side=True) - m3 = Talent(server_side=True) + m1 = Talent(('test1.local', 1234), ifc=AsyncIfcImpl(), server_side=True) + m2 = Talent(('test2.local', 1234), ifc=AsyncIfcImpl(), server_side=True) + m3 = Talent(('test3.local', 1234), ifc=AsyncIfcImpl(), server_side=True) m3.close() del m3 test1 = 0 @@ -1709,12 +1745,12 @@ def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd): assert c.msg_id==119 assert c.header_len==23 assert c.data_len==13 - assert c._forward_buffer==b'' - assert c._send_buffer==b'' + assert c.ifc.fwd_fifo.get()==b'' + assert c.ifc.tx_fifo.get()==b'' assert m.id_str == b"R170000000000001" - assert m._forward_buffer==b'' - assert m._send_buffer==b'' - assert m.writer.sent_pdu == msg_modbus_cmd + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.sent_pdu == msg_modbus_cmd assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 1 assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 @@ -1739,12 +1775,12 @@ def test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd): assert c.msg_id==119 assert c.header_len==23 assert c.data_len==13 - assert c._forward_buffer==b'' - assert c._send_buffer==b'' + assert c.ifc.fwd_fifo.get()==b'' + assert c.ifc.tx_fifo.get()==b'' assert m.id_str == b"R170000000000001" - assert m._forward_buffer==b'' - assert m._send_buffer==b'' - assert m.writer.sent_pdu == b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.sent_pdu == b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 1 assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 @@ -1768,11 +1804,11 @@ def test_msg_modbus_req3(config_tsun_inv1, msg_modbus_cmd_crc_err): assert c.msg_id==119 assert c.header_len==23 assert c.data_len==13 - assert c._forward_buffer==b'' - assert 
c._send_buffer==b'' - assert m._forward_buffer==b'' - assert m._send_buffer==b'' - assert m.writer.sent_pdu ==b'' + assert c.ifc.fwd_fifo.get()==b'' + assert c.ifc.tx_fifo.get()==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' + assert m.sent_pdu ==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1 @@ -1793,8 +1829,8 @@ def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp): assert m.msg_id==119 assert m.header_len==23 assert m.data_len==13 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1815,8 +1851,8 @@ def test_msg_modbus_cloud_rsp(config_tsun_inv1, msg_modbus_rsp): assert m.msg_id==119 assert m.header_len==23 assert m.data_len==13 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Msg'] == 1 assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 @@ -1843,8 +1879,8 @@ def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp20): assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.mb.err == 5 assert m.msg_count == 2 - assert m._forward_buffer==msg_modbus_rsp20 - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_rsp20 + assert m.ifc.tx_fifo.get()==b'' assert m.db.db == {'collector': {'Serial_Number': 'R170000000000001'}, 'inverter': {'Version': 'V5.1.09', 'Rated_Power': 300}, 'grid': {'Timestamp': m._utc(), 'Voltage': 225.9, 'Current': 0.41, 'Frequency': 49.99, 'Output_Power': 94.8}, 'env': {'Inverter_Temp': 22}, 'input': {'Timestamp': m._utc(), 'pv1': {'Voltage': 0.8, 'Current': 0.0, 'Power': 0.0}, 'pv2': {'Voltage': 34.5, 'Current': 2.89, 'Power': 99.8}, 'pv3': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}, 'pv4': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}}} assert m.db.get_db_value(Register.VERSION) == 'V5.1.09' assert m.db.get_db_value(Register.TS_GRID) == m._utc() @@ -1873,8 +1909,8 @@ def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp21): assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.mb.err == 5 assert m.msg_count == 2 - assert m._forward_buffer==msg_modbus_rsp21 - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_rsp21 + assert m.ifc.tx_fifo.get()==b'' assert m.db.db == {'collector': {'Serial_Number': 'R170000000000001'}, 'inverter': {'Version': 'V5.1.0E', 'Rated_Power': 300}, 'grid': {'Timestamp': m._utc(), 'Voltage': 225.9, 'Current': 0.41, 'Frequency': 49.99, 'Output_Power': 94.8}, 'env': {'Inverter_Temp': 22}, 'input': {'Timestamp': m._utc(), 'pv1': {'Voltage': 0.8, 'Current': 0.0, 'Power': 0.0}, 'pv2': {'Voltage': 34.5, 'Current': 2.89, 'Power': 99.8}, 'pv3': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}, 'pv4': {'Voltage': 0.0, 'Current': 0.0, 'Power': 0.0}}} assert m.db.get_db_value(Register.VERSION) == 'V5.1.0E' assert m.db.get_db_value(Register.TS_GRID) == m._utc() @@ -1902,9 +1938,9 @@ def test_msg_modbus_rsp4(config_tsun_inv1, msg_modbus_rsp21): assert not m.header_valid # must be invalid, since msg was handled and buffer flushed assert m.mb.err == 0 assert m.msg_count == 1 - assert m._forward_buffer==msg_modbus_rsp21 + assert 
m.ifc.fwd_fifo.get()==msg_modbus_rsp21 assert m.modbus_elms == 19 - assert m._send_buffer==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.db == db_values assert m.db.get_db_value(Register.VERSION) == 'V5.1.0E' assert m.db.get_db_value(Register.TS_GRID) == m._utc() @@ -1927,8 +1963,8 @@ def test_msg_modbus_rsp_new(config_tsun_inv1, msg_modbus_rsp20_new): assert m.msg_id==135 assert m.header_len==23 assert m.data_len==107 - assert m._forward_buffer==b'' - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==b'' + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1947,8 +1983,8 @@ def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_inv): assert m.msg_id==119 assert m.header_len==23 assert m.data_len==13 - assert m._forward_buffer==msg_modbus_inv - assert m._send_buffer==b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_inv + assert m.ifc.tx_fifo.get()==b'' assert m.db.stat['proxy']['Unknown_Ctrl'] == 1 assert m.db.stat['proxy']['Modbus_Command'] == 0 m.close() @@ -1976,8 +2012,8 @@ def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp20): assert m.msg_id == 119 assert m.header_len == 23 assert m.data_len == 50 - assert m._forward_buffer==msg_modbus_rsp20 - assert m._send_buffer == b'' + assert m.ifc.fwd_fifo.get()==msg_modbus_rsp20 + assert m.ifc.tx_fifo.get() == b'' assert m.mb.err == 0 assert m.modbus_elms == 20-1 # register 0x300d is unknown, so one value can't be mapped assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 @@ -1991,24 +2027,16 @@ async def test_msg_build_modbus_req(config_tsun_inv1, msg_modbus_cmd): m.id_str = b"R170000000000001" await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG) assert 0 == m.send_msg_ofs - assert m._forward_buffer == b'' - assert m._send_buffer == b'' - assert m.writer.sent_pdu == b'' + assert m.ifc.fwd_fifo.get() == b'' + assert m.ifc.tx_fifo.get() == b'' + assert m.sent_pdu == b'' m.state = State.up await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG) assert 0 == m.send_msg_ofs - assert m._forward_buffer == b'' - assert m._send_buffer == b'' - assert m.writer.sent_pdu == msg_modbus_cmd - - m.writer.sent_pdu = bytearray(0) # clear send buffer for next test - m.test_exception_async_write = True - await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG) - assert 0 == m.send_msg_ofs - assert m._forward_buffer == b'' - assert m._send_buffer == b'' - assert m.writer.sent_pdu == b'' + assert m.ifc.fwd_fifo.get() == b'' + assert m.ifc.tx_fifo.get() == b'' + assert m.sent_pdu == msg_modbus_cmd m.close() def test_modbus_no_polling(config_no_modbus_poll, msg_get_time): @@ -2026,8 +2054,8 @@ def test_modbus_no_polling(config_no_modbus_poll, msg_get_time): assert m.header_len==23 assert m.ts_offset==0 assert m.data_len==0 - assert m._forward_buffer==msg_get_time - assert m._send_buffer==b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00' + assert m.ifc.fwd_fifo.get()==msg_get_time + assert m.ifc.tx_fifo.get()==b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x89\xc6,_\x00' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 m.close() @@ -2050,25 +2078,25 @@ async def test_modbus_polling(config_tsun_inv1, msg_inverter_ind): assert m.header_len==23 assert m.ts_offset==0 assert m.data_len==120 - assert m._forward_buffer==msg_inverter_ind - assert m._send_buffer==b'\x00\x00\x00\x14\x10R170000000000001\x99\x04\x01' + assert m.ifc.fwd_fifo.get()==msg_inverter_ind + assert 
m.ifc.tx_fifo.get()==b'\x00\x00\x00\x14\x10R170000000000001\x99\x04\x01' assert m.db.stat['proxy']['Unknown_Ctrl'] == 0 - m._send_buffer = bytearray(0) # clear send buffer for next test + m.ifc.tx_clear() # clear send buffer for next test assert isclose(m.mb_timeout, 0.5) assert next(m.mb_timer.exp_count) == 0 await asyncio.sleep(0.5) - assert m.writer.sent_pdu==b'\x00\x00\x00 \x10R170000000000001pw\x00\x01\xa3(\x08\x01\x030\x00\x000J\xde' - assert m._send_buffer==b'' + assert m.sent_pdu==b'\x00\x00\x00 \x10R170000000000001pw\x00\x01\xa3(\x08\x01\x030\x00\x000J\xde' + assert m.ifc.tx_fifo.get()==b'' await asyncio.sleep(0.5) - assert m.writer.sent_pdu==b'\x00\x00\x00 \x10R170000000000001pw\x00\x01\xa3(\x08\x01\x030\x00\x000J\xde' - assert m._send_buffer==b'' + assert m.sent_pdu==b'\x00\x00\x00 \x10R170000000000001pw\x00\x01\xa3(\x08\x01\x030\x00\x000J\xde' + assert m.ifc.tx_fifo.get()==b'' await asyncio.sleep(0.5) - assert m.writer.sent_pdu==b'\x00\x00\x00 \x10R170000000000001pw\x00\x01\xa3(\x08\x01\x03\x20\x00\x00`N"' - assert m._send_buffer==b'' + assert m.sent_pdu==b'\x00\x00\x00 \x10R170000000000001pw\x00\x01\xa3(\x08\x01\x03\x20\x00\x00`N"' + assert m.ifc.tx_fifo.get()==b'' assert next(m.mb_timer.exp_count) == 4 m.close() @@ -2111,3 +2139,15 @@ def test_multiiple_recv_buf(config_tsun_allow_all, multiple_recv_buf): assert m.db.stat['proxy']['Invalid_Data_Type'] == 1 m.close() + +def test_timeout(config_tsun_inv1): + _ = config_tsun_inv1 + m = MemoryStream(b'') + assert m.state == State.init + assert Talent.MAX_START_TIME == m._timeout() + m.state = State.up + m.modbus_polling = True + assert Talent.MAX_INV_IDLE_TIME == m._timeout() + m.modbus_polling = False + assert Talent.MAX_DEF_IDLE_TIME == m._timeout() + m.close() diff --git a/tsun.code-workspace b/tsun.code-workspace new file mode 100644 index 0000000..0bb7570 --- /dev/null +++ b/tsun.code-workspace @@ -0,0 +1,11 @@ +{ + "folders": [ + { + "path": "." + }, + { + "path": "../wiki" + } + ], + "settings": {} +} \ No newline at end of file From 719c6f703a5e422cf249f9a87320a8ab4cdf1fec Mon Sep 17 00:00:00 2001 From: Stefan Allius <122395479+s-allius@users.noreply.github.com> Date: Sun, 13 Oct 2024 17:13:07 +0200 Subject: [PATCH 37/39] S allius/issue196 (#198) * fix healthcheck - on infrastructure with IPv6 support localhost might be resolved to an IPv6 adress. 
Since the proxy only support IPv4 for now, we replace localhost by 127.0.0.1, to fix this --- CHANGELOG.md | 1 + app/src/inverter_base.py | 2 +- docker-compose.yaml | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 618da05..7e6b801 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +- fix healthcheck on infrastructure with IPv6 support [#196](https://github.com/s-allius/tsun-gen3-proxy/issues/196) - refactoring: cleaner architecture, increase test coverage - Parse more values in Server Mode [#186](https://github.com/s-allius/tsun-gen3-proxy/issues/186) - GEN3: add support for new messages of version 3 firmwares [#182](https://github.com/s-allius/tsun-gen3-proxy/issues/182) diff --git a/app/src/inverter_base.py b/app/src/inverter_base.py index 8acb01e..a493d8b 100644 --- a/app/src/inverter_base.py +++ b/app/src/inverter_base.py @@ -62,7 +62,7 @@ class InverterBase(InverterIfc, Proxy): # now explicitly call garbage collector to release unreachable objects unreachable_obj = gc.collect() - logging.info( + logging.debug( f'InverterBase.__exit: freed unreachable obj: {unreachable_obj}') def __del_remote(self): diff --git a/docker-compose.yaml b/docker-compose.yaml index 9b4f9e8..3a48099 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -83,7 +83,7 @@ services: - ${PROJECT_DIR:-./}tsun-proxy/log:/home/tsun-proxy/log - ${PROJECT_DIR:-./}tsun-proxy/config:/home/tsun-proxy/config healthcheck: - test: wget --no-verbose --tries=1 --spider http://localhost:8127/-/healthy || exit 1 + test: wget --no-verbose --tries=1 --spider http://127.0.0.1:8127/-/healthy || exit 1 interval: 10s timeout: 3s networks: From d4b618742c19f60a0e89036f666af3519912b731 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 13 Oct 2024 17:31:55 +0200 Subject: [PATCH 38/39] merge from main From a6ffcc0949fb86cca7abfb7a1ab1f2d7fe8b0981 Mon Sep 17 00:00:00 2001 From: Stefan Allius Date: Sun, 13 Oct 2024 18:24:00 +0200 Subject: [PATCH 39/39] update version 0.11 --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e6b801..c2bd297 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased] +## [0.11.0] - 2024-10-13 + - fix healthcheck on infrastructure with IPv6 support [#196](https://github.com/s-allius/tsun-gen3-proxy/issues/196) - refactoring: cleaner architecture, increase test coverage - Parse more values in Server Mode [#186](https://github.com/s-allius/tsun-gen3-proxy/issues/186)
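Note on the healthcheck change in [PATCH 37/39]: on a dual-stack host, "localhost" may resolve to the IPv6 loopback ::1 before 127.0.0.1, while the proxy currently serves its health endpoint over IPv4 only, so the wget probe can fail even though the proxy is running. A minimal sketch, assuming the health endpoint on port 8127 from the docker-compose change above, that shows how to inspect the resolution order with the Python standard library:

    import socket

    # Resolve 'localhost' roughly the way the healthcheck client does before
    # connecting. On a dual-stack host the IPv6 loopback ::1 may come first,
    # so a server bound only to IPv4 looks "down" to the probe.
    for family, _, _, _, sockaddr in socket.getaddrinfo(
            "localhost", 8127, proto=socket.IPPROTO_TCP):
        print(socket.AddressFamily(family).name, sockaddr[0])

    # Pinning the IPv4 loopback, as the patched healthcheck does, removes
    # the ambiguity:
    for family, _, _, _, sockaddr in socket.getaddrinfo(
            "127.0.0.1", 8127, proto=socket.IPPROTO_TCP):
        print(socket.AddressFamily(family).name, sockaddr[0])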