From 888e1475e45661e1c5746f86a57e7d40e37504a5 Mon Sep 17 00:00:00 2001
From: Stefan Allius <122395479+s-allius@users.noreply.github.com>
Date: Sun, 4 May 2025 18:50:31 +0200
Subject: [PATCH 1/8] S allius/issue397 (#405)
* add Dashboard's log handler to all known loggers
* add list of last 3 warnings/errors to page
* add note list to page
* create LogHandler for the dashboard
- simple memory log handler which stores the last
64 warnings/errors for the dashboard (see the sketch after this list)
* render warnings/errors as note list
* add page for warnings and errors
* fix double defined build target
* add well done message if no errors in the logs
* translate page titles
* more translations
* add Notes page and table for important messages
* add unit tests
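
For reference, a minimal standalone sketch of the idea (the class name
MemoryLogHandler is illustrative; the real handler in
app/src/web/log_handler.py additionally uses the project's Singleton
metaclass so that every dashboard page reads the same buffer):

    import logging
    from collections import deque

    class MemoryLogHandler(logging.Handler):
        """Keep only the last `capacity` warning/error records in memory."""
        def __init__(self, capacity=64):
            super().__init__(logging.WARNING)      # ignore DEBUG/INFO records
            self.buffer = deque(maxlen=capacity)   # oldest entries drop out automatically

        def emit(self, record: logging.LogRecord):
            self.buffer.append({'ctime': record.created,
                                'lname': record.levelname,
                                'msg': record.getMessage()})

        def get_buffer(self, elms=0) -> list:
            return list(self.buffer)[-elms:]       # last `elms` entries; all of them for 0

    handler = MemoryLogHandler()
    logging.getLogger().addHandler(handler)
    logging.warning('something worth showing on the dashboard')
    print(handler.get_buffer(3))                   # what the notes list renders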
---
CHANGELOG.md | 1 +
Makefile | 2 +-
app/src/web/__init__.py | 7 ++
app/src/web/conn_table.py | 6 +-
app/src/web/log_handler.py | 24 +++++++
app/src/web/mqtt_table.py | 6 ++
app/src/web/notes_list.py | 19 +++++
app/src/web/pages.py | 7 ++
app/src/web/templates/base.html.j2 | 3 +-
app/src/web/templates/page_index.html.j2 | 2 +-
app/src/web/templates/page_logging.html.j2 | 4 +-
app/src/web/templates/page_mqtt.html.j2 | 3 +-
app/src/web/templates/page_notes.html.j2 | 10 +++
.../web/templates/templ_notes_list.html.j2 | 23 ++++++
app/tests/test_web_route.py | 16 +++++
app/translations/de/LC_MESSAGES/messages.po | 71 +++++++++++--------
16 files changed, 168 insertions(+), 36 deletions(-)
create mode 100644 app/src/web/log_handler.py
create mode 100644 app/src/web/notes_list.py
create mode 100644 app/src/web/templates/page_notes.html.j2
diff --git a/CHANGELOG.md b/CHANGELOG.md
index eedb850..46be5df 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [unreleased]
+- Dashboard: add Notes page and table for important messages
- Dashboard: add Log-File page
- Dashboard: add Connection page
- add web UI to add-on
diff --git a/Makefile b/Makefile
index b98463f..5964e6e 100644
--- a/Makefile
+++ b/Makefile
@@ -6,7 +6,7 @@ babel:
build:
$(MAKE) -C ha_addons $@
-clean build:
+clean:
$(MAKE) -C app $@
$(MAKE) -C ha_addons $@
diff --git a/app/src/web/__init__.py b/app/src/web/__init__.py
index 24b0e87..ef3728c 100644
--- a/app/src/web/__init__.py
+++ b/app/src/web/__init__.py
@@ -7,6 +7,8 @@ Usage:
from quart import Quart, Blueprint
from quart_babel import Babel
from utils import load_modules
+from .log_handler import LogHandler
+import logging
web = Blueprint('web', __name__)
@@ -30,3 +32,8 @@ class Web:
locale_selector=get_locale,
timezone_selector=get_tz,
default_translation_directories=translation_directories)
+
+ h = LogHandler()
+ logging.getLogger().addHandler(h)
+ for name in logging.root.manager.loggerDict:
+ logging.getLogger(name).addHandler(h)
diff --git a/app/src/web/conn_table.py b/app/src/web/conn_table.py
index ae6fe38..4b81868 100644
--- a/app/src/web/conn_table.py
+++ b/app/src/web/conn_table.py
@@ -4,6 +4,7 @@ from quart_babel import format_datetime, _
from infos import Infos
from . import web
+from .log_handler import LogHandler
def _get_device_icon(client_mode: bool):
@@ -79,5 +80,8 @@ async def data_fetch():
data["conn-table"] = await render_template('templ_table.html.j2',
table=get_table_data())
- data["notes-list"] = await render_template('templ_notes_list.html.j2')
+ data["notes-list"] = await render_template(
+ 'templ_notes_list.html.j2',
+ notes=LogHandler().get_buffer(3),
+ hide_if_empty=True)
return data
diff --git a/app/src/web/log_handler.py b/app/src/web/log_handler.py
new file mode 100644
index 0000000..7565649
--- /dev/null
+++ b/app/src/web/log_handler.py
@@ -0,0 +1,24 @@
+from logging import Handler
+from logging import LogRecord
+import logging
+from collections import deque
+
+from singleton import Singleton
+
+
+class LogHandler(Handler, metaclass=Singleton):
+ def __init__(self, capacity=64):
+ super().__init__(logging.WARNING)
+ self.capacity = capacity
+ self.buffer = deque(maxlen=capacity)
+
+ def emit(self, record: LogRecord):
+ self.buffer.append({
+ 'ctime': record.created,
+ 'level': record.levelno,
+ 'lname': record.levelname,
+ 'msg': record.getMessage()
+ })
+
+ def get_buffer(self, elms=0) -> list:
+ return list(self.buffer)[-elms:]
diff --git a/app/src/web/mqtt_table.py b/app/src/web/mqtt_table.py
index fcd0477..8370c17 100644
--- a/app/src/web/mqtt_table.py
+++ b/app/src/web/mqtt_table.py
@@ -4,6 +4,7 @@ from quart_babel import format_datetime, _
from mqtt import Mqtt
from . import web
+from .log_handler import LogHandler
def _get_row(inv: InverterBase):
@@ -55,4 +56,9 @@ async def mqtt_fetch():
data["mqtt-table"] = await render_template('templ_table.html.j2',
table=get_table_data())
+ data["notes-list"] = await render_template(
+ 'templ_notes_list.html.j2',
+ notes=LogHandler().get_buffer(3),
+ hide_if_empty=True)
+
return data
diff --git a/app/src/web/notes_list.py b/app/src/web/notes_list.py
new file mode 100644
index 0000000..e96c319
--- /dev/null
+++ b/app/src/web/notes_list.py
@@ -0,0 +1,19 @@
+from quart import render_template
+from quart_babel import format_datetime
+
+from . import web
+from .log_handler import LogHandler
+
+
+@web.route('/notes-fetch')
+async def notes_fetch():
+ data = {
+ "update-time": format_datetime(format="medium"),
+ }
+
+ data["notes-list"] = await render_template(
+ 'templ_notes_list.html.j2',
+ notes=LogHandler().get_buffer(),
+ hide_if_empty=False)
+
+ return data
diff --git a/app/src/web/pages.py b/app/src/web/pages.py
index f365239..49d720a 100644
--- a/app/src/web/pages.py
+++ b/app/src/web/pages.py
@@ -18,6 +18,13 @@ async def mqtt():
fetch_url=url_for('.mqtt_fetch'))
+@web.route('/notes')
+async def notes():
+ return await render_template(
+ 'page_notes.html.j2',
+ fetch_url=url_for('.notes_fetch'))
+
+
@web.route('/logging')
async def logging():
return await render_template(
diff --git a/app/src/web/templates/base.html.j2 b/app/src/web/templates/base.html.j2
index 34b6f57..0ef1f7e 100644
--- a/app/src/web/templates/base.html.j2
+++ b/app/src/web/templates/base.html.j2
@@ -57,7 +57,8 @@
-
+
+
diff --git a/app/src/web/templates/page_index.html.j2 b/app/src/web/templates/page_index.html.j2
index f8364c5..65cae0d 100644
--- a/app/src/web/templates/page_index.html.j2
+++ b/app/src/web/templates/page_index.html.j2
@@ -1,6 +1,6 @@
{% extends 'base.html.j2' %}
-{% block title %} TSUN Proxy - Connections {% endblock title%}
+{% block title %}{{_("TSUN Proxy - Connections")}}{% endblock title %}
{% block menu1_class %}w3-blue{% endblock %}
{% block headline %} {{_('Proxy Connection Overview')}}{% endblock headline %}
diff --git a/app/src/web/templates/page_logging.html.j2 b/app/src/web/templates/page_logging.html.j2
index 720a1e6..f80763c 100644
--- a/app/src/web/templates/page_logging.html.j2
+++ b/app/src/web/templates/page_logging.html.j2
@@ -1,7 +1,7 @@
{% extends 'base.html.j2' %}
-{% block title %} TSUN Proxy - Log Files {% endblock title%}
-{% block menu3_class %}w3-blue{% endblock %}
+{% block title %}{{_("TSUN Proxy - Log Files")}}{% endblock title %}
+{% block menu4_class %}w3-blue{% endblock %}
{% block headline %} {{_('Log Files')}}{% endblock headline %}
{% block content %}
diff --git a/app/src/web/templates/page_mqtt.html.j2 b/app/src/web/templates/page_mqtt.html.j2
index f1010de..0f23492 100644
--- a/app/src/web/templates/page_mqtt.html.j2
+++ b/app/src/web/templates/page_mqtt.html.j2
@@ -1,6 +1,6 @@
{% extends 'base.html.j2' %}
-{% block title %} TSUN Proxy - MQTT Status {% endblock title%}
+{% block title %}{{_("TSUN Proxy - MQTT Status")}}{% endblock title %}
{% block menu2_class %}w3-blue{% endblock %}
{% block headline %} {{_('MQTT Overview')}}{% endblock headline %}
{% block content %}
@@ -45,6 +45,7 @@
+
{% endblock content%}
diff --git a/app/src/web/templates/page_notes.html.j2 b/app/src/web/templates/page_notes.html.j2
new file mode 100644
index 0000000..495e5e2
--- /dev/null
+++ b/app/src/web/templates/page_notes.html.j2
@@ -0,0 +1,10 @@
+{% extends 'base.html.j2' %}
+
+{% block title %}{{_("TSUN Proxy - Important Messages")}}{% endblock title %}
+{% block menu3_class %}w3-blue{% endblock %}
+{% block headline %} {{_('Important Messages')}}{% endblock headline %}
+{% block content %}
+
+{% endblock content%}
+
+{% block footer %}{% endblock footer %}
diff --git a/app/src/web/templates/templ_notes_list.html.j2 b/app/src/web/templates/templ_notes_list.html.j2
index e69de29..6d5d98e 100644
--- a/app/src/web/templates/templ_notes_list.html.j2
+++ b/app/src/web/templates/templ_notes_list.html.j2
@@ -0,0 +1,23 @@
+{% if notes|length > 0 %}
+
+
{{_("Warnings and error messages")}}
+
+ {% for note in notes %}
+ -
+ {{note.ctime|datetimeformat(format='short')}}
+ {{note.lname|e}}
+ {{note.msg|e}}
+
+ {% endfor %}
+
+
+{% elif not hide_if_empty %}
+
+
+
+
{{_("Well done!")}}
+
{{_("No warnings or errors have been logged since the last proxy start.")}}
+
+
+
+{% endif %}
\ No newline at end of file
diff --git a/app/tests/test_web_route.py b/app/tests/test_web_route.py
index cf5f307..4ed9b36 100644
--- a/app/tests/test_web_route.py
+++ b/app/tests/test_web_route.py
@@ -68,6 +68,13 @@ async def test_rel_page(client):
assert response.mimetype == 'text/html'
web.build_relative_urls = False
+@pytest.mark.asyncio
+async def test_notes(client):
+ """Test the notes page route."""
+ response = await client.get('/notes')
+ assert response.status_code == 200
+ assert response.mimetype == 'text/html'
+
@pytest.mark.asyncio
async def test_logging(client):
"""Test the logging page route."""
@@ -185,6 +192,15 @@ async def test_mqtt_fetch(client, create_inverter):
assert response.status_code == 200
+@pytest.mark.asyncio
+async def test_notes_fetch(client, config_conn):
+ """Test the notes-fetch route."""
+ _ = create_inverter
+
+ response = await client.get('/notes-fetch')
+ assert response.status_code == 200
+
+
@pytest.mark.asyncio
async def test_file_fetch(client, config_conn):
"""Test the data-fetch route."""
diff --git a/app/translations/de/LC_MESSAGES/messages.po b/app/translations/de/LC_MESSAGES/messages.po
index f4284ab..8daf333 100644
--- a/app/translations/de/LC_MESSAGES/messages.po
+++ b/app/translations/de/LC_MESSAGES/messages.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: tsun-gen3-proxy 0.14.0\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
-"POT-Creation-Date: 2025-05-03 21:59+0200\n"
+"POT-Creation-Date: 2025-05-04 18:16+0200\n"
"PO-Revision-Date: 2025-04-18 16:24+0200\n"
"Last-Translator: FULL NAME \n"
"Language: de\n"
@@ -19,39 +19,39 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.17.0\n"
-#: src/web/conn_table.py:51 src/web/templates/base.html.j2:58
+#: src/web/conn_table.py:52 src/web/templates/base.html.j2:58
msgid "Connections"
msgstr "Verbindungen"
-#: src/web/conn_table.py:58
+#: src/web/conn_table.py:59
msgid "Device-IP:Port"
msgstr "Geräte-IP:Port"
-#: src/web/conn_table.py:58
+#: src/web/conn_table.py:59
msgid "Device-IP"
msgstr "Geräte-IP"
-#: src/web/conn_table.py:59 src/web/mqtt_table.py:33
+#: src/web/conn_table.py:60 src/web/mqtt_table.py:34
msgid "Serial-No"
msgstr "Seriennummer"
-#: src/web/conn_table.py:60
+#: src/web/conn_table.py:61
msgid "Cloud-IP:Port"
msgstr "Cloud-IP:Port"
-#: src/web/conn_table.py:60
+#: src/web/conn_table.py:61
msgid "Cloud-IP"
msgstr "Cloud-IP"
-#: src/web/mqtt_table.py:26
+#: src/web/mqtt_table.py:27
msgid "MQTT devices"
msgstr "MQTT Geräte"
-#: src/web/mqtt_table.py:34
+#: src/web/mqtt_table.py:35
msgid "Node-ID"
msgstr ""
-#: src/web/mqtt_table.py:35
+#: src/web/mqtt_table.py:36
msgid "HA-Area"
msgstr ""
@@ -63,10 +63,18 @@ msgstr "Aktualisiert:"
msgid "Version:"
msgstr ""
-#: src/web/templates/base.html.j2:60 src/web/templates/page_logging.html.j2:5
+#: src/web/templates/base.html.j2:60 src/web/templates/page_notes.html.j2:5
+msgid "Important Messages"
+msgstr "Wichtige Hinweise"
+
+#: src/web/templates/base.html.j2:61 src/web/templates/page_logging.html.j2:5
msgid "Log Files"
msgstr "Log Dateien"
+#: src/web/templates/page_index.html.j2:3
+msgid "TSUN Proxy - Connections"
+msgstr "TSUN Proxy - Verbindungen"
+
#: src/web/templates/page_index.html.j2:5
msgid "Proxy Connection Overview"
msgstr "Proxy Verbindungen"
@@ -103,6 +111,10 @@ msgstr "Emu Modus"
msgid "Emulation sends data to cloud"
msgstr "Emulation sendet in die Cloud"
+#: src/web/templates/page_logging.html.j2:3
+msgid "TSUN Proxy - Log Files"
+msgstr "TSUN Proxy - Log Dateien"
+
#: src/web/templates/page_logging.html.j2:10
msgid "Do you really want to delete the log file"
msgstr "Soll die Datei wirklich gelöscht werden"
@@ -115,6 +127,10 @@ msgstr "File löschen"
msgid "Abort"
msgstr "Abbruch"
+#: src/web/templates/page_mqtt.html.j2:3
+msgid "TSUN Proxy - MQTT Status"
+msgstr ""
+
#: src/web/templates/page_mqtt.html.j2:5
msgid "MQTT Overview"
msgstr "MQTT Überblick"
@@ -143,6 +159,10 @@ msgstr "Empfangene Topics"
msgid "Number of topics received"
msgstr "Anzahl der empfangenen Topics"
+#: src/web/templates/page_notes.html.j2:3
+msgid "TSUN Proxy - Important Messages"
+msgstr "TSUN Proxy - Wichtige Hinweise"
+
#: src/web/templates/templ_log_files_list.html.j2:11
msgid "Created"
msgstr "Erzeugt"
@@ -159,24 +179,17 @@ msgstr "Größe"
msgid "Download File"
msgstr "Datei Download"
-#~ msgid "MQTT Server"
-#~ msgstr ""
+#: src/web/templates/templ_notes_list.html.j2:3
+msgid "Warnings and error messages"
+msgstr "Warnungen und Fehlermeldungen"
-#~ msgid "MQTT User"
-#~ msgstr ""
+#: src/web/templates/templ_notes_list.html.j2:18
+msgid "Well done!"
+msgstr "Gut gemacht!"
-#~ msgid "MQTT Connected"
-#~ msgstr ""
-
-#~ msgid "Home Assistant Status"
-#~ msgstr ""
-
-#~ msgid "MQTT Publish Count"
-#~ msgstr ""
-
-#~ msgid "MQTT Reveiced Count"
-#~ msgstr ""
-
-#~ msgid "MQTT Connect Time"
-#~ msgstr "MQTT Verbindungszeit"
+#: src/web/templates/templ_notes_list.html.j2:19
+msgid "No warnings or errors have been logged since the last proxy start."
+msgstr ""
+"Seit dem letzten Proxystart wurden keine Warnungen oder Fehler "
+"protokolliert."
From f1628a0629d9dff7cb536207184ba3281d9e7b83 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Sun, 4 May 2025 19:20:45 +0200
Subject: [PATCH 2/8] Update dependency aiomqtt to v2.4.0 (#404)
* Update dependency aiomqtt to v2.4.0
* update changelog
---------
Co-authored-by: Stefan Allius <122395479+s-allius@users.noreply.github.com>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Stefan Allius
---
app/requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/app/requirements.txt b/app/requirements.txt
index c26f354..2a100b6 100644
--- a/app/requirements.txt
+++ b/app/requirements.txt
@@ -1,4 +1,4 @@
- aiomqtt==2.3.2
+ aiomqtt==2.4.0
schema==0.7.7
aiocron==2.1
quart==0.20
From 48965ffda9650ec71bf2d01c322956b515d6323f Mon Sep 17 00:00:00 2001
From: Stefan Allius <122395479+s-allius@users.noreply.github.com>
Date: Sat, 10 May 2025 19:32:13 +0200
Subject: [PATCH 3/8] S allius/issue398 (#406)
* setup logger for hypercorn and dashboard
* use logger.ini to setup dashboard logger
* workaround: restore the hypercorn logger config
- quart/hypercorn overwrites the logger config;
as a workaround we restore the config at the
beginning of a request (see the sketch after this list)
* fix the hypercorn log handler only once
* change proxy into an ASGI application
- move Quart init from server.py into app.py
- create Server class for config and logging setup
- restore hypercorn logging configuration after
start of Quart/Hypercorn
* move get_log_level into Server class
* define config in test_emu_init_close
* remove Web() instance from the testcase
- importing app.py already creates the Web() blueprint
automatically, so a second call in test cases
must be avoided
* add unit tests
* move code from app.py into server.py
* test the init_logging_system() method
* add HypercornLogHndl tests
* fix deprecated pytest async warning
- Cleanup pending async tasks
- fix deprecated warning about event_loop
* add unit test for error handling in build_config()
* coverage: ignore quart template files
* check print output in test_save_and_restore
* update changelog
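
The logger-restore workaround above boils down to snapshotting the handler
lists before Hypercorn starts serving and writing them back if they were
replaced. A condensed sketch (the class name HypercornLogFix is illustrative;
the real implementation is the HypercornLogHndl class in app/src/server.py,
with save() called from @app.before_serving and restore() from
@app.before_request):

    import logging

    class HypercornLogFix:
        """Snapshot and restore the hypercorn logger handlers."""
        saved: dict = {}

        @classmethod
        def save(cls):
            # remember the handler lists configured via logging.ini
            for name in ('hypercorn.access', 'hypercorn.error'):
                cls.saved[name] = logging.getLogger(name).handlers

        @classmethod
        def restore(cls):
            # Quart/Hypercorn may have swapped them during startup -> put them back
            for name, handlers in cls.saved.items():
                if logging.getLogger(name).handlers != handlers:
                    logging.getLogger(name).handlers = handlers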
---
.coveragerc | 1 +
CHANGELOG.md | 4 +
app/src/logging.ini | 33 ++-
app/src/server.py | 348 ++++++++++++++++++------------
app/src/web/__init__.py | 7 -
app/tests/cnf/invalid_config.toml | 1 +
app/tests/conftest.py | 20 ++
app/tests/test_inverter_base.py | 23 +-
app/tests/test_inverter_g3.py | 13 +-
app/tests/test_inverter_g3p.py | 13 +-
app/tests/test_modbus.py | 47 ++--
app/tests/test_server.py | 246 ++++++++++++++++-----
app/tests/test_solarman.py | 171 ++++++++++-----
app/tests/test_solarman_emu.py | 20 +-
app/tests/test_talent.py | 3 +-
app/tests/test_web_route.py | 1 -
ha_addons/ha_addon/rootfs/run.sh | 2 +-
17 files changed, 648 insertions(+), 305 deletions(-)
create mode 100644 app/tests/cnf/invalid_config.toml
create mode 100644 app/tests/conftest.py
diff --git a/.coveragerc b/.coveragerc
index 398ff08..2626233 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,2 +1,3 @@
[run]
branch = True
+omit = app/src/web/templates/*.html.j2
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 46be5df..5e6e1ef 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [unreleased]
+- fix a lot of pytest-asyncio problems in the unit tests
+- Cleanup startup code for Quart and the Proxy
+- Redirect the hypercorn traces to a separate log-file
+- Configure the dashboard trace handler by the logging.ini file
- Dashboard: add Notes page and table for important messages
- Dashboard: add Log-File page
- Dashboard: add Connection page
diff --git a/app/src/logging.ini b/app/src/logging.ini
index 6be3905..fa84079 100644
--- a/app/src/logging.ini
+++ b/app/src/logging.ini
@@ -1,16 +1,15 @@
[loggers]
-keys=root,tracer,mesg,conn,data,mqtt,asyncio
+keys=root,tracer,mesg,conn,data,mqtt,asyncio,hypercorn_access,hypercorn_error
[handlers]
-keys=console_handler,file_handler_name1,file_handler_name2
+keys=console_handler,file_handler_name1,file_handler_name2,file_handler_name3,dashboard
[formatters]
keys=console_formatter,file_formatter
[logger_root]
level=DEBUG
-handlers=console_handler,file_handler_name1
-
+handlers=console_handler,file_handler_name1,dashboard
[logger_conn]
level=DEBUG
@@ -20,13 +19,13 @@ qualname=conn
[logger_mqtt]
level=INFO
-handlers=console_handler,file_handler_name1
+handlers=console_handler,file_handler_name1,dashboard
propagate=0
qualname=mqtt
[logger_asyncio]
level=INFO
-handlers=console_handler,file_handler_name1
+handlers=console_handler,file_handler_name1,dashboard
propagate=0
qualname=asyncio
@@ -49,6 +48,18 @@ handlers=file_handler_name2
propagate=0
qualname=tracer
+[logger_hypercorn_access]
+level=INFO
+handlers=file_handler_name3
+propagate=0
+qualname=hypercorn.access
+
+[logger_hypercorn_error]
+level=INFO
+handlers=file_handler_name1,dashboard
+propagate=0
+qualname=hypercorn.error
+
[handler_console_handler]
class=StreamHandler
level=DEBUG
@@ -66,6 +77,16 @@ level=NOTSET
formatter=file_formatter
args=(handlers.log_path + 'trace.log', when:='midnight', backupCount:=handlers.log_backups)
+[handler_file_handler_name3]
+class=handlers.TimedRotatingFileHandler
+level=NOTSET
+formatter=file_formatter
+args=(handlers.log_path + 'access.log', when:='midnight', backupCount:=handlers.log_backups)
+
+[handler_dashboard]
+level=WARNING
+class=web.log_handler.LogHandler
+
[formatter_console_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s
datefmt=%Y-%m-%d %H:%M:%S
diff --git a/app/src/server.py b/app/src/server.py
index f9f8206..26ee093 100644
--- a/app/src/server.py
+++ b/app/src/server.py
@@ -1,26 +1,161 @@
import logging
-import asyncio
import logging.handlers
+from logging import config # noqa F401
+import asyncio
+from asyncio import StreamReader, StreamWriter
import os
import argparse
-from asyncio import StreamReader, StreamWriter
from quart import Quart, Response
-from logging import config # noqa F401
-from proxy import Proxy
-from inverter_ifc import InverterIfc
-from gen3.inverter_g3 import InverterG3
-from gen3plus.inverter_g3p import InverterG3P
-from scheduler import Schedule
+
from cnf.config import Config
from cnf.config_read_env import ConfigReadEnv
from cnf.config_read_toml import ConfigReadToml
from cnf.config_read_json import ConfigReadJson
from web import Web
from web.wrapper import url_for
+from proxy import Proxy
+from inverter_ifc import InverterIfc
+from gen3.inverter_g3 import InverterG3
+from gen3plus.inverter_g3p import InverterG3P
+from scheduler import Schedule
from modbus_tcp import ModbusTcp
+class Server():
+ serv_name = ''
+ version = ''
+ src_dir = ''
+
+ ####
+ # The following default values are used for the unit tests only, since
+ # `Server.parse_args()' will not be called during test setup.
+ # Of course, we can call `Server.parse_args()' in a test case explicitly
+ # to overwrite these values
+ config_path = './config/'
+ json_config = ''
+ toml_config = ''
+ trans_path = '../translations/'
+ rel_urls = False
+ log_path = './log/'
+ log_backups = 0
+ log_level = None
+
+ def __init__(self, app, parse_args: bool):
+ ''' Application Setup
+
+ 1. Read CLI arguments
+ 2. Init the logging system from the ini file
+ 3. Log the config parameters
+ 4. Set the log-levels
+ 5. Read and build the config for the app
+ '''
+ self.serv_name = os.getenv('SERVICE_NAME', 'proxy')
+ self.version = os.getenv('VERSION', 'unknown')
+ self.src_dir = os.path.dirname(__file__) + '/'
+ if parse_args: # pragma: no cover
+ self.parse_args(None)
+ self.init_logging_system()
+ self.build_config()
+
+ @app.context_processor
+ def utility_processor():
+ return dict(version=self.version)
+
+ def parse_args(self, arg_list: list[str] | None):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-c', '--config_path', type=str,
+ default='./config/',
+ help='set path for the configuration files')
+ parser.add_argument('-j', '--json_config', type=str,
+ help='read user config from json-file')
+ parser.add_argument('-t', '--toml_config', type=str,
+ help='read user config from toml-file')
+ parser.add_argument('-l', '--log_path', type=str,
+ default='./log/',
+ help='set path for the logging files')
+ parser.add_argument('-b', '--log_backups', type=int,
+ default=0,
+ help='set max number of daily log-files')
+ parser.add_argument('-tr', '--trans_path', type=str,
+ default='../translations/',
+ help='set path for the translations files')
+ parser.add_argument('-r', '--rel_urls', action="store_true",
+ help='use relative dashboard urls')
+ args = parser.parse_args(arg_list)
+
+ self.config_path = args.config_path
+ self.json_config = args.json_config
+ self.toml_config = args.toml_config
+ self.trans_path = args.trans_path
+ self.rel_urls = args.rel_urls
+ self.log_path = args.log_path
+ self.log_backups = args.log_backups
+
+ def init_logging_system(self):
+ setattr(logging.handlers, "log_path", self.log_path)
+ setattr(logging.handlers, "log_backups", self.log_backups)
+ os.makedirs(self.log_path, exist_ok=True)
+
+ logging.config.fileConfig(self.src_dir + 'logging.ini')
+
+ logging.info(
+ f'Server "{self.serv_name} - {self.version}" will be started')
+ logging.info(f'current dir: {os.getcwd()}')
+ logging.info(f"config_path: {self.config_path}")
+ logging.info(f"json_config: {self.json_config}")
+ logging.info(f"toml_config: {self.toml_config}")
+ logging.info(f"trans_path: {self.trans_path}")
+ logging.info(f"rel_urls: {self.rel_urls}")
+ logging.info(f"log_path: {self.log_path}")
+ if self.log_backups == 0:
+ logging.info("log_backups: unlimited")
+ else:
+ logging.info(f"log_backups: {self.log_backups} days")
+ self.log_level = self.get_log_level()
+ logging.info('******')
+ if self.log_level:
+ # set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
+ logging.getLogger().setLevel(self.log_level)
+ logging.getLogger('msg').setLevel(self.log_level)
+ logging.getLogger('conn').setLevel(self.log_level)
+ logging.getLogger('data').setLevel(self.log_level)
+ logging.getLogger('tracer').setLevel(self.log_level)
+ logging.getLogger('asyncio').setLevel(self.log_level)
+ # logging.getLogger('mqtt').setLevel(self.log_level)
+
+ def build_config(self):
+ # read config file
+ Config.init(ConfigReadToml(self.src_dir + "cnf/default_config.toml"),
+ log_path=self.log_path)
+ ConfigReadEnv()
+ ConfigReadJson(self.config_path + "config.json")
+ ConfigReadToml(self.config_path + "config.toml")
+ ConfigReadJson(self.json_config)
+ ConfigReadToml(self.toml_config)
+ config_err = Config.get_error()
+
+ if config_err is not None:
+ logging.info(f'config_err: {config_err}')
+ return
+
+ logging.info('******')
+
+ def get_log_level(self) -> int | None:
+ '''checks if LOG_LVL is set in the environment and returns the
+ corresponding logging.LOG_LEVEL'''
+ switch = {
+ 'DEBUG': logging.DEBUG,
+ 'WARN': logging.WARNING,
+ 'INFO': logging.INFO,
+ 'ERROR': logging.ERROR,
+ }
+ log_lvl = os.getenv('LOG_LVL', None)
+ logging.info(f"LOG_LVL : {log_lvl}")
+
+ return switch.get(log_lvl, None)
+
+
class ProxyState:
_is_up = False
@@ -33,11 +168,48 @@ class ProxyState:
ProxyState._is_up = value
+class HypercornLogHndl:
+ access_hndl = []
+ error_hndl = []
+ must_fix = False
+ HYPERC_ERR = 'hypercorn.error'
+ HYPERC_ACC = 'hypercorn.access'
+
+ @classmethod
+ def save(cls):
+ cls.access_hndl = logging.getLogger(
+ cls.HYPERC_ACC).handlers
+ cls.error_hndl = logging.getLogger(
+ cls.HYPERC_ERR).handlers
+ cls.must_fix = True
+
+ @classmethod
+ def restore(cls):
+ if not cls.must_fix:
+ return
+ cls.must_fix = False
+ access_hndl = logging.getLogger(
+ cls.HYPERC_ACC).handlers
+ if access_hndl != cls.access_hndl:
+ print(' * Fix hypercorn.access setting')
+ logging.getLogger(
+ cls.HYPERC_ACC).handlers = cls.access_hndl
+
+ error_hndl = logging.getLogger(
+ cls.HYPERC_ERR).handlers
+ if error_hndl != cls.error_hndl:
+ print(' * Fix hypercorn.error setting')
+ logging.getLogger(
+ cls.HYPERC_ERR).handlers = cls.error_hndl
+
+
app = Quart(__name__,
template_folder='web/templates',
static_folder='web/static')
app.secret_key = 'JKLdks.dajlKKKdladkflKwolafallsdfl'
app.jinja_env.globals.update(url_for=url_for)
+server = Server(app, __name__ == "__main__")
+Web(app, server.trans_path, server.rel_urls)
@app.route('/-/ready')
@@ -67,13 +239,36 @@ async def healthy():
return Response(status=200, response="I'm fine")
-async def handle_client(reader: StreamReader, writer: StreamWriter, inv_class):
+async def handle_client(reader: StreamReader,
+ writer: StreamWriter,
+ inv_class): # pragma: no cover
'''Handles a new incoming connection and starts an async loop'''
with inv_class(reader, writer) as inv:
await inv.local.ifc.server_loop()
+@app.before_serving
+async def startup_app(): # pragma: no cover
+ HypercornLogHndl.save()
+ loop = asyncio.get_event_loop()
+ Proxy.class_init()
+ Schedule.start()
+ ModbusTcp(loop)
+
+ for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]:
+ logging.info(f'listen on port: {port} for inverters')
+ loop.create_task(asyncio.start_server(lambda r, w, i=inv_class:
+ handle_client(r, w, i),
+ '0.0.0.0', port))
+ ProxyState.set_up(True)
+
+
+@app.before_request
+async def startup_request():
+ HypercornLogHndl.restore()
+
+
@app.after_serving
async def handle_shutdown(): # pragma: no cover
'''Close all TCP connections and stop the event loop'''
@@ -90,136 +285,15 @@ async def handle_shutdown(): # pragma: no cover
logging.info('Proxy disconnecting done')
- #
- # now cancel all remaining (pending) tasks
- #
- for task in asyncio.all_tasks():
- if task == asyncio.current_task():
- continue
- task.cancel()
- logging.info('Proxy cancelling done')
-
await Proxy.class_close(loop)
-def get_log_level() -> int | None:
- '''checks if LOG_LVL is set in the environment and returns the
- corresponding logging.LOG_LEVEL'''
- switch = {
- 'DEBUG': logging.DEBUG,
- 'WARN': logging.WARNING,
- 'INFO': logging.INFO,
- 'ERROR': logging.ERROR,
- }
- log_level = os.getenv('LOG_LVL', None)
- logging.info(f"LOG_LVL : {log_level}")
+if __name__ == "__main__": # pragma: no cover
- return switch.get(log_level, None)
-
-
-def main(): # pragma: no cover
- parser = argparse.ArgumentParser()
- parser.add_argument('-c', '--config_path', type=str,
- default='./config/',
- help='set path for the configuration files')
- parser.add_argument('-j', '--json_config', type=str,
- help='read user config from json-file')
- parser.add_argument('-t', '--toml_config', type=str,
- help='read user config from toml-file')
- parser.add_argument('-l', '--log_path', type=str,
- default='./log/',
- help='set path for the logging files')
- parser.add_argument('-b', '--log_backups', type=int,
- default=0,
- help='set max number of daily log-files')
- parser.add_argument('-tr', '--trans_path', type=str,
- default='../translations/',
- help='set path for the translations files')
- parser.add_argument('-r', '--rel_urls', type=bool,
- default=False,
- help='use relative dashboard urls')
- args = parser.parse_args()
- #
- # Setup our daily, rotating logger
- #
- serv_name = os.getenv('SERVICE_NAME', 'proxy')
- version = os.getenv('VERSION', 'unknown')
-
- @app.context_processor
- def utility_processor():
- return dict(version=version)
-
- setattr(logging.handlers, "log_path", args.log_path)
- setattr(logging.handlers, "log_backups", args.log_backups)
- os.makedirs(args.log_path, exist_ok=True)
-
- src_dir = os.path.dirname(__file__) + '/'
- logging.config.fileConfig(src_dir + 'logging.ini')
- logging.info(f'Server "{serv_name} - {version}" will be started')
- logging.info(f'current dir: {os.getcwd()}')
- logging.info(f"config_path: {args.config_path}")
- logging.info(f"json_config: {args.json_config}")
- logging.info(f"toml_config: {args.toml_config}")
- logging.info(f"trans_path: {args.trans_path}")
- logging.info(f"rel_urls: {args.rel_urls}")
- logging.info(f"log_path: {args.log_path}")
- if args.log_backups == 0:
- logging.info("log_backups: unlimited")
- else:
- logging.info(f"log_backups: {args.log_backups} days")
- log_level = get_log_level()
- logging.info('******')
- if log_level:
- # set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
- logging.getLogger().setLevel(log_level)
- logging.getLogger('msg').setLevel(log_level)
- logging.getLogger('conn').setLevel(log_level)
- logging.getLogger('data').setLevel(log_level)
- logging.getLogger('tracer').setLevel(log_level)
- logging.getLogger('asyncio').setLevel(log_level)
- # logging.getLogger('mqtt').setLevel(log_level)
-
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
-
- # read config file
- Config.init(ConfigReadToml(src_dir + "cnf/default_config.toml"),
- log_path=args.log_path)
- ConfigReadEnv()
- ConfigReadJson(args.config_path + "config.json")
- ConfigReadToml(args.config_path + "config.toml")
- ConfigReadJson(args.json_config)
- ConfigReadToml(args.toml_config)
- config_err = Config.get_error()
-
- if config_err is not None:
- logging.info(f'config_err: {config_err}')
- return
-
- logging.info('******')
-
- Proxy.class_init()
- Schedule.start()
- ModbusTcp(loop)
- Web(app, args.trans_path, args.rel_urls)
-
- #
- # Create tasks for our listening servers. These must be tasks! If we call
- # start_server directly out of our main task, the eventloop will be blocked
- # and we can't receive and handle the UNIX signals!
- #
- for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]:
- logging.info(f'listen on port: {port} for inverters')
- loop.create_task(asyncio.start_server(lambda r, w, i=inv_class:
- handle_client(r, w, i),
- '0.0.0.0', port))
-
- loop.set_debug(log_level == logging.DEBUG)
try:
- ProxyState.set_up(True)
logging.info("Start Quart")
- app.run(host='0.0.0.0', port=8127, use_reloader=False, loop=loop,
- debug=log_level == logging.DEBUG)
+ app.run(host='0.0.0.0', port=8127, use_reloader=False,
+ debug=server.log_level == logging.DEBUG)
logging.info("Quart stopped")
except KeyboardInterrupt:
@@ -228,10 +302,4 @@ def main(): # pragma: no cover
logging.info("Quart cancelled")
finally:
- logging.debug('Close event loop')
- loop.close()
- logging.info(f'Finally, exit Server "{serv_name}"')
-
-
-if __name__ == "__main__": # pragma: no cover
- main()
+ logging.info(f'Finally, exit Server "{server.serv_name}"')
diff --git a/app/src/web/__init__.py b/app/src/web/__init__.py
index ef3728c..24b0e87 100644
--- a/app/src/web/__init__.py
+++ b/app/src/web/__init__.py
@@ -7,8 +7,6 @@ Usage:
from quart import Quart, Blueprint
from quart_babel import Babel
from utils import load_modules
-from .log_handler import LogHandler
-import logging
web = Blueprint('web', __name__)
@@ -32,8 +30,3 @@ class Web:
locale_selector=get_locale,
timezone_selector=get_tz,
default_translation_directories=translation_directories)
-
- h = LogHandler()
- logging.getLogger().addHandler(h)
- for name in logging.root.manager.loggerDict:
- logging.getLogger(name).addHandler(h)
diff --git a/app/tests/cnf/invalid_config.toml b/app/tests/cnf/invalid_config.toml
new file mode 100644
index 0000000..80075f2
--- /dev/null
+++ b/app/tests/cnf/invalid_config.toml
@@ -0,0 +1 @@
+mqtt.port = ":1883"
diff --git a/app/tests/conftest.py b/app/tests/conftest.py
new file mode 100644
index 0000000..3c3ebef
--- /dev/null
+++ b/app/tests/conftest.py
@@ -0,0 +1,20 @@
+import pytest_asyncio
+import asyncio
+
+
+@pytest_asyncio.fixture
+async def my_loop():
+ event_loop = asyncio.get_running_loop()
+ yield event_loop
+
+ # Collect all tasks and cancel those that are not 'done'.
+ tasks = asyncio.all_tasks(event_loop)
+ tasks = [t for t in tasks if not t.done()]
+ for task in tasks:
+ task.cancel()
+
+ # Wait for all tasks to complete, ignoring any CancelledErrors
+ try:
+ await asyncio.wait(tasks)
+ except asyncio.exceptions.CancelledError:
+ pass
diff --git a/app/tests/test_inverter_base.py b/app/tests/test_inverter_base.py
index 0de04db..ee95209 100644
--- a/app/tests/test_inverter_base.py
+++ b/app/tests/test_inverter_base.py
@@ -113,7 +113,9 @@ def patch_unhealthy_remote():
with patch.object(AsyncStreamClient, 'healthy', new_healthy) as conn:
yield conn
-def test_inverter_iter():
+@pytest.mark.asyncio
+async def test_inverter_iter(my_loop):
+ _ = my_loop
InverterBase._registry.clear()
cnt = 0
reader = FakeReader()
@@ -216,7 +218,8 @@ def test_unhealthy_remote(patch_unhealthy_remote):
assert cnt == 0
@pytest.mark.asyncio
-async def test_remote_conn(config_conn, patch_open_connection):
+async def test_remote_conn(my_loop, config_conn, patch_open_connection):
+ _ = my_loop
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -242,8 +245,9 @@ async def test_remote_conn(config_conn, patch_open_connection):
assert cnt == 0
@pytest.mark.asyncio
-async def test_remote_conn_to_private(config_conn, patch_open_connection):
+async def test_remote_conn_to_private(my_loop, config_conn, patch_open_connection):
'''check DNS resolving of the TSUN FQDN to a local address'''
+ _ = my_loop
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -280,8 +284,9 @@ async def test_remote_conn_to_private(config_conn, patch_open_connection):
@pytest.mark.asyncio
-async def test_remote_conn_to_loopback(config_conn, patch_open_connection):
+async def test_remote_conn_to_loopback(my_loop, config_conn, patch_open_connection):
'''check DNS resolving of the TSUN FQDN to the loopback address'''
+ _ = my_loop
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -317,8 +322,9 @@ async def test_remote_conn_to_loopback(config_conn, patch_open_connection):
assert cnt == 0
@pytest.mark.asyncio
-async def test_remote_conn_to_none(config_conn, patch_open_connection):
+async def test_remote_conn_to_none(my_loop, config_conn, patch_open_connection):
'''check if get_extra_info() return None in case of an error'''
+ _ = my_loop
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -354,7 +360,8 @@ async def test_remote_conn_to_none(config_conn, patch_open_connection):
assert cnt == 0
@pytest.mark.asyncio
-async def test_unhealthy_remote(config_conn, patch_open_connection, patch_unhealthy_remote):
+async def test_unhealthy_remote(my_loop, config_conn, patch_open_connection, patch_unhealthy_remote):
+ _ = my_loop
_ = config_conn
_ = patch_open_connection
_ = patch_unhealthy_remote
@@ -391,10 +398,10 @@ async def test_unhealthy_remote(config_conn, patch_open_connection, patch_unheal
assert cnt == 0
@pytest.mark.asyncio
-async def test_remote_disc(config_conn, patch_open_connection):
+async def test_remote_disc(my_loop, config_conn, patch_open_connection):
+ _ = my_loop
_ = config_conn
_ = patch_open_connection
- assert asyncio.get_running_loop()
reader = FakeReader()
writer = FakeWriter()
diff --git a/app/tests/test_inverter_g3.py b/app/tests/test_inverter_g3.py
index 626ba7d..0addd32 100644
--- a/app/tests/test_inverter_g3.py
+++ b/app/tests/test_inverter_g3.py
@@ -99,7 +99,8 @@ def patch_healthy():
with patch.object(AsyncStream, 'healthy') as conn:
yield conn
-def test_method_calls(patch_healthy):
+@pytest.mark.asyncio
+async def test_method_calls(my_loop, patch_healthy):
spy = patch_healthy
reader = FakeReader()
writer = FakeWriter()
@@ -119,7 +120,7 @@ def test_method_calls(patch_healthy):
assert cnt == 0
@pytest.mark.asyncio
-async def test_remote_conn(config_conn, patch_open_connection):
+async def test_remote_conn(my_loop, config_conn, patch_open_connection):
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -137,7 +138,7 @@ async def test_remote_conn(config_conn, patch_open_connection):
assert cnt == 0
@pytest.mark.asyncio
-async def test_remote_except(config_conn, patch_open_connection):
+async def test_remote_except(my_loop, config_conn, patch_open_connection):
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -164,7 +165,7 @@ async def test_remote_except(config_conn, patch_open_connection):
assert cnt == 0
@pytest.mark.asyncio
-async def test_mqtt_publish(config_conn, patch_open_connection):
+async def test_mqtt_publish(my_loop, config_conn, patch_open_connection):
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -191,7 +192,7 @@ async def test_mqtt_publish(config_conn, patch_open_connection):
assert Infos.new_stat_data['proxy'] == False
@pytest.mark.asyncio
-async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err):
+async def test_mqtt_err(my_loop, config_conn, patch_open_connection, patch_mqtt_err):
_ = config_conn
_ = patch_open_connection
_ = patch_mqtt_err
@@ -208,7 +209,7 @@ async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err):
assert stream.new_data['inverter'] == True
@pytest.mark.asyncio
-async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except):
+async def test_mqtt_except(my_loop, config_conn, patch_open_connection, patch_mqtt_except):
_ = config_conn
_ = patch_open_connection
_ = patch_mqtt_except
diff --git a/app/tests/test_inverter_g3p.py b/app/tests/test_inverter_g3p.py
index f1bb398..b9b7078 100644
--- a/app/tests/test_inverter_g3p.py
+++ b/app/tests/test_inverter_g3p.py
@@ -94,7 +94,8 @@ def patch_open_connection():
with patch.object(asyncio, 'open_connection', new_open) as conn:
yield conn
-def test_method_calls(config_conn):
+@pytest.mark.asyncio
+async def test_method_calls(my_loop, config_conn):
_ = config_conn
reader = FakeReader()
writer = FakeWriter()
@@ -105,7 +106,7 @@ def test_method_calls(config_conn):
assert inverter.local.ifc
@pytest.mark.asyncio
-async def test_remote_conn(config_conn, patch_open_connection):
+async def test_remote_conn(my_loop, config_conn, patch_open_connection):
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -116,7 +117,7 @@ async def test_remote_conn(config_conn, patch_open_connection):
assert inverter.remote.stream
@pytest.mark.asyncio
-async def test_remote_except(config_conn, patch_open_connection):
+async def test_remote_except(my_loop, config_conn, patch_open_connection):
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -138,7 +139,7 @@ async def test_remote_except(config_conn, patch_open_connection):
@pytest.mark.asyncio
-async def test_mqtt_publish(config_conn, patch_open_connection):
+async def test_mqtt_publish(my_loop, config_conn, patch_open_connection):
_ = config_conn
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -165,7 +166,7 @@ async def test_mqtt_publish(config_conn, patch_open_connection):
assert Infos.new_stat_data['proxy'] == False
@pytest.mark.asyncio
-async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err):
+async def test_mqtt_err(my_loop, config_conn, patch_open_connection, patch_mqtt_err):
_ = config_conn
_ = patch_open_connection
_ = patch_mqtt_err
@@ -182,7 +183,7 @@ async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err):
assert stream.new_data['inverter'] == True
@pytest.mark.asyncio
-async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except):
+async def test_mqtt_except(my_loop, config_conn, patch_open_connection, patch_mqtt_except):
_ = config_conn
_ = patch_open_connection
_ = patch_mqtt_except
diff --git a/app/tests/test_modbus.py b/app/tests/test_modbus.py
index b6914b0..4e0c716 100644
--- a/app/tests/test_modbus.py
+++ b/app/tests/test_modbus.py
@@ -19,7 +19,8 @@ class ModbusTestHelper(Modbus):
def resp_handler(self):
self.recv_responses += 1
-def test_modbus_crc():
+@pytest.mark.asyncio
+async def test_modbus_crc():
'''Check CRC-16 calculation'''
mb = Modbus(None)
assert 0x0b02 == mb._Modbus__calc_crc(b'\x01\x06\x20\x08\x00\x04')
@@ -37,7 +38,8 @@ def test_modbus_crc():
msg += b'\x00\x00\x00\x00\x00\x00\x00\xe6\xef'
assert 0 == mb._Modbus__calc_crc(msg)
-def test_build_modbus_pdu():
+@pytest.mark.asyncio
+async def test_build_modbus_pdu():
'''Check building and sending a MODBUS RTU'''
mb = ModbusTestHelper()
mb.build_msg(1,6,0x2000,0x12)
@@ -49,7 +51,8 @@ def test_build_modbus_pdu():
assert mb.last_len == 18
assert mb.err == 0
-def test_recv_req():
+@pytest.mark.asyncio
+async def test_recv_req():
'''Receive a valid request, which must be transmitted'''
mb = ModbusTestHelper()
assert mb.recv_req(b'\x01\x06\x20\x00\x00\x12\x02\x07')
@@ -58,7 +61,8 @@ def test_recv_req():
assert mb.last_len == 0x12
assert mb.err == 0
-def test_recv_req_crc_err():
+@pytest.mark.asyncio
+async def test_recv_req_crc_err():
'''Receive a request with invalid CRC, which must be dropped'''
mb = ModbusTestHelper()
assert not mb.recv_req(b'\x01\x06\x20\x00\x00\x12\x02\x08')
@@ -68,7 +72,8 @@ def test_recv_req_crc_err():
assert mb.last_len == 0
assert mb.err == 1
-def test_recv_resp_crc_err():
+@pytest.mark.asyncio
+async def test_recv_resp_crc_err():
'''Receive a response with invalid CRC, which must be dropped'''
mb = ModbusTestHelper()
# simulate a transmitted request
@@ -89,7 +94,8 @@ def test_recv_resp_crc_err():
mb._Modbus__stop_timer()
assert not mb.req_pend
-def test_recv_resp_invalid_addr():
+@pytest.mark.asyncio
+async def test_recv_resp_invalid_addr():
'''Receive a response with wrong server addr, which must be dropped'''
mb = ModbusTestHelper()
mb.req_pend = True
@@ -113,7 +119,8 @@ def test_recv_resp_invalid_addr():
mb._Modbus__stop_timer()
assert not mb.req_pend
-def test_recv_recv_fcode():
+@pytest.mark.asyncio
+async def test_recv_recv_fcode():
'''Receive a response with wrong function code, which must be dropped'''
mb = ModbusTestHelper()
mb.build_msg(1,4,0x300e,2)
@@ -135,7 +142,8 @@ def test_recv_recv_fcode():
mb._Modbus__stop_timer()
assert not mb.req_pend
-def test_recv_resp_len():
+@pytest.mark.asyncio
+async def test_recv_resp_len():
'''Receive a response with wrong data length, which must be dropped'''
mb = ModbusTestHelper()
mb.build_msg(1,3,0x300e,3)
@@ -158,7 +166,8 @@ def test_recv_resp_len():
mb._Modbus__stop_timer()
assert not mb.req_pend
-def test_recv_unexpect_resp():
+@pytest.mark.asyncio
+async def test_recv_unexpect_resp():
'''Receive a response when we haven't sent a request'''
mb = ModbusTestHelper()
assert not mb.req_pend
@@ -174,7 +183,8 @@ def test_recv_unexpect_resp():
assert mb.req_pend == False
assert mb.que.qsize() == 0
-def test_parse_resp():
+@pytest.mark.asyncio
+async def test_parse_resp():
'''Receive matching response and parse the values'''
mb = ModbusTestHelper()
mb.build_msg(1,3,0x3007,6)
@@ -200,7 +210,8 @@ def test_parse_resp():
assert mb.que.qsize() == 0
assert not mb.req_pend
-def test_queue():
+@pytest.mark.asyncio
+async def test_queue():
mb = ModbusTestHelper()
mb.build_msg(1,3,0x3022,4)
assert mb.que.qsize() == 0
@@ -218,7 +229,8 @@ def test_queue():
mb._Modbus__stop_timer()
assert not mb.req_pend
-def test_queue2():
+@pytest.mark.asyncio
+async def test_queue2():
'''Check queue handling for build_msg() calls'''
mb = ModbusTestHelper()
mb.build_msg(1,3,0x3007,6)
@@ -267,7 +279,8 @@ def test_queue2():
assert mb.que.qsize() == 0
assert not mb.req_pend
-def test_queue3():
+@pytest.mark.asyncio
+async def test_queue3():
'''Check queue handling for recv_req() calls'''
mb = ModbusTestHelper()
assert mb.recv_req(b'\x01\x03\x30\x07\x00\x06{\t', mb.resp_handler)
@@ -324,7 +337,7 @@ def test_queue3():
assert not mb.req_pend
@pytest.mark.asyncio
-async def test_timeout():
+async def test_timeout(my_loop):
'''Test MODBUS response timeout and RTU retransmitting'''
assert asyncio.get_running_loop()
mb = ModbusTestHelper()
@@ -371,7 +384,8 @@ async def test_timeout():
assert mb.retry_cnt == 0
assert mb.send_calls == 4
-def test_recv_unknown_data():
+@pytest.mark.asyncio
+async def test_recv_unknown_data():
'''Receive a response with an unknown register'''
mb = ModbusTestHelper()
assert 0x9000 not in mb.mb_reg_mapping
@@ -390,7 +404,8 @@ def test_recv_unknown_data():
del mb.mb_reg_mapping[0x9000]
-def test_close():
+@pytest.mark.asyncio
+async def test_close():
'''Check queue handling for build_msg() calls'''
mb = ModbusTestHelper()
mb.build_msg(1,3,0x3007,6)
diff --git a/app/tests/test_server.py b/app/tests/test_server.py
index 1fdae63..e13ee5f 100644
--- a/app/tests/test_server.py
+++ b/app/tests/test_server.py
@@ -3,66 +3,216 @@ import pytest
import logging
import os
from mock import patch
-from server import get_log_level, app, ProxyState
+from server import app, Server, ProxyState, HypercornLogHndl
pytest_plugins = ('pytest_asyncio',)
-def test_get_log_level():
- with patch.dict(os.environ, {}):
- log_lvl = get_log_level()
- assert log_lvl == None
+class TestServerClass:
+ class FakeServer(Server):
+ def __init__(self):
+ pass # don't call super().__init__() for unit tests
- with patch.dict(os.environ, {'LOG_LVL': 'DEBUG'}):
- log_lvl = get_log_level()
- assert log_lvl == logging.DEBUG
+ def test_get_log_level(self):
+ s = self.FakeServer()
- with patch.dict(os.environ, {'LOG_LVL': 'INFO'}):
- log_lvl = get_log_level()
- assert log_lvl == logging.INFO
+ with patch.dict(os.environ, {}):
+ log_lvl = s.get_log_level()
+ assert log_lvl == None
- with patch.dict(os.environ, {'LOG_LVL': 'WARN'}):
- log_lvl = get_log_level()
- assert log_lvl == logging.WARNING
+ with patch.dict(os.environ, {'LOG_LVL': 'DEBUG'}):
+ log_lvl = s.get_log_level()
+ assert log_lvl == logging.DEBUG
- with patch.dict(os.environ, {'LOG_LVL': 'ERROR'}):
- log_lvl = get_log_level()
- assert log_lvl == logging.ERROR
+ with patch.dict(os.environ, {'LOG_LVL': 'INFO'}):
+ log_lvl = s.get_log_level()
+ assert log_lvl == logging.INFO
- with patch.dict(os.environ, {'LOG_LVL': 'UNKNOWN'}):
- log_lvl = get_log_level()
- assert log_lvl == None
+ with patch.dict(os.environ, {'LOG_LVL': 'WARN'}):
+ log_lvl = s.get_log_level()
+ assert log_lvl == logging.WARNING
-@pytest.mark.asyncio
-async def test_ready():
- """Test the ready route."""
+ with patch.dict(os.environ, {'LOG_LVL': 'ERROR'}):
+ log_lvl = s.get_log_level()
+ assert log_lvl == logging.ERROR
- ProxyState.set_up(False)
- client = app.test_client()
- response = await client.get('/-/ready')
- assert response.status_code == 503
- result = await response.get_data()
- assert result == b"Not ready"
+ with patch.dict(os.environ, {'LOG_LVL': 'UNKNOWN'}):
+ log_lvl = s.get_log_level()
+ assert log_lvl == None
- ProxyState.set_up(True)
- response = await client.get('/-/ready')
- assert response.status_code == 200
- result = await response.get_data()
- assert result == b"Is ready"
+ def test_default_args(self):
+ s = self.FakeServer()
+ assert s.config_path == './config/'
+ assert s.json_config == ''
+ assert s.toml_config == ''
+ assert s.trans_path == '../translations/'
+ assert s.rel_urls == False
+ assert s.log_path == './log/'
+ assert s.log_backups == 0
-@pytest.mark.asyncio
-async def test_healthy():
- """Test the healthy route."""
+ def test_parse_args_empty(self):
+ s = self.FakeServer()
+ s.parse_args([])
+ assert s.config_path == './config/'
+ assert s.json_config == None
+ assert s.toml_config == None
+ assert s.trans_path == '../translations/'
+ assert s.rel_urls == False
+ assert s.log_path == './log/'
+ assert s.log_backups == 0
- ProxyState.set_up(False)
- client = app.test_client()
- response = await client.get('/-/healthy')
- assert response.status_code == 200
- result = await response.get_data()
- assert result == b"I'm fine"
+ def test_parse_args_short(self):
+ s = self.FakeServer()
+ s.parse_args(['-r', '-c', '/tmp/my-config', '-j', 'cnf.jsn', '-t', 'cnf.tml', '-tr', '/my/trans/', '-l', '/my_logs/', '-b', '3'])
+ assert s.config_path == '/tmp/my-config'
+ assert s.json_config == 'cnf.jsn'
+ assert s.toml_config == 'cnf.tml'
+ assert s.trans_path == '/my/trans/'
+ assert s.rel_urls == True
+ assert s.log_path == '/my_logs/'
+ assert s.log_backups == 3
+
+ def test_parse_args_long(self):
+ s = self.FakeServer()
+ s.parse_args(['--rel_urls', '--config_path', '/tmp/my-config', '--json_config', 'cnf.jsn',
+ '--toml_config', 'cnf.tml', '--trans_path', '/my/trans/', '--log_path', '/my_logs/',
+ '--log_backups', '3'])
+ assert s.config_path == '/tmp/my-config'
+ assert s.json_config == 'cnf.jsn'
+ assert s.toml_config == 'cnf.tml'
+ assert s.trans_path == '/my/trans/'
+ assert s.rel_urls == True
+ assert s.log_path == '/my_logs/'
+ assert s.log_backups == 3
+
+ def test_parse_args_invalid(self):
+ s = self.FakeServer()
+ with pytest.raises(SystemExit) as exc_info:
+ s.parse_args(['--inalid', '/tmp/my-config'])
+ assert exc_info.value.code == 2
+
+ def test_init_logging_system(self):
+ s = self.FakeServer()
+ s.src_dir = 'app/src/'
+ s.init_logging_system()
+ assert s.log_backups == 0
+ assert s.log_level == None
+ assert logging.handlers.log_path == './log/'
+ assert logging.handlers.log_backups == 0
+ assert logging.getLogger().level == logging.DEBUG
+ assert logging.getLogger('msg').level == logging.DEBUG
+ assert logging.getLogger('conn').level == logging.DEBUG
+ assert logging.getLogger('data').level == logging.DEBUG
+ assert logging.getLogger('tracer').level == logging.INFO
+ assert logging.getLogger('asyncio').level == logging.INFO
+ assert logging.getLogger('hypercorn.access').level == logging.INFO
+ assert logging.getLogger('hypercorn.error').level == logging.INFO
+
+ os.environ["LOG_LVL"] = "WARN"
+ s.parse_args(['--log_backups', '3'])
+ s.init_logging_system()
+ assert s.log_backups == 3
+ assert s.log_level == logging.WARNING
+ assert logging.handlers.log_backups == 3
+ assert logging.getLogger().level == s.log_level
+ assert logging.getLogger('msg').level == s.log_level
+ assert logging.getLogger('conn').level == s.log_level
+ assert logging.getLogger('data').level == s.log_level
+ assert logging.getLogger('tracer').level == s.log_level
+ assert logging.getLogger('asyncio').level == s.log_level
+ assert logging.getLogger('hypercorn.access').level == logging.INFO
+ assert logging.getLogger('hypercorn.error').level == logging.INFO
+
+ def test_build_config_error(self, caplog):
+ s = self.FakeServer()
+ s.src_dir = 'app/src/'
+ s.toml_config = 'app/tests/cnf/invalid_config.toml'
+
+ with caplog.at_level(logging.ERROR):
+ s.build_config()
+ assert "Can't read from app/tests/cnf/invalid_config.toml" in caplog.text
+ assert "Key 'port' error:" in caplog.text
+
+
+class TestHypercornLogHndl:
+ class FakeServer(Server):
+ def __init__(self):
+ pass # don't call super().__init__() for unit tests
+
+ def test_save_and_restore(self, capsys):
+ s = self.FakeServer()
+ s.src_dir = 'app/src/'
+ s.init_logging_system()
+
+ h = HypercornLogHndl()
+ assert h.must_fix == False
+ assert len(h.access_hndl) == 0
+ assert len(h.error_hndl) == 0
+
+ h.save()
+ assert h.must_fix == True
+ assert len(h.access_hndl) == 1
+ assert len(h.error_hndl) == 2
+ assert h.access_hndl == logging.getLogger('hypercorn.access').handlers
+ assert h.error_hndl == logging.getLogger('hypercorn.error').handlers
+
+ logging.getLogger('hypercorn.access').handlers = []
+ logging.getLogger('hypercorn.error').handlers = []
+
+ h.restore()
+ assert h.must_fix == False
+ assert h.access_hndl == logging.getLogger('hypercorn.access').handlers
+ assert h.error_hndl == logging.getLogger('hypercorn.error').handlers
+ output = capsys.readouterr().out.rstrip()
+ assert "* Fix hypercorn.access setting" in output
+ assert "* Fix hypercorn.error setting" in output
+
+ h.restore() # a second restore does nothing
+ assert h.must_fix == False
+ output = capsys.readouterr().out.rstrip()
+ assert output == ''
+
+ h.save() # save the same values a second time
+ assert h.must_fix == True
+
+ h.restore() # restore without changing the handlers
+ assert h.must_fix == False
+ output = capsys.readouterr().out.rstrip()
+ assert output == ''
+
+
+class TestApp:
+ @pytest.mark.asyncio
+ async def test_ready(self):
+ """Test the ready route."""
+
+ ProxyState.set_up(False)
+ client = app.test_client()
+ response = await client.get('/-/ready')
+ assert response.status_code == 503
+ result = await response.get_data()
+ assert result == b"Not ready"
+
+ ProxyState.set_up(True)
+ response = await client.get('/-/ready')
+ assert response.status_code == 200
+ result = await response.get_data()
+ assert result == b"Is ready"
+
+ @pytest.mark.asyncio
+ async def test_healthy(self):
+ """Test the healthy route."""
+
+ ProxyState.set_up(False)
+ client = app.test_client()
+ response = await client.get('/-/healthy')
+ assert response.status_code == 200
+ result = await response.get_data()
+ assert result == b"I'm fine"
+
+ ProxyState.set_up(True)
+ response = await client.get('/-/healthy')
+ assert response.status_code == 200
+ result = await response.get_data()
+ assert result == b"I'm fine"
- ProxyState.set_up(True)
- response = await client.get('/-/healthy')
- assert response.status_code == 200
- result = await response.get_data()
- assert result == b"I'm fine"
diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py
index 0c3c86c..58da012 100644
--- a/app/tests/test_solarman.py
+++ b/app/tests/test_solarman.py
@@ -79,6 +79,7 @@ class MemoryStream(SolarmanV5):
self.key = ''
self.data = ''
self.msg_recvd = []
+
def write_cb(self):
if self.test_exception_async_write:
@@ -855,7 +856,8 @@ def config_tsun_scan_dcu():
def config_tsun_dcu1():
Config.act_config = {'solarman':{'enabled': True},'batteries':{'4100000000000001':{'monitor_sn': 2070233888, 'node_id':'inv1/', 'modbus_polling': True, 'suggested_area':'roof', 'sensor_list': 0}}}
-def test_read_message(device_ind_msg):
+@pytest.mark.asyncio
+async def test_read_message(device_ind_msg):
Config.act_config = {'solarman':{'enabled': True}}
m = MemoryStream(device_ind_msg, (0,))
m.read() # read complete msg, and dispatch msg
@@ -873,10 +875,12 @@ def test_read_message(device_ind_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_invalid_start_byte(invalid_start_byte, device_ind_msg):
+@pytest.mark.asyncio
+async def test_invalid_start_byte(invalid_start_byte, device_ind_msg):
# received a message with wrong start byte plus a valid message
# the complete receive buffer must be cleared to
# find the next valid message
+ Config.act_config = {'solarman':{'enabled': True}}
m = MemoryStream(invalid_start_byte, (0,))
m.append_msg(device_ind_msg)
m.read() # read complete msg, and dispatch msg
@@ -894,10 +898,12 @@ def test_invalid_start_byte(invalid_start_byte, device_ind_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
-def test_invalid_stop_byte(invalid_stop_byte):
+@pytest.mark.asyncio
+async def test_invalid_stop_byte(invalid_stop_byte):
# received a message with wrong stop byte
# the complete receive buffer must be cleared to
# find the next valid message
+ Config.act_config = {'solarman':{'enabled': True}}
m = MemoryStream(invalid_stop_byte, (0,))
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since start byte is wrong
@@ -914,9 +920,11 @@ def test_invalid_stop_byte(invalid_stop_byte):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
-def test_invalid_stop_byte2(invalid_stop_byte, device_ind_msg):
+@pytest.mark.asyncio
+async def test_invalid_stop_byte2(invalid_stop_byte, device_ind_msg):
    # received a message with wrong stop byte plus a valid message
# only the first message must be discarded
+ Config.act_config = {'solarman':{'enabled': True}}
m = MemoryStream(invalid_stop_byte, (0,))
m.append_msg(device_ind_msg)
@@ -939,11 +947,13 @@ def test_invalid_stop_byte2(invalid_stop_byte, device_ind_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
-def test_invalid_stop_start_byte(invalid_stop_byte, invalid_start_byte):
+@pytest.mark.asyncio
+async def test_invalid_stop_start_byte(invalid_stop_byte, invalid_start_byte):
# received a message with wrong stop byte plus an invalid message
    # with a wrong start byte
# the complete receive buffer must be cleared to
# find the next valid message
+ Config.act_config = {'solarman':{'enabled': True}}
m = MemoryStream(invalid_stop_byte, (0,))
m.append_msg(invalid_start_byte)
m.read() # read complete msg, and dispatch msg
@@ -961,9 +971,11 @@ def test_invalid_stop_start_byte(invalid_stop_byte, invalid_start_byte):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
-def test_invalid_checksum(invalid_checksum, device_ind_msg):
+@pytest.mark.asyncio
+async def test_invalid_checksum(invalid_checksum, device_ind_msg):
    # received a message with wrong checksum plus a valid message
# only the first message must be discarded
+ Config.act_config = {'solarman':{'enabled': True}}
m = MemoryStream(invalid_checksum, (0,))
m.append_msg(device_ind_msg)
@@ -985,7 +997,8 @@ def test_invalid_checksum(invalid_checksum, device_ind_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
-def test_read_message_twice(config_no_tsun_inv1, device_ind_msg, device_rsp_msg):
+@pytest.mark.asyncio
+async def test_read_message_twice(config_no_tsun_inv1, device_ind_msg, device_rsp_msg):
_ = config_no_tsun_inv1
m = MemoryStream(device_ind_msg, (0,))
m.append_msg(device_ind_msg)
@@ -1006,7 +1019,9 @@ def test_read_message_twice(config_no_tsun_inv1, device_ind_msg, device_rsp_msg)
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_read_message_in_chunks(device_ind_msg):
+@pytest.mark.asyncio
+async def test_read_message_in_chunks(device_ind_msg):
+ Config.act_config = {'solarman':{'enabled': True}}
m = MemoryStream(device_ind_msg, (4,11,0))
    m.read()    # read 4 bytes, header incomplete
assert not m.header_valid # must be invalid, since header not complete
@@ -1027,7 +1042,8 @@ def test_read_message_in_chunks(device_ind_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_read_message_in_chunks2(config_tsun_inv1, device_ind_msg):
+@pytest.mark.asyncio
+async def test_read_message_in_chunks2(my_loop, config_tsun_inv1, device_ind_msg):
_ = config_tsun_inv1
m = MemoryStream(device_ind_msg, (4,10,0))
    m.read()    # read 4 bytes, header incomplete
@@ -1052,7 +1068,8 @@ def test_read_message_in_chunks2(config_tsun_inv1, device_ind_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_read_two_messages(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg):
+@pytest.mark.asyncio
+async def test_read_two_messages(my_loop, config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg):
_ = config_tsun_allow_all
m = MemoryStream(device_ind_msg, (0,))
m.append_msg(inverter_ind_msg)
@@ -1080,7 +1097,8 @@ def test_read_two_messages(config_tsun_allow_all, device_ind_msg, device_rsp_msg
assert m.ifc.tx_fifo.get()==b''
m.close()
-def test_read_two_messages2(config_tsun_allow_all, inverter_ind_msg, inverter_ind_msg_81, inverter_rsp_msg, inverter_rsp_msg_81):
+@pytest.mark.asyncio
+async def test_read_two_messages2(my_loop, config_tsun_allow_all, inverter_ind_msg, inverter_ind_msg_81, inverter_rsp_msg, inverter_rsp_msg_81):
_ = config_tsun_allow_all
m = MemoryStream(inverter_ind_msg, (0,))
m.append_msg(inverter_ind_msg_81)
@@ -1105,7 +1123,8 @@ def test_read_two_messages2(config_tsun_allow_all, inverter_ind_msg, inverter_in
assert m.ifc.tx_fifo.get()==b''
m.close()
-def test_read_two_messages3(config_tsun_allow_all, device_ind_msg2, device_rsp_msg2, inverter_ind_msg, inverter_rsp_msg):
+@pytest.mark.asyncio
+async def test_read_two_messages3(my_loop, config_tsun_allow_all, device_ind_msg2, device_rsp_msg2, inverter_ind_msg, inverter_rsp_msg):
    # test device message received after the inverter msg
_ = config_tsun_allow_all
m = MemoryStream(inverter_ind_msg, (0,))
@@ -1134,7 +1153,8 @@ def test_read_two_messages3(config_tsun_allow_all, device_ind_msg2, device_rsp_m
assert m.ifc.tx_fifo.get()==b''
m.close()
-def test_read_two_messages4(config_tsun_dcu1, dcu_dev_ind_msg, dcu_dev_rsp_msg, dcu_data_ind_msg, dcu_data_rsp_msg):
+@pytest.mark.asyncio
+async def test_read_two_messages4(my_loop, config_tsun_dcu1, dcu_dev_ind_msg, dcu_dev_rsp_msg, dcu_data_ind_msg, dcu_data_rsp_msg):
_ = config_tsun_dcu1
m = MemoryStream(dcu_dev_ind_msg, (0,))
m.append_msg(dcu_data_ind_msg)
@@ -1162,7 +1182,8 @@ def test_read_two_messages4(config_tsun_dcu1, dcu_dev_ind_msg, dcu_dev_rsp_msg,
assert m.ifc.tx_fifo.get()==b''
m.close()
-def test_unkown_frame_code(config_tsun_inv1, inverter_ind_msg_81, inverter_rsp_msg_81):
+@pytest.mark.asyncio
+async def test_unkown_frame_code(my_loop, config_tsun_inv1, inverter_ind_msg_81, inverter_rsp_msg_81):
_ = config_tsun_inv1
m = MemoryStream(inverter_ind_msg_81, (0,))
m.read() # read complete msg, and dispatch msg
@@ -1180,7 +1201,8 @@ def test_unkown_frame_code(config_tsun_inv1, inverter_ind_msg_81, inverter_rsp_m
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_unkown_message(config_tsun_inv1, unknown_msg):
+@pytest.mark.asyncio
+async def test_unkown_message(my_loop, config_tsun_inv1, unknown_msg):
_ = config_tsun_inv1
m = MemoryStream(unknown_msg, (0,))
m.read() # read complete msg, and dispatch msg
@@ -1198,7 +1220,8 @@ def test_unkown_message(config_tsun_inv1, unknown_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_device_rsp(config_tsun_inv1, device_rsp_msg):
+@pytest.mark.asyncio
+async def test_device_rsp(my_loop, config_tsun_inv1, device_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(device_rsp_msg, (0,), False)
m.read() # read complete msg, and dispatch msg
@@ -1216,7 +1239,8 @@ def test_device_rsp(config_tsun_inv1, device_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_inverter_rsp(config_tsun_inv1, inverter_rsp_msg):
+@pytest.mark.asyncio
+async def test_inverter_rsp(my_loop, config_tsun_inv1, inverter_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(inverter_rsp_msg, (0,), False)
m.read() # read complete msg, and dispatch msg
@@ -1234,7 +1258,8 @@ def test_inverter_rsp(config_tsun_inv1, inverter_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_heartbeat_ind(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
+@pytest.mark.asyncio
+async def test_heartbeat_ind(my_loop, config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(heartbeat_ind_msg, (0,))
m.read() # read complete msg, and dispatch msg
@@ -1251,7 +1276,8 @@ def test_heartbeat_ind(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_heartbeat_ind2(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
+@pytest.mark.asyncio
+async def test_heartbeat_ind2(my_loop, config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(heartbeat_ind_msg, (0,))
m.no_forwarding = True
@@ -1269,7 +1295,8 @@ def test_heartbeat_ind2(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_heartbeat_rsp(config_tsun_inv1, heartbeat_rsp_msg):
+@pytest.mark.asyncio
+async def test_heartbeat_rsp(my_loop, config_tsun_inv1, heartbeat_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(heartbeat_rsp_msg, (0,), False)
m.read() # read complete msg, and dispatch msg
@@ -1287,7 +1314,8 @@ def test_heartbeat_rsp(config_tsun_inv1, heartbeat_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_sync_start_ind(config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg, sync_start_fwd_msg):
+@pytest.mark.asyncio
+async def test_sync_start_ind(my_loop, config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg, sync_start_fwd_msg):
_ = config_tsun_inv1
m = MemoryStream(sync_start_ind_msg, (0,))
m.read() # read complete msg, and dispatch msg
@@ -1310,7 +1338,8 @@ def test_sync_start_ind(config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg
m.close()
-def test_sync_start_rsp(config_tsun_inv1, sync_start_rsp_msg):
+@pytest.mark.asyncio
+async def test_sync_start_rsp(my_loop, config_tsun_inv1, sync_start_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(sync_start_rsp_msg, (0,), False)
m.read() # read complete msg, and dispatch msg
@@ -1328,7 +1357,8 @@ def test_sync_start_rsp(config_tsun_inv1, sync_start_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_sync_end_ind(config_tsun_inv1, sync_end_ind_msg, sync_end_rsp_msg):
+@pytest.mark.asyncio
+async def test_sync_end_ind(my_loop, config_tsun_inv1, sync_end_ind_msg, sync_end_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(sync_end_ind_msg, (0,))
m.read() # read complete msg, and dispatch msg
@@ -1345,7 +1375,8 @@ def test_sync_end_ind(config_tsun_inv1, sync_end_ind_msg, sync_end_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_sync_end_rsp(config_tsun_inv1, sync_end_rsp_msg):
+@pytest.mark.asyncio
+async def test_sync_end_rsp(my_loop, config_tsun_inv1, sync_end_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(sync_end_rsp_msg, (0,), False)
m.read() # read complete msg, and dispatch msg
@@ -1363,7 +1394,8 @@ def test_sync_end_rsp(config_tsun_inv1, sync_end_rsp_msg):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_build_modell_600(config_tsun_allow_all, inverter_ind_msg):
+@pytest.mark.asyncio
+async def test_build_modell_600(my_loop, config_tsun_allow_all, inverter_ind_msg):
_ = config_tsun_allow_all
m = MemoryStream(inverter_ind_msg, (0,))
assert 0 == m.sensor_list
@@ -1382,7 +1414,8 @@ def test_build_modell_600(config_tsun_allow_all, inverter_ind_msg):
assert m.ifc.tx_fifo.get()==b''
m.close()
-def test_build_modell_1600(config_tsun_allow_all, inverter_ind_msg1600):
+@pytest.mark.asyncio
+async def test_build_modell_1600(my_loop, config_tsun_allow_all, inverter_ind_msg1600):
_ = config_tsun_allow_all
m = MemoryStream(inverter_ind_msg1600, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
@@ -1394,7 +1427,8 @@ def test_build_modell_1600(config_tsun_allow_all, inverter_ind_msg1600):
assert 'TSOL-MS1600' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m.close()
-def test_build_modell_1800(config_tsun_allow_all, inverter_ind_msg1800):
+@pytest.mark.asyncio
+async def test_build_modell_1800(my_loop, config_tsun_allow_all, inverter_ind_msg1800):
_ = config_tsun_allow_all
m = MemoryStream(inverter_ind_msg1800, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
@@ -1406,7 +1440,8 @@ def test_build_modell_1800(config_tsun_allow_all, inverter_ind_msg1800):
assert 'TSOL-MS1800' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m.close()
-def test_build_modell_2000(config_tsun_allow_all, inverter_ind_msg2000):
+@pytest.mark.asyncio
+async def test_build_modell_2000(my_loop, config_tsun_allow_all, inverter_ind_msg2000):
_ = config_tsun_allow_all
m = MemoryStream(inverter_ind_msg2000, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
@@ -1418,7 +1453,8 @@ def test_build_modell_2000(config_tsun_allow_all, inverter_ind_msg2000):
assert 'TSOL-MS2000' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m.close()
-def test_build_modell_800(config_tsun_allow_all, inverter_ind_msg800):
+@pytest.mark.asyncio
+async def test_build_modell_800(my_loop, config_tsun_allow_all, inverter_ind_msg800):
_ = config_tsun_allow_all
m = MemoryStream(inverter_ind_msg800, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
@@ -1430,7 +1466,8 @@ def test_build_modell_800(config_tsun_allow_all, inverter_ind_msg800):
assert 'TSOL-MSxx00' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m.close()
-def test_build_logger_modell(config_tsun_allow_all, device_ind_msg):
+@pytest.mark.asyncio
+async def test_build_logger_modell(my_loop, config_tsun_allow_all, device_ind_msg):
_ = config_tsun_allow_all
m = MemoryStream(device_ind_msg, (0,))
assert 0 == m.db.get_db_value(Register.COLLECTOR_FW_VERSION, 0)
@@ -1441,7 +1478,8 @@ def test_build_logger_modell(config_tsun_allow_all, device_ind_msg):
assert 'V1.1.00.0B' == m.db.get_db_value(Register.COLLECTOR_FW_VERSION, 0).rstrip('\00')
m.close()
-def test_msg_iterator():
+@pytest.mark.asyncio
+async def test_msg_iterator(my_loop, config_tsun_inv1):
Message._registry.clear()
m1 = SolarmanV5(None, ('test1.local', 1234), ifc=AsyncIfcImpl(), server_side=True, client_mode=False)
m2 = SolarmanV5(None, ('test2.local', 1234), ifc=AsyncIfcImpl(), server_side=True, client_mode=False)
@@ -1462,7 +1500,8 @@ def test_msg_iterator():
assert test1 == 1
assert test2 == 1
-def test_proxy_counter():
+@pytest.mark.asyncio
+async def test_proxy_counter(my_loop, config_tsun_inv1):
m = SolarmanV5(None, ('test.local', 1234), ifc=AsyncIfcImpl(), server_side=True, client_mode=False)
assert m.new_data == {}
m.db.stat['proxy']['Unknown_Msg'] = 0
@@ -1481,7 +1520,7 @@ def test_proxy_counter():
m.close()
@pytest.mark.asyncio
-async def test_msg_build_modbus_req(config_tsun_inv1, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, msg_modbus_cmd):
+async def test_msg_build_modbus_req(my_loop, config_tsun_inv1, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, msg_modbus_cmd):
_ = config_tsun_inv1
m = MemoryStream(device_ind_msg, (0,), True)
m.read()
@@ -1516,7 +1555,7 @@ async def test_msg_build_modbus_req(config_tsun_inv1, device_ind_msg, device_rsp
m.close()
@pytest.mark.asyncio
-async def test_at_cmd(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, at_command_ind_msg, at_command_rsp_msg):
+async def test_at_cmd(my_loop, config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, at_command_ind_msg, at_command_rsp_msg):
_ = config_tsun_allow_all
m = MemoryStream(device_ind_msg, (0,), True)
m.read() # read device ind
@@ -1576,7 +1615,7 @@ async def test_at_cmd(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inv
m.close()
@pytest.mark.asyncio
-async def test_at_cmd_blocked(config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, at_command_ind_msg):
+async def test_at_cmd_blocked(my_loop, config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg, at_command_ind_msg):
_ = config_tsun_allow_all
m = MemoryStream(device_ind_msg, (0,), True)
m.read()
@@ -1610,7 +1649,8 @@ async def test_at_cmd_blocked(config_tsun_allow_all, device_ind_msg, device_rsp_
assert Proxy.mqtt.data == "'AT+WEBU' is forbidden"
m.close()
-def test_at_cmd_ind(config_tsun_inv1, at_command_ind_msg, at_command_rsp_msg):
+@pytest.mark.asyncio
+async def test_at_cmd_ind(my_loop, config_tsun_inv1, at_command_ind_msg, at_command_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(at_command_ind_msg, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1645,7 +1685,8 @@ def test_at_cmd_ind(config_tsun_inv1, at_command_ind_msg, at_command_rsp_msg):
m.close()
-def test_at_cmd_ind_block(config_tsun_inv1, at_command_ind_msg_block):
+@pytest.mark.asyncio
+async def test_at_cmd_ind_block(my_loop, config_tsun_inv1, at_command_ind_msg_block):
_ = config_tsun_inv1
m = MemoryStream(at_command_ind_msg_block, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1673,7 +1714,8 @@ def test_at_cmd_ind_block(config_tsun_inv1, at_command_ind_msg_block):
assert Proxy.mqtt.data == ""
m.close()
-def test_msg_at_command_rsp1(config_tsun_inv1, at_command_rsp_msg):
+@pytest.mark.asyncio
+async def test_msg_at_command_rsp1(my_loop, config_tsun_inv1, at_command_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(at_command_rsp_msg)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1692,7 +1734,8 @@ def test_msg_at_command_rsp1(config_tsun_inv1, at_command_rsp_msg):
assert m.db.stat['proxy']['Modbus_Command'] == 0
m.close()
-def test_msg_at_command_rsp2(config_tsun_inv1, at_command_rsp_msg):
+@pytest.mark.asyncio
+async def test_msg_at_command_rsp2(my_loop, config_tsun_inv1, at_command_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(at_command_rsp_msg)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1713,7 +1756,8 @@ def test_msg_at_command_rsp2(config_tsun_inv1, at_command_rsp_msg):
assert Proxy.mqtt.data == "+ok"
m.close()
-def test_msg_at_command_rsp3(config_tsun_inv1, at_command_interim_rsp_msg):
+@pytest.mark.asyncio
+async def test_msg_at_command_rsp3(my_loop, config_tsun_inv1, at_command_interim_rsp_msg):
_ = config_tsun_inv1
m = MemoryStream(at_command_interim_rsp_msg)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1738,7 +1782,8 @@ def test_msg_at_command_rsp3(config_tsun_inv1, at_command_interim_rsp_msg):
assert Proxy.mqtt.data == ""
m.close()
-def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd, msg_modbus_cmd_fwd):
+@pytest.mark.asyncio
+async def test_msg_modbus_req(my_loop, config_tsun_inv1, msg_modbus_cmd, msg_modbus_cmd_fwd):
_ = config_tsun_inv1
m = MemoryStream(b'')
m.snr = get_sn_int()
@@ -1766,7 +1811,8 @@ def test_msg_modbus_req(config_tsun_inv1, msg_modbus_cmd, msg_modbus_cmd_fwd):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_msg_modbus_req_seq(config_tsun_inv1, msg_modbus_cmd_seq):
+@pytest.mark.asyncio
+async def test_msg_modbus_req_seq(my_loop, config_tsun_inv1, msg_modbus_cmd_seq):
_ = config_tsun_inv1
m = MemoryStream(b'')
m.snr = get_sn_int()
@@ -1794,7 +1840,8 @@ def test_msg_modbus_req_seq(config_tsun_inv1, msg_modbus_cmd_seq):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd_crc_err):
+@pytest.mark.asyncio
+async def test_msg_modbus_req2(my_loop, config_tsun_inv1, msg_modbus_cmd_crc_err):
_ = config_tsun_inv1
m = MemoryStream(b'')
m.snr = get_sn_int()
@@ -1821,7 +1868,8 @@ def test_msg_modbus_req2(config_tsun_inv1, msg_modbus_cmd_crc_err):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
-def test_msg_unknown_cmd_req(config_tsun_inv1, msg_unknown_cmd):
+@pytest.mark.asyncio
+async def test_msg_unknown_cmd_req(my_loop, config_tsun_inv1, msg_unknown_cmd):
_ = config_tsun_inv1
m = MemoryStream(msg_unknown_cmd, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1843,7 +1891,8 @@ def test_msg_unknown_cmd_req(config_tsun_inv1, msg_unknown_cmd):
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
-def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp):
+@pytest.mark.asyncio
+async def test_msg_modbus_rsp1(my_loop, config_tsun_inv1, msg_modbus_rsp):
'''Modbus response without a valid Modbus request must be dropped'''
_ = config_tsun_inv1
m = MemoryStream(msg_modbus_rsp)
@@ -1862,7 +1911,8 @@ def test_msg_modbus_rsp1(config_tsun_inv1, msg_modbus_rsp):
assert m.db.stat['proxy']['Modbus_Command'] == 0
m.close()
-def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp):
+@pytest.mark.asyncio
+async def test_msg_modbus_rsp2(my_loop, config_tsun_inv1, msg_modbus_rsp):
'''Modbus response with a valid Modbus request must be forwarded'''
_ = config_tsun_inv1 # setup config structure
m = MemoryStream(msg_modbus_rsp)
@@ -1899,7 +1949,8 @@ def test_msg_modbus_rsp2(config_tsun_inv1, msg_modbus_rsp):
m.close()
-def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp):
+@pytest.mark.asyncio
+async def test_msg_modbus_rsp3(my_loop, config_tsun_inv1, msg_modbus_rsp):
'''Modbus response with a valid Modbus request must be forwarded'''
_ = config_tsun_inv1
m = MemoryStream(msg_modbus_rsp)
@@ -1935,7 +1986,8 @@ def test_msg_modbus_rsp3(config_tsun_inv1, msg_modbus_rsp):
m.close()
-def test_msg_unknown_rsp(config_tsun_inv1, msg_unknown_cmd_rsp):
+@pytest.mark.asyncio
+async def test_msg_unknown_rsp(my_loop, config_tsun_inv1, msg_unknown_cmd_rsp):
_ = config_tsun_inv1
m = MemoryStream(msg_unknown_cmd_rsp)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1953,7 +2005,8 @@ def test_msg_unknown_rsp(config_tsun_inv1, msg_unknown_cmd_rsp):
assert m.db.stat['proxy']['Modbus_Command'] == 0
m.close()
-def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_invalid):
+@pytest.mark.asyncio
+async def test_msg_modbus_invalid(my_loop, config_tsun_inv1, msg_modbus_invalid):
_ = config_tsun_inv1
m = MemoryStream(msg_modbus_invalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -1967,7 +2020,8 @@ def test_msg_modbus_invalid(config_tsun_inv1, msg_modbus_invalid):
assert m.db.stat['proxy']['Modbus_Command'] == 0
m.close()
-def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp):
+@pytest.mark.asyncio
+async def test_msg_modbus_fragment(my_loop, config_tsun_inv1, msg_modbus_rsp):
_ = config_tsun_inv1
# receive more bytes than expected (7 bytes from the next msg)
m = MemoryStream(msg_modbus_rsp+b'\x00\x00\x00\x45\x10\x52\x31', (0,))
@@ -1993,7 +2047,7 @@ def test_msg_modbus_fragment(config_tsun_inv1, msg_modbus_rsp):
m.close()
@pytest.mark.asyncio
-async def test_modbus_polling(config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
+async def test_modbus_polling(my_loop, config_tsun_inv1, heartbeat_ind_msg, heartbeat_rsp_msg):
_ = config_tsun_inv1
assert asyncio.get_running_loop()
m = MemoryStream(heartbeat_ind_msg, (0,))
@@ -2106,7 +2160,7 @@ async def test_modbus_scaning(config_tsun_scan, heartbeat_ind_msg, heartbeat_rsp
m.close()
@pytest.mark.asyncio
-async def test_start_client_mode(config_tsun_inv1, str_test_ip):
+async def test_start_client_mode(my_loop, config_tsun_inv1, str_test_ip):
_ = config_tsun_inv1
assert asyncio.get_running_loop()
m = MemoryStream(b'')
@@ -2210,7 +2264,8 @@ async def test_start_client_mode_scan(config_tsun_scan_dcu, str_test_ip, dcu_mod
m.close()
-def test_timeout(config_tsun_inv1):
+@pytest.mark.asyncio
+async def test_timeout(my_loop, config_tsun_inv1):
_ = config_tsun_inv1
m = MemoryStream(b'')
assert m.state == State.init
@@ -2223,7 +2278,8 @@ def test_timeout(config_tsun_inv1):
m.state = State.closed
m.close()
-def test_fnc_dispatch():
+@pytest.mark.asyncio
+async def test_fnc_dispatch(my_loop, config_tsun_inv1):
def msg():
return
@@ -2244,7 +2300,8 @@ def test_fnc_dispatch():
assert _obj == m.msg_unknown
assert _str == "'msg_unknown'"
-def test_timestamp():
+@pytest.mark.asyncio
+async def test_timestamp(my_loop, config_tsun_inv1):
m = MemoryStream(b'')
ts = m._timestamp()
ts_emu = m._emu_timestamp()
@@ -2271,7 +2328,7 @@ class InverterTest(InverterBase):
@pytest.mark.asyncio
-async def test_proxy_at_cmd(config_tsun_inv1, patch_open_connection, at_command_ind_msg, at_command_rsp_msg):
+async def test_proxy_at_cmd(my_loop, config_tsun_inv1, patch_open_connection, at_command_ind_msg, at_command_rsp_msg):
_ = config_tsun_inv1
_ = patch_open_connection
assert asyncio.get_running_loop()
@@ -2309,7 +2366,7 @@ async def test_proxy_at_cmd(config_tsun_inv1, patch_open_connection, at_command_
assert Proxy.mqtt.data == ""
@pytest.mark.asyncio
-async def test_proxy_at_blocked(config_tsun_inv1, patch_open_connection, at_command_ind_msg_block, at_command_rsp_msg):
+async def test_proxy_at_blocked(my_loop, config_tsun_inv1, patch_open_connection, at_command_ind_msg_block, at_command_rsp_msg):
_ = config_tsun_inv1
_ = patch_open_connection
assert asyncio.get_running_loop()
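Most hunks above follow the same mechanical pattern: the synchronous test becomes a coroutine, gets the @pytest.mark.asyncio marker, and takes the shared loop fixture my_loop (plus a config fixture) so an event loop is guaranteed to be running. Reduced to its core, assuming default pytest-asyncio settings:

import asyncio

import pytest

pytest_plugins = ('pytest_asyncio',)


@pytest.mark.asyncio
async def test_runs_on_event_loop():
    # the marker makes pytest-asyncio execute the coroutine on an event
    # loop, so asyncio.get_running_loop() no longer raises inside the test
    assert asyncio.get_running_loop()
    await asyncio.sleep(0)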
diff --git a/app/tests/test_solarman_emu.py b/app/tests/test_solarman_emu.py
index a62fbdc..a3d517c 100644
--- a/app/tests/test_solarman_emu.py
+++ b/app/tests/test_solarman_emu.py
@@ -9,6 +9,9 @@ from infos import Infos, Register
from test_solarman import FakeIfc, FakeInverter, MemoryStream, get_sn_int, get_sn, correct_checksum, config_tsun_inv1, msg_modbus_rsp
from test_infos_g3p import str_test_ip, bytes_test_ip
+
+pytest_plugins = ('pytest_asyncio',)
+
timestamp = 0x3224c8bc
class InvStream(MemoryStream):
@@ -125,17 +128,17 @@ def heartbeat_ind():
msg = b'\xa5\x01\x00\x10G\x00\x01\x00\x00\x00\x00\x00Y\x15'
return msg
-def test_emu_init_close():
- # received a message with wrong start byte plus an valid message
- # the complete receive buffer must be cleared to
- # find the next valid message
+@pytest.mark.asyncio
+async def test_emu_init_close(my_loop, config_tsun_inv1):
+ _ = config_tsun_inv1
+ assert asyncio.get_running_loop()
inv = InvStream()
cld = CldStream(inv)
cld.close()
@pytest.mark.asyncio
-async def test_emu_start(config_tsun_inv1, msg_modbus_rsp, str_test_ip, device_ind_msg):
+async def test_emu_start(my_loop, config_tsun_inv1, msg_modbus_rsp, str_test_ip, device_ind_msg):
_ = config_tsun_inv1
assert asyncio.get_running_loop()
inv = InvStream(msg_modbus_rsp)
@@ -152,7 +155,8 @@ async def test_emu_start(config_tsun_inv1, msg_modbus_rsp, str_test_ip, device_i
assert inv.ifc.fwd_fifo.peek() == device_ind_msg
cld.close()
-def test_snd_hb(config_tsun_inv1, heartbeat_ind):
+@pytest.mark.asyncio
+async def test_snd_hb(my_loop, config_tsun_inv1, heartbeat_ind):
_ = config_tsun_inv1
inv = InvStream()
cld = CldStream(inv)
@@ -163,7 +167,7 @@ def test_snd_hb(config_tsun_inv1, heartbeat_ind):
cld.close()
@pytest.mark.asyncio
-async def test_snd_inv_data(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg):
+async def test_snd_inv_data(my_loop, config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg):
_ = config_tsun_inv1
inv = InvStream()
inv.db.set_db_def_value(Register.INVERTER_STATUS, 1)
@@ -205,7 +209,7 @@ async def test_snd_inv_data(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg
cld.close()
@pytest.mark.asyncio
-async def test_rcv_invalid(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg):
+async def test_rcv_invalid(my_loop, config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg):
_ = config_tsun_inv1
inv = InvStream()
assert asyncio.get_running_loop() == inv.mb_timer.loop
diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py
index 225c38e..fa42eed 100644
--- a/app/tests/test_talent.py
+++ b/app/tests/test_talent.py
@@ -1048,7 +1048,8 @@ def msg_inverter_ms3000_ind(): # Data indication from the controller
msg += b'\x53\x00\x66' # | S.f'
return msg
-def test_read_message(msg_contact_info):
+@pytest.mark.asyncio
+async def test_read_message(msg_contact_info):
Config.act_config = {'tsun':{'enabled': True}}
m = MemoryStream(msg_contact_info, (0,))
m.read() # read complete msg, and dispatch msg
diff --git a/app/tests/test_web_route.py b/app/tests/test_web_route.py
index 4ed9b36..86817ac 100644
--- a/app/tests/test_web_route.py
+++ b/app/tests/test_web_route.py
@@ -15,7 +15,6 @@ pytest_plugins = ('pytest_asyncio',)
@pytest.fixture(scope="session")
def client():
app.secret_key = 'super secret key'
- Web(app, '../transfer', False)
return app.test_client()
@pytest.fixture
diff --git a/ha_addons/ha_addon/rootfs/run.sh b/ha_addons/ha_addon/rootfs/run.sh
index f146a70..6c231e4 100755
--- a/ha_addons/ha_addon/rootfs/run.sh
+++ b/ha_addons/ha_addon/rootfs/run.sh
@@ -30,4 +30,4 @@ cd /home/proxy || exit
export VERSION=$(cat /proxy-version.txt)
echo "Start Proxyserver..."
-python3 server.py --rel_urls=True --json_config=/data/options.json --log_path=/homeassistant/tsun-proxy/logs/ --config_path=/homeassistant/tsun-proxy/ --log_backups=2
+python3 server.py --rel_urls --json_config=/data/options.json --log_path=/homeassistant/tsun-proxy/logs/ --config_path=/homeassistant/tsun-proxy/ --log_backups=2
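Passing --rel_urls as a bare flag avoids the usual boolean-option pitfall: if the option were declared with type=bool, any non-empty string, including "False", would parse as true. A small argparse sketch of the difference, assuming a store-true style flag; the actual option definition in server.py may differ:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--rel_urls', action='store_true')  # bare flag: present == True

args = parser.parse_args(['--rel_urls'])
assert args.rel_urls is True

# with type=bool, '--rel_urls=False' would still evaluate to True,
# because bool('False') is truthy
assert bool('False') is True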
From 2292c5e39e8aaa1cffa99877002126e79f8c42b3 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Sat, 10 May 2025 20:26:00 +0200
Subject: [PATCH 4/8] Update ghcr.io/hassio-addons/base Docker tag to v17.2.5
(#407)
* Update ghcr.io/hassio-addons/base Docker tag to v17.2.5
---------
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Stefan Allius
---
CHANGELOG.md | 1 +
ha_addons/ha_addon/Dockerfile | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5e6e1ef..9858bd1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [unreleased]
+- Update ghcr.io/hassio-addons/base Docker tag to v17.2.5
- fix a lot of pytest-asyncio problems in the unit tests
- Cleanup startup code for Quart and the Proxy
- Redirect the hypercorn traces to a separate log-file
diff --git a/ha_addons/ha_addon/Dockerfile b/ha_addons/ha_addon/Dockerfile
index 0529d5b..3557e13 100755
--- a/ha_addons/ha_addon/Dockerfile
+++ b/ha_addons/ha_addon/Dockerfile
@@ -13,7 +13,7 @@
# 1 Build Base Image #
######################
-ARG BUILD_FROM="ghcr.io/hassio-addons/base:17.2.4"
+ARG BUILD_FROM="ghcr.io/hassio-addons/base:17.2.5"
# hadolint ignore=DL3006
FROM $BUILD_FROM AS base
From 907dcb16237c1e6e7cbbe93e4bbd52735e1b9c13 Mon Sep 17 00:00:00 2001
From: Stefan Allius <122395479+s-allius@users.noreply.github.com>
Date: Tue, 13 May 2025 00:38:06 +0200
Subject: [PATCH 5/8] S allius/issue409 (#411)
* scan log files for a timestamp to use as the creation timestamp
* increase test coverage
* add an empty file for unit tests
- the empty file is needed for unit tests to force
  an exception when trying to scan the first line
  for a timestamp
* set timezone of scanned creation time
---
app/src/web/log_files.py | 42 ++++++++++++++++++++++++++++++++-----
app/tests/log/empty.txt | 0
app/tests/test_web_route.py | 23 +++++++++++++++++++-
3 files changed, 59 insertions(+), 6 deletions(-)
create mode 100644 app/tests/log/empty.txt
diff --git a/app/src/web/log_files.py b/app/src/web/log_files.py
index 772e292..72b3243 100644
--- a/app/src/web/log_files.py
+++ b/app/src/web/log_files.py
@@ -1,26 +1,58 @@
from quart import render_template
-from quart_babel import format_datetime, format_decimal
+from quart_babel import format_datetime, format_decimal, _
from quart.helpers import send_from_directory
from werkzeug.utils import secure_filename
from cnf.config import Config
+from datetime import datetime
+from os import DirEntry
import os
+from dateutil import tz
from . import web
-def _get_file(file):
+def _get_birth_from_log(path: str) -> None | datetime:
+ '''read timestamp from the first line of a log file'''
+ dt = None
+ try:
+ with open(path) as f:
+ first_line = f.readline()
+ first_line = first_line.lstrip("'")
+ fmt = "%Y-%m-%d %H:%M:%S" if first_line[4] == '-' \
+ else "%d-%m-%Y %H:%M:%S"
+ dt = datetime.strptime(first_line[0:19], fmt). \
+ replace(tzinfo=tz.tzlocal())
+ except Exception:
+ pass
+ return dt
+
+
+def _get_file(file: DirEntry) -> dict:
    '''build one row for the log file table'''
entry = {}
entry['name'] = file.name
stat = file.stat()
entry['size'] = format_decimal(stat.st_size)
- entry['date'] = stat.st_mtime
- entry['created'] = format_datetime(stat.st_ctime, format="short")
+ try:
+ dt = stat.st_birthtime
+
+ except Exception:
+ dt = _get_birth_from_log(file.path)
+
+ if dt:
+ entry['created'] = format_datetime(dt, format="short")
+
+        # sort by creation date, if available
+ entry['date'] = dt if isinstance(dt, float) else dt.timestamp()
+ else:
+ entry['created'] = _('n/a')
+ entry['date'] = stat.st_mtime
+
entry['modified'] = format_datetime(stat.st_mtime, format="short")
return entry
-def get_list_data():
+def get_list_data() -> list:
    '''build the log file list'''
file_list = []
with os.scandir(Config.get_log_path()) as it:
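When the filesystem provides no st_birthtime, _get_birth_from_log() guesses the creation time from the first log line; the date format is chosen by checking whether the fifth character is a dash (ISO YYYY-MM-DD) or not (DD-MM-YYYY). The same logic as a standalone sketch, with two illustrative lines:

from datetime import datetime

from dateutil import tz


def parse_log_birth(first_line: str) -> None | datetime:
    """Guess the timestamp format of a log file's first line (sketch)."""
    first_line = first_line.lstrip("'")
    try:
        fmt = "%Y-%m-%d %H:%M:%S" if first_line[4] == '-' \
            else "%d-%m-%Y %H:%M:%S"
        return datetime.strptime(first_line[:19], fmt) \
            .replace(tzinfo=tz.tzlocal())
    except (ValueError, IndexError):
        return None  # e.g. the empty test file


# both notations resolve to the same local time
assert parse_log_birth('2024-01-31 10:30:15 INFO proxy started')
assert parse_log_birth('31-01-2024 10:30:15 INFO proxy started')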
diff --git a/app/tests/log/empty.txt b/app/tests/log/empty.txt
new file mode 100644
index 0000000..e69de29
diff --git a/app/tests/test_web_route.py b/app/tests/test_web_route.py
index 86817ac..564a308 100644
--- a/app/tests/test_web_route.py
+++ b/app/tests/test_web_route.py
@@ -9,6 +9,8 @@ from cnf.config import Config
from mock import patch
from proxy import Proxy
import os, errno
+from os import DirEntry, stat_result
+import datetime
pytest_plugins = ('pytest_asyncio',)
@@ -201,14 +203,33 @@ async def test_notes_fetch(client, config_conn):
@pytest.mark.asyncio
-async def test_file_fetch(client, config_conn):
+async def test_file_fetch(client, config_conn, monkeypatch):
"""Test the data-fetch route."""
_ = config_conn
assert Config.log_path == 'app/tests/log/'
+ def my_stat1(*arg):
+ stat = stat_result
+ stat.st_size = 20
+ stat.st_birthtime = datetime.datetime(2024, 1, 31, 10, 30, 15)
+ stat.st_mtime = datetime.datetime(2024, 1, 1, 1, 30, 15).timestamp()
+ return stat
+
+ monkeypatch.setattr(DirEntry, "stat", my_stat1)
response = await client.get('/file-fetch')
assert response.status_code == 200
+ def my_stat2(*arg):
+ stat = stat_result
+ stat.st_size = 20
+ stat.st_mtime = datetime.datetime(2024, 1, 1, 1, 30, 15).timestamp()
+ return stat
+
+ monkeypatch.setattr(DirEntry, "stat", my_stat2)
+ monkeypatch.delattr(stat_result, "st_birthtime")
+ response = await client.get('/file-fetch')
+ assert response.status_code == 200
+
@pytest.mark.asyncio
async def test_send_file(client, config_conn):
"""Test the send-file route."""
From 4371f3dadb58504b47fbb03d9c4183f7eec54edf Mon Sep 17 00:00:00 2001
From: Stefan Allius <122395479+s-allius@users.noreply.github.com>
Date: Tue, 13 May 2025 21:38:33 +0200
Subject: [PATCH 6/8] S allius/issue396 (#412)
* add title to table icons
* optimize datetime formatting
* change icons
* translate n/a
---
app/src/web/conn_table.py | 21 +++++++++++----------
app/src/web/mqtt_table.py | 7 +++++--
app/src/web/templates/page_mqtt.html.j2 | 6 +++---
app/translations/de/LC_MESSAGES/messages.po | 6 +++++-
4 files changed, 24 insertions(+), 16 deletions(-)
diff --git a/app/src/web/conn_table.py b/app/src/web/conn_table.py
index 4b81868..35d221d 100644
--- a/app/src/web/conn_table.py
+++ b/app/src/web/conn_table.py
@@ -10,39 +10,40 @@ from .log_handler import LogHandler
def _get_device_icon(client_mode: bool):
    '''returns the icon for the device connection'''
if client_mode:
- return 'fa-download fa-rotate-180'
+ return 'fa-download fa-rotate-180', 'Server Mode'
- return 'fa-upload fa-rotate-180'
+ return 'fa-upload fa-rotate-180', 'Client Mode'
def _get_cloud_icon(emu_mode: bool):
    '''returns the icon for the cloud connection'''
if emu_mode:
- return 'fa-cloud-arrow-up-alt'
+ return 'fa-cloud-arrow-up-alt', 'Emu Mode'
- return 'fa-cloud'
+ return 'fa-cloud', 'Proxy Mode'
def _get_row(inv: InverterBase):
'''build one row for the connection table'''
client_mode = inv.client_mode
inv_serial = inv.local.stream.inv_serial
- icon1 = _get_device_icon(client_mode)
+ icon1, descr1 = _get_device_icon(client_mode)
ip1, port1 = inv.addr
icon2 = ''
+ descr2 = ''
ip2 = '--'
port2 = '--'
if inv.remote.ifc:
ip2, port2 = inv.remote.ifc.r_addr
- icon2 = _get_cloud_icon(client_mode)
+ icon2, descr2 = _get_cloud_icon(client_mode)
row = []
- row.append(f' {ip1}:{port1}')
- row.append(f' {ip1}')
+ row.append(f' {ip1}:{port1}')
+ row.append(f' {ip1}')
row.append(inv_serial)
- row.append(f' {ip2}:{port2}')
- row.append(f' {ip2}')
+ row.append(f' {ip2}:{port2}')
+ row.append(f' {ip2}')
return row
diff --git a/app/src/web/mqtt_table.py b/app/src/web/mqtt_table.py
index 8370c17..8530ef3 100644
--- a/app/src/web/mqtt_table.py
+++ b/app/src/web/mqtt_table.py
@@ -46,10 +46,13 @@ def get_table_data():
@web.route('/mqtt-fetch')
async def mqtt_fetch():
mqtt = Mqtt(None)
- ctime = format_datetime(dt=mqtt.ctime, format='short')
+ cdatetime = format_datetime(dt=mqtt.ctime, format='d.MM. HH:mm')
data = {
"update-time": format_datetime(format="medium"),
- "mqtt-ctime": f"{ctime}
",
+ "mqtt-ctime": f"""
+{cdatetime}
+{cdatetime}
+""",
"mqtt-tx": f"{mqtt.published}
",
"mqtt-rx": f"{mqtt.received}
",
}
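The MQTT page now renders the connection start time with an explicit Babel pattern instead of the locale's "short" format. quart_babel passes the pattern through to Babel, so the effect can be checked standalone (the de locale here is only an example):

from datetime import datetime

from babel.dates import format_datetime

started = datetime(2025, 5, 13, 20, 55)
# explicit LDML pattern, as now used for the MQTT start-time cell
print(format_datetime(started, format='d.MM. HH:mm', locale='de'))  # 13.05. 20:55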
diff --git a/app/src/web/templates/page_mqtt.html.j2 b/app/src/web/templates/page_mqtt.html.j2
index 0f23492..9a1d181 100644
--- a/app/src/web/templates/page_mqtt.html.j2
+++ b/app/src/web/templates/page_mqtt.html.j2
@@ -8,7 +8,7 @@
-
+
-
@@ -21,7 +21,7 @@
-
+
-
@@ -34,7 +34,7 @@
-
+
-
diff --git a/app/translations/de/LC_MESSAGES/messages.po b/app/translations/de/LC_MESSAGES/messages.po
index 8daf333..2bbbb99 100644
--- a/app/translations/de/LC_MESSAGES/messages.po
+++ b/app/translations/de/LC_MESSAGES/messages.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: tsun-gen3-proxy 0.14.0\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
-"POT-Creation-Date: 2025-05-04 18:16+0200\n"
+"POT-Creation-Date: 2025-05-13 20:55+0200\n"
"PO-Revision-Date: 2025-04-18 16:24+0200\n"
"Last-Translator: FULL NAME
\n"
"Language: de\n"
@@ -43,6 +43,10 @@ msgstr "Cloud-IP:Port"
msgid "Cloud-IP"
msgstr "Cloud-IP"
+#: src/web/log_files.py:48
+msgid "n/a"
+msgstr "keine Angabe"
+
#: src/web/mqtt_table.py:27
msgid "MQTT devices"
msgstr "MQTT Geräte"
From c1bdec08449a9f39fb341a478d5bcee458d0a38e Mon Sep 17 00:00:00 2001
From: Stefan Allius <122395479+s-allius@users.noreply.github.com>
Date: Tue, 13 May 2025 22:53:37 +0200
Subject: [PATCH 7/8] S allius/issue396 (#413)
* improve translation of delete modal
---
app/src/web/templates/page_logging.html.j2 | 4 ++--
app/translations/de/LC_MESSAGES/messages.po | 22 ++++++++++-----------
2 files changed, 13 insertions(+), 13 deletions(-)
diff --git a/app/src/web/templates/page_logging.html.j2 b/app/src/web/templates/page_logging.html.j2
index f80763c..176b058 100644
--- a/app/src/web/templates/page_logging.html.j2
+++ b/app/src/web/templates/page_logging.html.j2
@@ -7,9 +7,9 @@
-
{{_("Do you really want to delete the log file")}}:
?
+
{{_('Do you really want to delete the log file:
%(file)s ?', file='')}}
-
+
diff --git a/app/translations/de/LC_MESSAGES/messages.po b/app/translations/de/LC_MESSAGES/messages.po
index 2bbbb99..da4c5b2 100644
--- a/app/translations/de/LC_MESSAGES/messages.po
+++ b/app/translations/de/LC_MESSAGES/messages.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: tsun-gen3-proxy 0.14.0\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
-"POT-Creation-Date: 2025-05-13 20:55+0200\n"
+"POT-Creation-Date: 2025-05-13 22:34+0200\n"
"PO-Revision-Date: 2025-04-18 16:24+0200\n"
"Last-Translator: FULL NAME
\n"
"Language: de\n"
@@ -19,27 +19,27 @@ msgstr ""
"Content-Transfer-Encoding: 8bit\n"
"Generated-By: Babel 2.17.0\n"
-#: src/web/conn_table.py:52 src/web/templates/base.html.j2:58
+#: src/web/conn_table.py:53 src/web/templates/base.html.j2:58
msgid "Connections"
msgstr "Verbindungen"
-#: src/web/conn_table.py:59
+#: src/web/conn_table.py:60
msgid "Device-IP:Port"
msgstr "Geräte-IP:Port"
-#: src/web/conn_table.py:59
+#: src/web/conn_table.py:60
msgid "Device-IP"
msgstr "Geräte-IP"
-#: src/web/conn_table.py:60 src/web/mqtt_table.py:34
+#: src/web/conn_table.py:61 src/web/mqtt_table.py:34
msgid "Serial-No"
msgstr "Seriennummer"
-#: src/web/conn_table.py:61
+#: src/web/conn_table.py:62
msgid "Cloud-IP:Port"
msgstr "Cloud-IP:Port"
-#: src/web/conn_table.py:61
+#: src/web/conn_table.py:62
msgid "Cloud-IP"
msgstr "Cloud-IP"
@@ -120,12 +120,12 @@ msgid "TSUN Proxy - Log Files"
msgstr "TSUN Proxy - Log Dateien"
#: src/web/templates/page_logging.html.j2:10
-msgid "Do you really want to delete the log file"
-msgstr "Soll die Datei wirklich gelöscht werden"
+msgid "Do you really want to delete the log file:
%(file)s ?"
+msgstr "Soll die Datei:
%(file)s
wirklich gelöscht werden?"
#: src/web/templates/page_logging.html.j2:12
-msgid "Delete File
Date: Tue, 20 May 2025 19:54:24 +0200
Subject: [PATCH 8/8] add dcu_power MQTT topic (#416)
* add dcu_power MQTT topic
* add DCU_COMMAND counter
* test invalid dcu_power values
* handle and test DCU Command responses
* test dcu commands from the TSUN cloud
* cleanup MQTT topic handling
* update changelog
* test MQTT error and exception handling
* increase test coverage
* test dispatcher exceptions
* fix full_topic definition in dispatch test
---
CHANGELOG.md | 1 +
app/src/gen3plus/solarman_v5.py | 39 ++++++++
app/src/infos.py | 2 +
app/src/mqtt.py | 136 ++++++++++++++++------------
app/tests/test_infos.py | 4 +-
app/tests/test_mqtt.py | 141 ++++++++++++++++++++++++++++-
app/tests/test_solarman.py | 152 +++++++++++++++++++++++++++++++-
7 files changed, 413 insertions(+), 62 deletions(-)
mode change 100644 => 100755 app/src/gen3plus/solarman_v5.py
mode change 100644 => 100755 app/src/mqtt.py
mode change 100644 => 100755 app/tests/test_mqtt.py
mode change 100644 => 100755 app/tests/test_solarman.py
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9858bd1..df55052 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [unreleased]
+- add MQTT topic `dcu_power` for setting output power on DCUs
- Update ghcr.io/hassio-addons/base Docker tag to v17.2.5
- fix a lot of pytest-asyncio problems in the unit tests
- Cleanup startup code for Quart and the Proxy
diff --git a/app/src/gen3plus/solarman_v5.py b/app/src/gen3plus/solarman_v5.py
old mode 100644
new mode 100755
index 38d9eb9..2cf7d16
--- a/app/src/gen3plus/solarman_v5.py
+++ b/app/src/gen3plus/solarman_v5.py
@@ -247,6 +247,7 @@ class SolarmanBase(Message):
class SolarmanV5(SolarmanBase):
AT_CMD = 1
MB_RTU_CMD = 2
+ DCU_CMD = 5
AT_CMD_RSP = 8
MB_CLIENT_DATA_UP = 30
'''Data up time in client mode'''
@@ -532,6 +533,26 @@ class SolarmanV5(SolarmanBase):
except Exception:
self.ifc.tx_clear()
+ def send_dcu_cmd(self, pdu: bytearray):
+ if self.sensor_list != 0x3026:
+ logger.debug(f'[{self.node_id}] DCU CMD not allowed,'
+ f' for sensor: {self.sensor_list:#04x}')
+ return
+
+ if self.state != State.up:
+            logger.warning(f'[{self.node_id}] ignore DCU CMD,'
+                           ' because the state is not UP anymore')
+ return
+
+ self.inverter.forward_dcu_cmd_resp = False
+ self._build_header(0x4510)
+ self.ifc.tx_add(struct.pack(' 1024:
- logger_mqtt.error('out_coeff: value must be in'
- 'the range 0..100,'
- f' got: {payload}')
- else:
- await self.modbus_cmd(message,
- Modbus.WRITE_SINGLE_REG,
- 0, 0x202c, val)
- except Exception:
- pass
-
- if message.topic.matches(self.mb_reads_topic):
- await self.modbus_cmd(message,
- Modbus.READ_REGS, 2)
-
- if message.topic.matches(self.mb_inputs_topic):
- await self.modbus_cmd(message,
- Modbus.READ_INPUTS, 2)
-
- if message.topic.matches(self.mb_at_cmd_topic):
- await self.at_cmd(message)
+ async def _out_coeff(self, message):
+ payload = message.payload.decode("UTF-8")
+ try:
+ val = round(float(payload) * 1024/100)
+ if val < 0 or val > 1024:
+                logger_mqtt.error('out_coeff: value must be in '
+ 'the range 0..100,'
+ f' got: {payload}')
+ else:
+ await self._modbus_cmd(message,
+ Modbus.WRITE_SINGLE_REG,
+ 0, 0x202c, val)
+ except Exception:
+ pass
def each_inverter(self, message, func_name: str):
topic = str(message.topic)
@@ -175,7 +182,7 @@ class Mqtt(metaclass=Singleton):
else:
logger_mqtt.warning(f'Node_id: {node_id} not found')
- async def modbus_cmd(self, message, func, params=0, addr=0, val=0):
+ async def _modbus_cmd(self, message, func, params=0, addr=0, val=0):
payload = message.payload.decode("UTF-8")
for fnc in self.each_inverter(message, "send_modbus_cmd"):
res = payload.split(',')
@@ -190,7 +197,22 @@ class Mqtt(metaclass=Singleton):
                val = int(res[1])  # length
await fnc(func, addr, val, logging.INFO)
- async def at_cmd(self, message):
+ async def _at_cmd(self, message):
payload = message.payload.decode("UTF-8")
for fnc in self.each_inverter(message, "send_at_cmd"):
await fnc(payload)
+
+ def _dcu_cmd(self, message):
+ payload = message.payload.decode("UTF-8")
+ try:
+ val = round(float(payload) * 10)
+ if val < 1000 or val > 8000:
+                logger_mqtt.error('dcu_power: value must be in '
+ 'the range 100..800,'
+ f' got: {payload}')
+ else:
+ pdu = struct.pack('>BBBBBBH', 1, 1, 6, 1, 0, 1, val)
+ for fnc in self.each_inverter(message, "send_dcu_cmd"):
+ fnc(pdu)
+ except Exception:
+ pass
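_dcu_cmd() converts the dcu_power payload from watts into tenths of a watt and packs it big-endian into the last two bytes of the DCU PDU, while _out_coeff() maps a 0..100 % payload onto the 0..1024 register range. The arithmetic, checked against the PDU expected by the new tests:

import struct

# dcu_power: payload '100.0' W -> 1000 tenths of a watt -> 0x03e8
val = round(float('100.0') * 10)
assert 1000 <= val <= 8000          # accepted range 100..800 W
pdu = struct.pack('>BBBBBBH', 1, 1, 6, 1, 0, 1, val)
assert pdu == b'\x01\x01\x06\x01\x00\x01\x03\xe8'

# out_coeff: payload '50' % -> 512, written to Modbus register 0x202c
assert round(float('50') * 1024 / 100) == 512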
diff --git a/app/tests/test_infos.py b/app/tests/test_infos.py
index 43c0050..9977a67 100644
--- a/app/tests/test_infos.py
+++ b/app/tests/test_infos.py
@@ -17,13 +17,13 @@ def test_statistic_counter():
assert val == None or val == 0
i.static_init() # initialize counter
- assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}})
+ assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "DCU_Command": 0, "Modbus_Command": 0}})
val = i.dev_value(Register.INVERTER_CNT) # valid and initiliazed addr
assert val == 0
i.inc_counter('Inverter_Cnt')
- assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}})
+ assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "DCU_Command": 0, "Modbus_Command": 0}})
val = i.dev_value(Register.INVERTER_CNT)
assert val == 1
diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py
old mode 100644
new mode 100755
index c6f7f49..eb68796
--- a/app/tests/test_mqtt.py
+++ b/app/tests/test_mqtt.py
@@ -3,8 +3,9 @@ import pytest
import asyncio
import aiomqtt
import logging
-
+from aiomqtt import MqttError
from mock import patch, Mock
+
from async_stream import AsyncIfcImpl
from singleton import Singleton
from mqtt import Mqtt
@@ -17,7 +18,7 @@ NO_MOSQUITTO_TEST = False
pytest_plugins = ('pytest_asyncio',)
-@pytest.fixture(scope="module", autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def module_init():
Singleton._instances.clear()
yield
@@ -44,6 +45,14 @@ def config_no_conn(test_port):
Config.act_config = {'mqtt':{'host': "", 'port': test_port, 'user': '', 'passwd': ''},
'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'}
}
+ Config.def_config = {}
+
+@pytest.fixture
+def config_def_conn(test_port):
+ Config.act_config = {'mqtt':{'host': "unknown_url", 'port': test_port, 'user': '', 'passwd': ''},
+ 'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'}
+ }
+ Config.def_config = Config.act_config
@pytest.fixture
def spy_at_cmd():
@@ -69,6 +78,14 @@ def spy_modbus_cmd_client():
yield wrapped_conn
conn.close()
+@pytest.fixture
+def spy_dcu_cmd():
+ conn = SolarmanV5(None, ('test.local', 1234), server_side=True, client_mode= False, ifc=AsyncIfcImpl())
+ conn.node_id = 'inv_3/'
+ with patch.object(conn, 'send_dcu_cmd', wraps=conn.send_dcu_cmd) as wrapped_conn:
+ yield wrapped_conn
+ conn.close()
+
def test_native_client(test_hostname, test_port):
"""Sanity check: Make sure the paho-mqtt client can connect to the test
MQTT server. Otherwise the test set NO_MOSQUITTO_TEST to True and disable
@@ -167,12 +184,81 @@ async def test_mqtt_no_config(config_no_conn):
finally:
await m.close()
+@pytest.mark.asyncio
+async def test_mqtt_except_no_config(config_no_conn, monkeypatch, caplog):
+ _ = config_no_conn
+
+ assert asyncio.get_running_loop()
+
+ async def my_aenter(self):
+ raise MqttError('TestException') from None
+
+ monkeypatch.setattr(aiomqtt.Client, "__aenter__", my_aenter)
+
+ LOGGER = logging.getLogger("mqtt")
+ LOGGER.propagate = True
+ LOGGER.setLevel(logging.INFO)
+
+ with caplog.at_level(logging.INFO):
+ m = Mqtt(None)
+ assert m.task
+ await asyncio.sleep(0)
+ try:
+ await m.publish('homeassistant/status', 'online')
+ assert False
+ except MqttError:
+ pass
+ except Exception:
+ assert False
+ finally:
+ await m.close()
+ assert 'Connection lost; Reconnecting in 5 seconds' in caplog.text
+
+@pytest.mark.asyncio
+async def test_mqtt_except_def_config(config_def_conn, monkeypatch, caplog):
+ _ = config_def_conn
+
+ assert asyncio.get_running_loop()
+
+ on_connect = asyncio.Event()
+ async def cb():
+ on_connect.set()
+
+ async def my_aenter(self):
+ raise MqttError('TestException') from None
+
+ monkeypatch.setattr(aiomqtt.Client, "__aenter__", my_aenter)
+
+ LOGGER = logging.getLogger("mqtt")
+ LOGGER.propagate = True
+ LOGGER.setLevel(logging.INFO)
+
+ with caplog.at_level(logging.INFO):
+ m = Mqtt(cb)
+ assert m.task
+ await asyncio.sleep(0)
+ assert not on_connect.is_set()
+ try:
+ await m.publish('homeassistant/status', 'online')
+ assert False
+ except MqttError:
+ pass
+ except Exception:
+ assert False
+ finally:
+ await m.close()
+ assert 'MQTT is unconfigured; Check your config.toml!' in caplog.text
+
@pytest.mark.asyncio
async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd):
_ = config_mqtt_conn
spy = spy_modbus_cmd
try:
m = Mqtt(None)
+ msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None)
+ await m.dispatch_msg(msg)
+ assert m.ha_restarts == 1
+
msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
await m.dispatch_msg(msg)
spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x2008, 2, logging.INFO)
@@ -197,6 +283,23 @@ async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd):
await m.dispatch_msg(msg)
spy.assert_awaited_once_with(Modbus.READ_INPUTS, 0x3000, 10, logging.INFO)
+ # test dispatching with empty mapping table
+ m.topic_defs.clear()
+ spy.reset_mock()
+ msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
+ await m.dispatch_msg(msg)
+ spy.assert_not_called()
+
+ # test dispatching with incomplete mapping table - invalid fnc defined
+ m.topic_defs.append(
+ {'prefix': 'entity_prefix', 'topic': '/+/modbus_read_inputs',
+ 'full_topic': 'tsun/+/modbus_read_inputs', 'fnc': 'invalid'}
+ )
+ spy.reset_mock()
+ msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
+ await m.dispatch_msg(msg)
+ spy.assert_not_called()
+
finally:
await m.close()
@@ -227,6 +330,12 @@ async def test_msg_dispatch_err(config_mqtt_conn, spy_modbus_cmd):
msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10, 7', qos= 0, retain = False, mid= 0, properties= None)
await m.dispatch_msg(msg)
spy.assert_not_called()
+
+ spy.reset_mock()
+ msg = aiomqtt.Message(topic= 'tsun/inv_1/dcu_power', payload= b'100W', qos= 0, retain = False, mid= 0, properties= None)
+ await m.dispatch_msg(msg)
+ spy.assert_not_called()
+
finally:
await m.close()
@@ -267,3 +376,31 @@ async def test_at_cmd_dispatch(config_mqtt_conn, spy_at_cmd):
finally:
await m.close()
+
+@pytest.mark.asyncio
+async def test_dcu_dispatch(config_mqtt_conn, spy_dcu_cmd):
+ _ = config_mqtt_conn
+ spy = spy_dcu_cmd
+ try:
+ m = Mqtt(None)
+ msg = aiomqtt.Message(topic= 'tsun/inv_3/dcu_power', payload= b'100.0', qos= 0, retain = False, mid= 0, properties= None)
+ await m.dispatch_msg(msg)
+ spy.assert_called_once_with(b'\x01\x01\x06\x01\x00\x01\x03\xe8')
+ finally:
+ await m.close()
+
+@pytest.mark.asyncio
+async def test_dcu_inv_value(config_mqtt_conn, spy_dcu_cmd):
+ _ = config_mqtt_conn
+ spy = spy_dcu_cmd
+ try:
+ m = Mqtt(None)
+ msg = aiomqtt.Message(topic= 'tsun/inv_3/dcu_power', payload= b'99.9', qos= 0, retain = False, mid= 0, properties= None)
+ await m.dispatch_msg(msg)
+ spy.assert_not_called()
+
+ msg = aiomqtt.Message(topic= 'tsun/inv_3/dcu_power', payload= b'800.1', qos= 0, retain = False, mid= 0, properties= None)
+ await m.dispatch_msg(msg)
+ spy.assert_not_called()
+ finally:
+ await m.close()
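The new spy_dcu_cmd fixture follows the existing spy pattern: patch.object(..., wraps=...) replaces the method with a mock that still calls the real implementation, so the dispatch tests can assert on the exact PDU bytes without stubbing the behaviour away. The pattern in isolation, independent of the proxy classes:

from unittest.mock import patch


class Sender:
    def send(self, pdu: bytes) -> int:
        return len(pdu)


def test_spy_keeps_real_behaviour():
    s = Sender()
    with patch.object(s, 'send', wraps=s.send) as spy:
        assert s.send(b'\x01\x02') == 2          # real method still runs
        spy.assert_called_once_with(b'\x01\x02')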
diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py
old mode 100644
new mode 100755
index 58da012..77866cf
--- a/app/tests/test_solarman.py
+++ b/app/tests/test_solarman.py
@@ -812,6 +812,26 @@ def dcu_data_rsp_msg(): # 0x1210
msg += b'\x15'
return msg
+@pytest.fixture
+def dcu_command_ind_msg(): # 0x4510
+ msg = b'\xa5\x17\x00\x10\x45\x94\x02' +get_dcu_sn() +b'\x05\x26\x30'
+ msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
+ msg += b'\x01\x01\x06\x01\x00\x01\x03\xe8'
+ msg += correct_checksum(msg)
+ msg += b'\x15'
+ return msg
+
+@pytest.fixture
+def dcu_command_rsp_msg(): # 0x1510
+ msg = b'\xa5\x11\x00\x10\x15\x94\x03' +get_dcu_sn() +b'\x05\x01'
+ msg += total()
+ msg += hb()
+ msg += b'\x00\x00\x00\x00'
+ msg += b'\x01\x01\x01'
+ msg += correct_checksum(msg)
+ msg += b'\x15'
+ return msg
+
@pytest.fixture
def config_tsun_allow_all():
Config.act_config = {
@@ -854,7 +874,17 @@ def config_tsun_scan_dcu():
@pytest.fixture
def config_tsun_dcu1():
- Config.act_config = {'solarman':{'enabled': True},'batteries':{'4100000000000001':{'monitor_sn': 2070233888, 'node_id':'inv1/', 'modbus_polling': True, 'suggested_area':'roof', 'sensor_list': 0}}}
+ Config.act_config = {
+ 'ha':{
+ 'auto_conf_prefix': 'homeassistant',
+ 'discovery_prefix': 'homeassistant',
+ 'entity_prefix': 'tsun',
+ 'proxy_node_id': 'test_1',
+ 'proxy_unique_id': ''
+ },
+ 'solarman':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234},'batteries':{'4100000000000001':{'monitor_sn': 2070233888, 'node_id':'inv1/', 'modbus_polling': True, 'suggested_area':'roof', 'sensor_list': 0}}}
+ Proxy.class_init()
+ Proxy.mqtt = Mqtt()
@pytest.mark.asyncio
async def test_read_message(device_ind_msg):
@@ -2402,3 +2432,123 @@ async def test_proxy_at_blocked(my_loop, config_tsun_inv1, patch_open_connection
assert Proxy.mqtt.key == 'tsun/inv1/at_resp'
assert Proxy.mqtt.data == "+ok"
+
+@pytest.mark.asyncio
+async def test_dcu_cmd(my_loop, config_tsun_allow_all, dcu_dev_ind_msg, dcu_dev_rsp_msg, dcu_data_ind_msg, dcu_data_rsp_msg, dcu_command_ind_msg, dcu_command_rsp_msg):
+    '''test dcu_power command for a DCU device with sensor 0x3026'''
+ _ = config_tsun_allow_all
+ m = MemoryStream(dcu_dev_ind_msg, (0,), True)
+ m.read() # read device ind
+ assert m.control == 0x4110
+ assert str(m.seq) == '01:92'
+ assert m.ifc.tx_fifo.get()==dcu_dev_rsp_msg
+ assert m.ifc.fwd_fifo.get()==dcu_dev_ind_msg
+
+ m.send_dcu_cmd(b'\x01\x01\x06\x01\x00\x01\x03\xe8')
+ assert m.ifc.tx_fifo.get()==b''
+ assert m.ifc.fwd_fifo.get()==b''
+ assert m.sent_pdu == b''
+ assert str(m.seq) == '01:92'
+ assert Proxy.mqtt.key == ''
+ assert Proxy.mqtt.data == ""
+
+ m.append_msg(dcu_data_ind_msg)
+    m.read()     # read dcu data ind
+ assert m.control == 0x4210
+ assert str(m.seq) == '02:93'
+ assert m.ifc.tx_fifo.get()==dcu_data_rsp_msg
+ assert m.ifc.fwd_fifo.get()==dcu_data_ind_msg
+
+ m.send_dcu_cmd(b'\x01\x01\x06\x01\x00\x01\x03\xe8')
+ assert m.ifc.fwd_fifo.get() == b''
+ assert m.ifc.tx_fifo.get()== b''
+ assert m.sent_pdu == dcu_command_ind_msg
+ m.sent_pdu = bytearray()
+
+ assert str(m.seq) == '02:94'
+ assert Proxy.mqtt.key == ''
+ assert Proxy.mqtt.data == ""
+
+ m.append_msg(dcu_command_rsp_msg)
+    m.read()   # read dcu cmd resp
+ assert m.control == 0x1510
+ assert str(m.seq) == '03:94'
+ assert m.ifc.rx_get()==b''
+ assert m.ifc.tx_fifo.get()==b''
+ assert m.ifc.fwd_fifo.get()==b''
+ assert Proxy.mqtt.key == 'tsun/dcu_resp'
+ assert Proxy.mqtt.data == "+ok"
+ Proxy.mqtt.clear() # clear last test result
+
+@pytest.mark.asyncio
+async def test_dcu_cmd_not_supported(my_loop, config_tsun_allow_all, device_ind_msg, device_rsp_msg, inverter_ind_msg, inverter_rsp_msg):
+    '''test that an inverter doesn't accept the dcu_power command'''
+ _ = config_tsun_allow_all
+ m = MemoryStream(device_ind_msg, (0,), True)
+ m.read() # read device ind
+ assert m.control == 0x4110
+ assert str(m.seq) == '01:01'
+ assert m.ifc.tx_fifo.get()==device_rsp_msg
+ assert m.ifc.fwd_fifo.get()==device_ind_msg
+
+ m.send_dcu_cmd(b'\x01\x01\x06\x01\x00\x01\x03\xe8')
+ assert m.ifc.tx_fifo.get()==b''
+ assert m.ifc.fwd_fifo.get()==b''
+ assert m.sent_pdu == b''
+ assert str(m.seq) == '01:01'
+ assert Proxy.mqtt.key == ''
+ assert Proxy.mqtt.data == ""
+
+ m.append_msg(inverter_ind_msg)
+ m.read() # read inverter ind
+ assert m.control == 0x4210
+ assert str(m.seq) == '02:02'
+ assert m.ifc.tx_fifo.get()==inverter_rsp_msg
+ assert m.ifc.fwd_fifo.get()==inverter_ind_msg
+
+ m.send_dcu_cmd(b'\x01\x01\x06\x01\x00\x01\x03\xe8')
+ assert m.ifc.fwd_fifo.get() == b''
+ assert m.ifc.tx_fifo.get()== b''
+ assert m.sent_pdu == b''
+ Proxy.mqtt.clear() # clear last test result
+
+@pytest.mark.asyncio
+async def test_proxy_dcu_cmd(my_loop, config_tsun_dcu1, patch_open_connection, dcu_command_ind_msg, dcu_command_rsp_msg):
+    _ = config_tsun_dcu1
+ _ = patch_open_connection
+ assert asyncio.get_running_loop()
+
+ with InverterTest(FakeReader(), FakeWriter(), client_mode=False) as inverter:
+ await inverter.create_remote()
+ await asyncio.sleep(0)
+ r = inverter.remote.stream
+ l = inverter.local.stream
+
+ l.db.stat['proxy']['DCU_Command'] = 0
+ l.db.stat['proxy']['AT_Command'] = 0
+ l.db.stat['proxy']['Unknown_Ctrl'] = 0
+ l.db.stat['proxy']['AT_Command_Blocked'] = 0
+ l.db.stat['proxy']['Modbus_Command'] = 0
+ inverter.forward_dcu_cmd_resp = False
+ r.append_msg(dcu_command_ind_msg)
+ r.read() # read complete msg, and dispatch msg
+ assert inverter.forward_dcu_cmd_resp
+ inverter.forward(r,l)
+
+ assert l.ifc.tx_fifo.get()==dcu_command_ind_msg
+
+ assert l.db.stat['proxy']['Invalid_Msg_Format'] == 0
+ assert l.db.stat['proxy']['DCU_Command'] == 1
+ assert l.db.stat['proxy']['AT_Command'] == 0
+ assert l.db.stat['proxy']['AT_Command_Blocked'] == 0
+ assert l.db.stat['proxy']['Modbus_Command'] == 0
+
+ l.append_msg(dcu_command_rsp_msg)
+    l.read()     # read dcu cmd resp
+ assert l.ifc.fwd_fifo.peek()==dcu_command_rsp_msg
+ inverter.forward(l,r)
+ assert r.ifc.tx_fifo.get()==dcu_command_rsp_msg
+
+ assert Proxy.mqtt.key == ''
+ assert Proxy.mqtt.data == ""
+