Compare commits

19 Commits

s-allius/i ... s-allius/i

| Author | SHA1 | Date |
|---|---|---|
|  | e629488963 |  |
|  | ac828921c6 |  |
|  | 6744e03a90 |  |
|  | e78395c269 |  |
|  | 55665c95f8 |  |
|  | 51aec15148 |  |
|  | dc9e90d049 |  |
|  | b98b133d79 |  |
|  | 2eab7fec3e |  |
|  | fb4fe6b34d |  |
|  | bfd7dbe032 |  |
|  | 25e4714fa5 |  |
|  | 46d1b77e56 |  |
|  | dc360f31d6 |  |
|  | 2e9d16b611 |  |
|  | 1491e42913 |  |
|  | 07b90da09e |  |
|  | b2d9da2c09 |  |
|  | cf1a87ed6f |  |
@@ -1 +1 @@
3.13.5
3.13.4

10 CHANGELOG.md
@@ -7,15 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [unreleased]

- catch socket.gaierror exception and log this with info level
- Update dependency coverage to v7.9.2
- add-on: bump base-image to version 18.0.3
- add-on: remove armhf and armv7 support
- add-on: add links to config and log-file to the web-UI
- fix some SonarQube warnings
- remove unused 32-bit architectures
- Babel don't build new po file if only the pot creation-date was changed
- Improve Makefile
- Update dependency pytest-asyncio to v1

## [0.14.1] - 2025-05-31

@@ -28,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- set no of pv modules for MS800 GEN3PLUS inverters
- fix the paths to copy the config.example.toml file during proxy start
- add MQTT topic `dcu_power` for setting output power on DCUs
- add MQTT topic `dcu_power` for setting output power on DCUs
- Update ghcr.io/hassio-addons/base Docker tag to v17.2.5
- fix a lot of pytest-asyncio problems in the unit tests
- Cleanup startup code for Quart and the Proxy
30 Makefile

@@ -1,37 +1,27 @@
.PHONY: help build babel clean addon-dev addon-debug addon-rc addon-rel debug dev preview rc rel check-docker-compose install
.PHONY: build babel clean addon-dev addon-debug addon-rc addon-rel debug dev preview rc rel check-docker-compose install

help: ## show help message
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[$$()% a-zA-Z0-9_-]+:.*?##/ { printf "  \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

babel: ## build language files
babel:
	$(MAKE) -C app $@

build:
	$(MAKE) -C ha_addons $@

clean: ## delete all built files
clean:
	$(MAKE) -C app $@
	$(MAKE) -C ha_addons $@

debug dev preview rc rel: ## build docker container in <dev|debg|rc|rel> version
debug dev preview rc rel:
	$(MAKE) -C app babel
	$(MAKE) -C app $@

addon-dev addon-debug addon-rc addon-rel: ## build HA add-on in <dev|debg|rc|rel> version
addon-dev addon-debug addon-rc addon-rel:
	$(MAKE) -C app babel
	$(MAKE) -C ha_addons $(patsubst addon-%,%,$@)

check-docker-compose: ## check the docker-compose file
check-docker-compose:
	docker-compose config -q

PY_VER := $(shell cat .python-version)

install: ## install requirements into the pyenv and switch to proper venv
	@pyenv local $(PY_VER) || { pyenv install $(PY_VER) && pyenv local $(PY_VER) || exit 1; }
	@pyenv exec pip install --upgrade pip
	@pyenv exec pip install -r requirements.txt
	@pyenv exec pip install -r requirements-test.txt
	pyenv exec python --version

run: ## run proxy locally out of the actual venv
	pyenv exec python app/src/server.py -c /app/src/cnf
install:
	python3 -m pip install --upgrade pip
	python3 -m pip install -r requirements.txt
	python3 -m pip install -r requirements-test.txt
@@ -55,7 +55,7 @@ $(BABEL_TRANSLATIONS)/%.pot : $(SRC)/.babel.cfg $(BABEL_INPUT)

$(BABEL_TRANSLATIONS)/%/LC_MESSAGES/messages.po : $(BABEL_TRANSLATIONS)/messages.pot
	@mkdir -p $(@D)
	@pybabel update --init-missing --ignore-pot-creation-date -i $< -d $(BABEL_TRANSLATIONS) -l $*
	@pybabel update --init-missing -i $< -d $(BABEL_TRANSLATIONS) -l $*

$(BABEL_TRANSLATIONS)/%/LC_MESSAGES/messages.mo : $(BABEL_TRANSLATIONS)/%/LC_MESSAGES/messages.po
	@pybabel compile -d $(BABEL_TRANSLATIONS) -l $*
@@ -29,17 +29,17 @@ target "_common" {
    "type =sbom,generator=docker/scout-sbom-indexer:latest"
  ]
  annotations = [
    "index,manifest-descriptor:org.opencontainers.image.title=TSUN-Proxy",
    "index,manifest-descriptor:org.opencontainers.image.authors=Stefan Allius",
    "index,manifest-descriptor:org.opencontainers.image.created=${BUILD_DATE}",
    "index,manifest-descriptor:org.opencontainers.image.version=${VERSION}",
    "index,manifest-descriptor:org.opencontainers.image.revision=${BRANCH}",
    "index,manifest-descriptor:org.opencontainers.image.description=${DESCRIPTION}",
    "index:org.opencontainers.image.title=TSUN Gen3 Proxy",
    "index:org.opencontainers.image.authors=Stefan Allius",
    "index:org.opencontainers.image.created=${BUILD_DATE}",
    "index:org.opencontainers.image.version=${VERSION}",
    "index:org.opencontainers.image.revision=${BRANCH}",
    "index:org.opencontainers.image.description=${DESCRIPTION}",
    "index:org.opencontainers.image.licenses=BSD-3-Clause",
    "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy"
  ]
  labels = {
    "org.opencontainers.image.title" = "TSUN-Proxy"
    "org.opencontainers.image.title" = "TSUN Gen3 Proxy"
    "org.opencontainers.image.authors" = "Stefan Allius"
    "org.opencontainers.image.created" = "${BUILD_DATE}"
    "org.opencontainers.image.version" = "${VERSION}"

@@ -53,7 +53,7 @@ target "_common" {
  ]

  no-cache = false
  platforms = ["linux/amd64", "linux/arm64"]
  platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
}

target "_debug" {
@@ -1,8 +1,8 @@
flake8==7.3.0
pytest==8.4.1
flake8==7.2.0
pytest==8.4.0
pytest-asyncio==1.0.0
pytest-cov==6.2.1
python-dotenv==1.1.1
python-dotenv==1.1.0
mock==5.2.0
coverage==7.9.2
coverage==7.9.0
jinja2-cli==0.8.2
@@ -341,9 +341,9 @@ class SolarmanV5(SolarmanBase):
        self.log_lvl.clear()
        super().close()

    def send_start_cmd(self, snr: int, host: str,
                       forward: bool,
                       start_timeout=MB_CLIENT_DATA_UP):
    async def send_start_cmd(self, snr: int, host: str,
                             forward: bool,
                             start_timeout=MB_CLIENT_DATA_UP):
        self.no_forwarding = True
        self.establish_inv_emu = forward
        self.snr = snr
@@ -4,7 +4,6 @@ import logging
import traceback
import json
import gc
import socket
from aiomqtt import MqttCodeError
from asyncio import StreamReader, StreamWriter
from ipaddress import ip_address

@@ -139,9 +138,7 @@ class InverterBase(InverterIfc, Proxy):
                             f'Connected to {addr}')
                asyncio.create_task(self.remote.ifc.client_loop(addr))

            except (ConnectionRefusedError,
                    TimeoutError,
                    socket.gaierror) as error:
            except (ConnectionRefusedError, TimeoutError) as error:
                logging.info(f'{error}')
            except Exception:
                Infos.inc_counter('SW_Exception')
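This hunk is the counterpart of the changelog entry "catch socket.gaierror exception and log this with info level": DNS-resolution failures are handled like refused or timed-out connections instead of falling through to the generic exception counter. A minimal sketch of that pattern, assuming a plain `asyncio.open_connection()` call; the `connect()` helper, `host` and `port` names are illustrative, not the proxy's actual API:

```python
import asyncio
import logging
import socket


async def connect(host: str, port: int):
    """Open a client connection; expected connection problems are logged, not raised."""
    try:
        reader, writer = await asyncio.open_connection(host, port)
        return reader, writer
    except (ConnectionRefusedError, TimeoutError, socket.gaierror) as error:
        # socket.gaierror is raised when the hostname cannot be resolved;
        # treat it like the other expected connection errors and log at info level.
        logging.info(f'{error}')
        return None
```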
@@ -193,7 +193,7 @@ class Message(ProtocolIfc):
            return
        self.mb.build_msg(dev_id, func, addr, val, log_lvl)

    def send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
    async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
        self._send_modbus_cmd(Modbus.INV_ADDR, func, addr, val, log_lvl)

    def _send_modbus_scan(self):
@@ -66,7 +66,7 @@ class ModbusTcp():
        try:
            async with ModbusConn(host, port) as inverter:
                stream = inverter.local.stream
                stream.send_start_cmd(snr, host, forward)
                await stream.send_start_cmd(snr, host, forward)
                await stream.ifc.loop()
                logger.info(f'[{stream.node_id}:{stream.conn_no}] '
                            f'Connection closed - Shutdown: '
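Because `send_start_cmd` becomes a coroutine in this compare, every call site gains an `await` (as this hunk and the test hunks further down show). Calling a coroutine function without `await` only creates a coroutine object and does no work; Python later reports "RuntimeWarning: coroutine ... was never awaited". A minimal sketch of that difference, with a hypothetical `Stream` class and made-up arguments:

```python
import asyncio


class Stream:
    async def send_start_cmd(self, snr: int, host: str, forward: bool) -> None:
        # placeholder body; the real method builds and queues a start PDU
        await asyncio.sleep(0)


async def main() -> None:
    stream = Stream()
    stream.send_start_cmd(1234, "192.168.1.2", False)        # only creates a coroutine, nothing runs
    await stream.send_start_cmd(1234, "192.168.1.2", False)  # actually executes the command


asyncio.run(main())
```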
@@ -112,7 +112,7 @@ class Mqtt(metaclass=Singleton):
            except asyncio.CancelledError:
                logger_mqtt.debug("MQTT task cancelled")
                self.__client = None
                raise
                return
            except Exception:
                # self.inc_counter('SW_Exception') # fixme
                self.ctime = None

@@ -151,7 +151,7 @@ class Mqtt(metaclass=Singleton):
        if self.__cb_mqtt_is_up:
            await self.__cb_mqtt_is_up()

    def _out_coeff(self, message):
    async def _out_coeff(self, message):
        payload = message.payload.decode("UTF-8")
        try:
            val = round(float(payload) * 1024/100)

@@ -160,9 +160,9 @@ class Mqtt(metaclass=Singleton):
                                    'the range 0..100,'
                                    f' got: {payload}')
            else:
                self._modbus_cmd(message,
                                 Modbus.WRITE_SINGLE_REG,
                                 0, 0x202c, val)
                await self._modbus_cmd(message,
                                       Modbus.WRITE_SINGLE_REG,
                                       0, 0x202c, val)
        except Exception:
            pass

@@ -182,7 +182,7 @@ class Mqtt(metaclass=Singleton):
        else:
            logger_mqtt.warning(f'Node_id: {node_id} not found')

    def _modbus_cmd(self, message, func, params=0, addr=0, val=0):
    async def _modbus_cmd(self, message, func, params=0, addr=0, val=0):
        payload = message.payload.decode("UTF-8")
        for fnc in self.each_inverter(message, "send_modbus_cmd"):
            res = payload.split(',')

@@ -195,7 +195,7 @@ class Mqtt(metaclass=Singleton):
            elif params == 2:
                addr = int(res[0], base=16)
                val = int(res[1]) # lenght
            fnc(func, addr, val, logging.INFO)
            await fnc(func, addr, val, logging.INFO)

    async def _at_cmd(self, message):
        payload = message.payload.decode("UTF-8")
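In `_out_coeff` the MQTT payload is a percentage that is scaled to a 0..1024 register value before `_modbus_cmd` writes it to register 0x202c; the expectations in the test hunk further down ('100' -> 1024, '50' -> 512) follow directly from that scaling. A quick standalone check, using an illustrative helper name rather than the class method:

```python
def out_coeff_to_reg(payload: str) -> int:
    # Same scaling as Mqtt._out_coeff: percent (0..100) -> register value (0..1024)
    return round(float(payload) * 1024 / 100)


assert out_coeff_to_reg('100') == 1024
assert out_coeff_to_reg('50') == 512
```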
@@ -60,16 +60,7 @@ class Server():

        @app.context_processor
        def utility_processor():
            var = {'version': self.version,
                   'slug': os.getenv("SLUG"),
                   'hostname': os.getenv("HOSTNAME"),
                   }
            if var['slug']:
                var['hassio'] = True
                slug_len = len(var['slug'])
                var['addonname'] = var['slug'] + '_' + \
                    var['hostname'][slug_len+1:]
            return var
            return dict(version=self.version)

    def parse_args(self, arg_list: list[str] | None):
        parser = argparse.ArgumentParser()
@@ -29,9 +29,9 @@ def get_tz():

@web.context_processor
def utility_processor():
    return {'lang': babel_get_locale(),
            'lang_str': LANGUAGES.get(str(babel_get_locale()), "English"),
            'languages': LANGUAGES}
    return dict(lang=babel_get_locale(),
                lang_str=LANGUAGES.get(str(babel_get_locale()), "English"),
                languages=LANGUAGES)


@web.route('/language/<language>')
@@ -22,6 +22,3 @@ class LogHandler(Handler, metaclass=Singleton):

    def get_buffer(self, elms=0) -> list:
        return list(self.buffer)[-elms:]

    def clear(self):
        self.buffer.clear()
@@ -7,4 +7,3 @@
.fa-rotate-right:before{content:"\f01e"}
.fa-cloud-arrow-down-alt:before{content:"\f381"}
.fa-cloud-arrow-up-alt:before{content:"\f382"}
.fa-gear:before{content:"\f013"}
@@ -59,11 +59,6 @@
      <a href="{{ url_for('.mqtt')}}" class="w3-bar-item w3-button w3-padding {% block menu2_class %}{% endblock %}"><i class="fa fa-database fa-fw"></i> MQTT</a>
      <a href="{{ url_for('.notes')}}" class="w3-bar-item w3-button w3-padding {% block menu3_class %}{% endblock %}"><i class="fa fa-info fa-fw"></i> {{_('Important Messages')}}</a>
      <a href="{{ url_for('.logging')}}" class="w3-bar-item w3-button w3-padding {% block menu4_class %}{% endblock %}"><i class="fa fa-file-export fa-fw"></i> {{_('Log Files')}}</a>
      {% if hassio is defined %}
      <br>
      <a href="/hassio/addon/{{addonname}}/config" target="_top" class="w3-bar-item w3-button w3-padding"><i class="fa fa-gear fa-fw"></i> {{_('Add-on Config')}}</a>
      <a href="/hassio/addon/{{addonname}}/logs" target="_top" class="w3-bar-item w3-button w3-padding"><i class="fa fa-file fa-fw"></i> {{_('Add-on Log')}}</a>
      {% endif %}
    </div>
  </nav>
@@ -1,19 +1,19 @@
2025-04-30 00:01:23 INFO | root | Server "proxy - unknown" will be started
2025-04-30 00:01:24 INFO | root | current dir: /Users/sallius/tsun/tsun-gen3-proxy
2025-04-30 00:01:25 INFO | root | config_path: ./config/
2025-04-30 00:01:26 INFO | root | json_config: None
2025-04-30 00:01:27 INFO | root | toml_config: None
2025-04-30 00:01:28 INFO | root | trans_path: ../translations/
2025-04-30 00:01:29 INFO | root | rel_urls: False
2025-04-30 00:01:30 INFO | root | log_path: ./log/
2025-04-30 00:01:31 INFO | root | log_backups: unlimited
2025-04-30 00:01:32 INFO | root | LOG_LVL : None
2025-04-30 00:01:33 INFO | root | ******
2025-04-30 00:01:34 INFO | root | Read from /Users/sallius/tsun/tsun-gen3-proxy/app/src/cnf/default_config.toml => ok
2025-04-30 00:01:35 INFO | root | Read from environment => ok
2025-04-30 00:01:36 INFO | root | Read from ./config/config.json => n/a
2025-04-30 00:01:37 INFO | root | Read from ./config/config.toml => n/a
2025-04-30 00:01:38 INFO | root | ******
2025-04-30 00:01:39 INFO | root | listen on port: 5005 for inverters
2025-04-30 00:01:40 INFO | root | listen on port: 10000 for inverters
2025-04-30 00:01:41 INFO | root | Start Quart
2025-04-30 00:01:23 INFO | root | current dir: /Users/sallius/tsun/tsun-gen3-proxy
2025-04-30 00:01:23 INFO | root | config_path: ./config/
2025-04-30 00:01:23 INFO | root | json_config: None
2025-04-30 00:01:23 INFO | root | toml_config: None
2025-04-30 00:01:23 INFO | root | trans_path: ../translations/
2025-04-30 00:01:23 INFO | root | rel_urls: False
2025-04-30 00:01:23 INFO | root | log_path: ./log/
2025-04-30 00:01:23 INFO | root | log_backups: unlimited
2025-04-30 00:01:23 INFO | root | LOG_LVL : None
2025-04-30 00:01:23 INFO | root | ******
2025-04-30 00:01:23 INFO | root | Read from /Users/sallius/tsun/tsun-gen3-proxy/app/src/cnf/default_config.toml => ok
2025-04-30 00:01:23 INFO | root | Read from environment => ok
2025-04-30 00:01:23 INFO | root | Read from ./config/config.json => n/a
2025-04-30 00:01:23 INFO | root | Read from ./config/config.toml => n/a
2025-04-30 00:01:23 INFO | root | ******
2025-04-30 00:01:23 INFO | root | listen on port: 5005 for inverters
2025-04-30 00:01:23 INFO | root | listen on port: 10000 for inverters
2025-04-30 00:01:23 INFO | root | Start Quart
@@ -3,8 +3,7 @@ import pytest
import asyncio
import aiomqtt
import logging
from aiomqtt import MqttError, MessagesIterator
from aiomqtt import Message as AiomqttMessage
from aiomqtt import MqttError
from mock import patch, Mock

from async_stream import AsyncIfcImpl
@@ -35,26 +34,6 @@ def test_hostname():
    # else:
    return 'test.mosquitto.org'

@pytest.fixture(scope="function")
def aiomqtt_mock(monkeypatch):
    recv_que = asyncio.Queue()

    async def my_aenter(self):
        return self
    async def my_subscribe(self, *arg):
        return
    async def my_anext(self):
        return await recv_que.get()
    async def my_receive(self, topic: str, payload: bytes):
        msg = AiomqttMessage(topic, payload,qos=0, retain=False, mid=0, properties=None)
        await recv_que.put(msg)
        await asyncio.sleep(0) # dispath the msg

    monkeypatch.setattr(aiomqtt.Client, "__aenter__", my_aenter)
    monkeypatch.setattr(aiomqtt.Client, "subscribe", my_subscribe)
    monkeypatch.setattr(MessagesIterator, "__anext__", my_anext)
    monkeypatch.setattr(Mqtt, "receive", my_receive, False)

@pytest.fixture
def config_mqtt_conn(test_hostname, test_port):
    Config.act_config = {'mqtt':{'host': test_hostname, 'port': test_port, 'user': '', 'passwd': ''},
@@ -182,17 +161,13 @@ async def test_ha_reconnect(config_mqtt_conn):
        await m.close()

@pytest.mark.asyncio
async def test_mqtt_no_config(config_no_conn, monkeypatch):
async def test_mqtt_no_config(config_no_conn):
    _ = config_no_conn
    assert asyncio.get_running_loop()

    on_connect = asyncio.Event()
    async def cb():
        on_connect.set()
    async def my_publish(*args):
        return

    monkeypatch.setattr(aiomqtt.Client, "publish", my_publish)

    try:
        m = Mqtt(cb)

@@ -201,9 +176,9 @@ async def test_mqtt_no_config(config_no_conn, monkeypatch):
        assert not on_connect.is_set()
        try:
            await m.publish('homeassistant/status', 'online')
            assert m.published == 1
            assert False
        except Exception:
            assert False
            pass
    except TimeoutError:
        assert False
    finally:
@@ -275,119 +250,92 @@ async def test_mqtt_except_def_config(config_def_conn, monkeypatch, caplog):
    assert 'MQTT is unconfigured; Check your config.toml!' in caplog.text

@pytest.mark.asyncio
async def test_mqtt_dispatch(config_mqtt_conn, aiomqtt_mock, spy_modbus_cmd):
async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd):
    _ = config_mqtt_conn
    _ = aiomqtt_mock
    spy = spy_modbus_cmd
    try:
        m = Mqtt(None)
        assert m.ha_restarts == 0
        await m.receive('homeassistant/status', b'online') # send the message
        msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        assert m.ha_restarts == 1

        await m.receive(topic= 'tsun/inv_1/rated_load', payload= b'2')
        spy.assert_called_once_with(Modbus.WRITE_SINGLE_REG, 0x2008, 2, logging.INFO)

        spy.reset_mock()
        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'100')
        spy.assert_called_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 1024, logging.INFO)
        msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x2008, 2, logging.INFO)

        spy.reset_mock()
        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'50')
        spy.assert_called_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 512, logging.INFO)
        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'100', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 1024, logging.INFO)

        spy.reset_mock()
        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'50', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 512, logging.INFO)

        spy.reset_mock()
        await m.receive(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10')
        spy.assert_called_once_with(Modbus.READ_REGS, 0x3000, 10, logging.INFO)
        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_awaited_once_with(Modbus.READ_REGS, 0x3000, 10, logging.INFO)

        spy.reset_mock()
        await m.receive(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10')
        spy.assert_called_once_with(Modbus.READ_INPUTS, 0x3000, 10, logging.INFO)
        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_awaited_once_with(Modbus.READ_INPUTS, 0x3000, 10, logging.INFO)

        # test dispatching with empty mapping table
        m.topic_defs.clear()
        spy.reset_mock()
        await m.receive(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10')
        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_not_called()

        # test dispatching with incomplete mapping table - invalid fnc defined
        m.topic_defs.append(
            {'prefix': 'entity_prefix', 'topic': '/+/modbus_read_inputs',
             'full_topic': 'tsun/+/modbus_read_inputs', 'fnc': 'addr'}
             'full_topic': 'tsun/+/modbus_read_inputs', 'fnc': 'invalid'}
        )
        spy.reset_mock()
        await m.receive(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10')
        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_not_called()

    except MqttError:
        assert False
    except Exception:
        assert False
    finally:
        await m.close()

@pytest.mark.asyncio
async def test_mqtt_dispatch_cb(config_mqtt_conn, aiomqtt_mock):
async def test_msg_dispatch_err(config_mqtt_conn, spy_modbus_cmd):
    _ = config_mqtt_conn
    _ = aiomqtt_mock

    on_connect = asyncio.Event()
    async def cb():
        on_connect.set()
    try:
        m = Mqtt(cb)
        assert m.ha_restarts == 0
        await m.receive('homeassistant/status', b'online') # send the message
        assert on_connect.is_set()
        assert m.ha_restarts == 1

    except MqttError:
        assert False
    except Exception:
        assert False
    finally:
        await m.close()

@pytest.mark.asyncio
async def test_mqtt_dispatch_err(config_mqtt_conn, aiomqtt_mock, spy_modbus_cmd, caplog):
    _ = config_mqtt_conn
    _ = aiomqtt_mock
    spy = spy_modbus_cmd

    LOGGER = logging.getLogger("mqtt")
    LOGGER.propagate = True
    LOGGER.setLevel(logging.INFO)

    try:
        m = Mqtt(None)

        # test out of range param
        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'-1')
        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'-1', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_not_called()

        # test unknown node_id
        await m.receive(topic= 'tsun/inv_2/out_coeff', payload= b'2')
        spy.reset_mock()
        msg = aiomqtt.Message(topic= 'tsun/inv_2/out_coeff', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_not_called()

        # test invalid fload param
        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'2, 3')
        spy.assert_not_called()

        await m.receive(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10, 7')
        spy.reset_mock()
        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'2, 3', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_not_called()

        await m.receive(topic= 'tsun/inv_1/dcu_power', payload= b'100W')
        spy.reset_mock()
        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10, 7', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_not_called()

        spy.reset_mock()
        msg = aiomqtt.Message(topic= 'tsun/inv_1/dcu_power', payload= b'100W', qos= 0, retain = False, mid= 0, properties= None)
        await m.dispatch_msg(msg)
        spy.assert_not_called()

        with caplog.at_level(logging.INFO):
            msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
            for _ in m.each_inverter(msg, "addr"):
                pass # do nothing here
            assert 'Cmd not supported by: inv_1/' in caplog.text
    except MqttError:
        assert False
    except Exception:
        assert False
    finally:
        await m.close()
@@ -191,7 +191,6 @@ class TestApp:
        """Test the ready route."""

        ProxyState.set_up(False)
        app.testing = True
        client = app.test_client()
        response = await client.get('/-/ready')
        assert response.status_code == 503

@@ -212,7 +211,6 @@ class TestApp:
        with InverterBase(reader, writer, 'tsun', Talent):
            ProxyState.set_up(False)
            app.testing = True
            client = app.test_client()
            response = await client.get('/-/healthy')
            assert response.status_code == 200

@@ -242,7 +240,6 @@ class TestApp:
        with caplog.at_level(logging.INFO) and InverterBase(reader, writer, 'tsun', Talent):
            ProxyState.set_up(False)
            app.testing = True
            client = app.test_client()
            response = await client.get('/-/healthy')
            assert response.status_code == 200

@@ -274,7 +271,6 @@ class TestApp:
        with caplog.at_level(logging.INFO) and InverterBase(reader, writer, 'tsun', Talent):
            ProxyState.set_up(False)
            app.testing = True
            client = app.test_client()
            response = await client.get('/-/healthy')
            assert response.status_code == 200
@@ -1624,7 +1624,7 @@ async def test_msg_build_modbus_req(my_loop, config_tsun_inv1, device_ind_msg, d
    assert m.ifc.tx_fifo.get()==device_rsp_msg
    assert m.ifc.fwd_fifo.get()==device_ind_msg

    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    assert 0 == m.send_msg_ofs
    assert m.ifc.fwd_fifo.get() == b''
    assert m.sent_pdu == b'' # modbus command must be ignore, cause connection is still not up

@@ -1642,7 +1642,7 @@ async def test_msg_build_modbus_req(my_loop, config_tsun_inv1, device_ind_msg, d
    assert m.ifc.tx_fifo.get()==inverter_rsp_msg
    assert m.ifc.fwd_fifo.get()==inverter_ind_msg

    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    assert 0 == m.send_msg_ofs
    assert m.ifc.fwd_fifo.get() == b''
    assert m.sent_pdu == msg_modbus_cmd
@@ -2318,7 +2318,7 @@ async def test_start_client_mode(my_loop, config_tsun_inv1, str_test_ip):
    assert m.no_forwarding == False
    assert m.mb_timer.tim == None
    assert asyncio.get_running_loop() == m.mb_timer.loop
    m.send_start_cmd(get_sn_int(), str_test_ip, False, m.mb_first_timeout)
    await m.send_start_cmd(get_sn_int(), str_test_ip, False, m.mb_first_timeout)
    assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15')
    assert m.db.get_db_value(Register.IP_ADDRESS) == str_test_ip
    assert isclose(m.db.get_db_value(Register.POLLING_INTERVAL), 0.5)

@@ -2351,7 +2351,7 @@ async def test_start_client_mode_scan(config_tsun_scan_dcu, str_test_ip, dcu_mod
    assert m.no_forwarding == False
    assert m.mb_timer.tim == None
    assert asyncio.get_running_loop() == m.mb_timer.loop
    m.send_start_cmd(get_dcu_sn_int(), str_test_ip, False, m.mb_first_timeout)
    await m.send_start_cmd(get_dcu_sn_int(), str_test_ip, False, m.mb_first_timeout)
    assert m.mb_start_reg == 0x0000
    assert m.mb_step == 0x100
    assert m.mb_bytes == 0x2d

@@ -2662,7 +2662,6 @@ async def test_proxy_dcu_cmd(my_loop, config_tsun_dcu1, patch_open_connection, d
    assert l.db.stat['proxy']['AT_Command'] == 0
    assert l.db.stat['proxy']['AT_Command_Blocked'] == 0
    assert l.db.stat['proxy']['Modbus_Command'] == 0
    assert 2 == l.db.get_db_value(Register.NO_INPUTS, 0)

    l.append_msg(dcu_command_rsp_msg)
    l.read() # read at resp
@@ -144,7 +144,7 @@ async def test_emu_start(my_loop, config_tsun_inv1, msg_modbus_rsp, str_test_ip,
    inv = InvStream(msg_modbus_rsp)

    assert asyncio.get_running_loop() == inv.mb_timer.loop
    inv.send_start_cmd(get_sn_int(), str_test_ip, True, inv.mb_first_timeout)
    await inv.send_start_cmd(get_sn_int(), str_test_ip, True, inv.mb_first_timeout)
    inv.read() # read complete msg, and dispatch msg
    assert not inv.header_valid # must be invalid, since msg was handled and buffer flushed
    assert inv.msg_count == 1

@@ -161,7 +161,7 @@ async def test_snd_hb(my_loop, config_tsun_inv1, heartbeat_ind):
    inv = InvStream()
    cld = CldStream(inv)

    # inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
    # await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
    cld.send_heartbeat_cb(0)
    assert cld.ifc.tx_fifo.peek() == heartbeat_ind
    cld.close()

@@ -178,7 +178,7 @@ async def test_snd_inv_data(my_loop, config_tsun_inv1, inverter_ind_msg, inverte
    inv.db.set_db_def_value(Register.GRID_FREQUENCY, 50.05)
    inv.db.set_db_def_value(Register.PROD_COMPL_TYPE, 6)
    assert asyncio.get_running_loop() == inv.mb_timer.loop
    inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
    await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
    inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value

    cld = CldStream(inv)

@@ -213,7 +213,7 @@ async def test_rcv_invalid(my_loop, config_tsun_inv1, inverter_ind_msg, inverter
    _ = config_tsun_inv1
    inv = InvStream()
    assert asyncio.get_running_loop() == inv.mb_timer.loop
    inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
    await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
    inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value

    cld = CldStream(inv)
@@ -2411,14 +2411,14 @@ async def test_msg_build_modbus_req(config_tsun_inv1, msg_modbus_cmd):
    _ = config_tsun_inv1
    m = MemoryStream(b'', (0,), True)
    m.id_str = b"R170000000000001"
    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    assert 0 == m.send_msg_ofs
    assert m.ifc.fwd_fifo.get() == b''
    assert m.ifc.tx_fifo.get() == b''
    assert m.sent_pdu == b''

    m.state = State.up
    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
    assert 0 == m.send_msg_ofs
    assert m.ifc.fwd_fifo.get() == b''
    assert m.ifc.tx_fifo.get() == b''
@@ -1,37 +1,22 @@
# test_with_pytest.py
import pytest
import logging
import os, errno
import datetime
from os import DirEntry, stat_result
from quart import current_app
from mock import patch

from server import app as my_app
from server import Server
from web import web
from server import app
from web import Web, web
from async_stream import AsyncStreamClient
from gen3plus.inverter_g3p import InverterG3P
from web.log_handler import LogHandler
from test_inverter_g3p import FakeReader, FakeWriter, config_conn
from cnf.config import Config
from mock import patch
from proxy import Proxy


class FakeServer(Server):
    def __init__(self):
        pass # don't call the suoer(.__init__ for unit tests

import os, errno
from os import DirEntry, stat_result
import datetime

pytest_plugins = ('pytest_asyncio',)
@pytest.fixture(scope="session")
def app():
    yield my_app

@pytest.fixture(scope="session")
def client(app):
def client():
    app.secret_key = 'super secret key'
    app.testing = True
    return app.test_client()

@pytest.fixture
@@ -67,7 +52,6 @@ async def test_home(client):
    response = await client.get('/')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b"<title>TSUN Proxy - Connections</title>" in await response.data

@pytest.mark.asyncio
async def test_page(client):

@@ -75,17 +59,14 @@ async def test_page(client):
    response = await client.get('/mqtt')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b"<title>TSUN Proxy - MQTT Status</title>" in await response.data
    assert b'fetch("/mqtt-fetch")' in await response.data

@pytest.mark.asyncio
async def test_rel_page(client):
    """Test the mqtt route with relative paths."""
    """Test the mqtt route."""
    web.build_relative_urls = True
    response = await client.get('/mqtt')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b'fetch("./mqtt-fetch")' in await response.data
    web.build_relative_urls = False

@pytest.mark.asyncio

@@ -94,7 +75,6 @@ async def test_notes(client):
    response = await client.get('/notes')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b"<title>TSUN Proxy - Important Messages</title>" in await response.data

@pytest.mark.asyncio
async def test_logging(client):

@@ -102,7 +82,6 @@ async def test_logging(client):
    response = await client.get('/logging')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b"<title>TSUN Proxy - Log Files</title>" in await response.data

@pytest.mark.asyncio
async def test_favicon96(client):
@@ -140,37 +119,37 @@ async def test_manifest(client):
    assert response.mimetype == 'application/manifest+json'

@pytest.mark.asyncio
async def test_data_fetch(client, create_inverter):
async def test_data_fetch(create_inverter):
    """Test the data-fetch route."""
    _ = create_inverter
    client = app.test_client()
    response = await client.get('/data-fetch')
    assert response.status_code == 200

    response = await client.get('/data-fetch')
    assert response.status_code == 200
    assert b'<h5>Connections</h5>' in await response.data

@pytest.mark.asyncio
async def test_data_fetch1(client, create_inverter_server):
async def test_data_fetch1(create_inverter_server):
    """Test the data-fetch route with server connection."""
    _ = create_inverter_server
    client = app.test_client()
    response = await client.get('/data-fetch')
    assert response.status_code == 200

    response = await client.get('/data-fetch')
    assert response.status_code == 200
    assert b'<h5>Connections</h5>' in await response.data

@pytest.mark.asyncio
async def test_data_fetch2(client, create_inverter_client):
async def test_data_fetch2(create_inverter_client):
    """Test the data-fetch route with client connection."""
    _ = create_inverter_client
    client = app.test_client()
    response = await client.get('/data-fetch')
    assert response.status_code == 200

    response = await client.get('/data-fetch')
    assert response.status_code == 200
    assert b'<h5>Connections</h5>' in await response.data

@pytest.mark.asyncio
async def test_language_en(client):
@@ -180,44 +159,21 @@ async def test_language_en(client):
    assert response.content_language.pop() == 'en'
    assert response.location == '/index'
    assert response.mimetype == 'text/html'
    assert b'<html lang=en' in await response.data
    assert b'<title>Redirecting...</title>' in await response.data

    client.set_cookie('test', key='language', value='de')
    response = await client.get('/')
    response = await client.get('/mqtt')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b'<html lang="en"' in await response.data
    assert b'<title>TSUN Proxy - Connections</title>' in await response.data

@pytest.mark.asyncio
async def test_language_de(client):
    """Test the language/de route."""

    response = await client.get('/language/de', headers={'referer': '/'})
    assert response.status_code == 302
    assert response.content_language.pop() == 'de'
    assert response.location == '/'
    assert response.mimetype == 'text/html'
    assert b'<html lang=en>' in await response.data
    assert b'<title>Redirecting...</title>' in await response.data

    client.set_cookie('test', key='language', value='en')
    response = await client.get('/')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b'<html lang="de"' in await response.data
    # the following assert fails on github runner, since the translation to german fails
    # assert b'<title>TSUN Proxy - Verbindungen</title>' in await response.data

    """Switch back to english"""
    response = await client.get('/language/en', headers={'referer': '/index'})
    assert response.status_code == 302
    assert response.content_language.pop() == 'en'
    assert response.location == '/index'
    assert response.mimetype == 'text/html'
    assert b'<html lang=en>' in await response.data
    assert b'<title>Redirecting...</title>' in await response.data

@pytest.mark.asyncio
async def test_language_unknown(client):

@@ -226,12 +182,6 @@ async def test_language_unknown(client):
    assert response.status_code == 404
    assert response.mimetype == 'text/html'

    client.set_cookie('test', key='language', value='en')
    response = await client.get('/')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b'<title>TSUN Proxy - Connections</title>' in await response.data


@pytest.mark.asyncio
async def test_mqtt_fetch(client, create_inverter):
@@ -241,47 +191,15 @@ async def test_mqtt_fetch(client, create_inverter):

    response = await client.get('/mqtt-fetch')
    assert response.status_code == 200
    assert b'<h5>MQTT devices</h5>' in await response.data


@pytest.mark.asyncio
async def test_notes_fetch(client, config_conn):
    """Test the notes-fetch route."""
    _ = config_conn
    _ = create_inverter

    s = FakeServer()
    s.src_dir = 'app/src/'
    s.init_logging_system()

    # First clear log and test Well done message
    logh = LogHandler()
    logh.clear()
    response = await client.get('/notes-fetch')
    assert response.status_code == 200
    assert b'<h2>Well done!</h2>' in await response.data

    # Check info logs which must be ignored here
    logging.info('config_info')
    logh.flush()
    response = await client.get('/notes-fetch')
    assert response.status_code == 200
    assert b'<h2>Well done!</h2>' in await response.data

    # Check warning logs which must be added to the note list
    logging.warning('config_warning')
    logh.flush()
    response = await client.get('/notes-fetch')
    assert response.status_code == 200
    assert b'WARNING' in await response.data
    assert b'config_warning' in await response.data

    # Check error logs which must be added to the note list
    logging.error('config_err')
    logh.flush()
    response = await client.get('/notes-fetch')
    assert response.status_code == 200
    assert b'ERROR' in await response.data
    assert b'config_err' in await response.data


@pytest.mark.asyncio

@@ -311,7 +229,6 @@ async def test_file_fetch(client, config_conn, monkeypatch):
    monkeypatch.delattr(stat_result, "st_birthtime")
    response = await client.get('/file-fetch')
    assert response.status_code == 200
    assert b'<h4>test.txt</h4>' in await response.data

@pytest.mark.asyncio
async def test_send_file(client, config_conn):

@@ -320,7 +237,6 @@ async def test_send_file(client, config_conn):
    assert Config.log_path == 'app/tests/log/'
    response = await client.get('/send-file/test.txt')
    assert response.status_code == 200
    assert b'2025-04-30 00:01:23' in await response.data


@pytest.mark.asyncio

@@ -375,20 +291,3 @@ async def test_del_file_err(client, config_conn, patch_os_remove_err):
    assert Config.log_path == 'app/tests/log/'
    response = await client.delete ('/del-file/test.txt')
    assert response.status_code == 404

@pytest.mark.asyncio
async def test_addon_links(client):
    """Test links to HA add-on config/log in UI"""
    with patch.dict(os.environ, {'SLUG': 'c676133d', 'HOSTNAME': 'c676133d-tsun-proxy'}):
        response = await client.get('/')
        assert response.status_code == 200
        assert response.mimetype == 'text/html'
        assert b'Add-on Config' in await response.data
        assert b'href="/hassio/addon/c676133d_tsun-proxy/logs' in await response.data
        assert b'href="/hassio/addon/c676133d_tsun-proxy/config' in await response.data

    # check that links are not available if env vars SLUG and HOSTNAME are not defined (docker version)
    response = await client.get('/')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b'Add-on Config' not in await response.data
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: tsun-gen3-proxy 0.14.0\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
"POT-Creation-Date: 2025-05-13 22:34+0200\n"
"POT-Creation-Date: 2025-05-18 15:30+0200\n"
"PO-Revision-Date: 2025-04-18 16:24+0200\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language: de\n"

@@ -75,14 +75,6 @@ msgstr "Wichtige Hinweise"
msgid "Log Files"
msgstr "Log Dateien"

#: src/web/templates/base.html.j2:64
msgid "Add-on Config"
msgstr "Add-on Konfiguration"

#: src/web/templates/base.html.j2:65
msgid "Add-on Log"
msgstr "Add-on Protokoll"

#: src/web/templates/page_index.html.j2:3
msgid "TSUN Proxy - Connections"
msgstr "TSUN Proxy - Verbindungen"
@@ -29,23 +29,27 @@ target "_common" {
    "type =sbom,generator=docker/scout-sbom-indexer:latest"
  ]
  annotations = [
    "index:io.hass.version=${VERSION}",
    "index:io.hass.type=addon",
    "index:io.hass.arch=aarch64|amd64",
    "index,manifest-descriptor:org.opencontainers.image.title=TSUN-Proxy",
    "index,manifest-descriptor:org.opencontainers.image.authors=Stefan Allius",
    "index,manifest-descriptor:org.opencontainers.image.created=${BUILD_DATE}",
    "index,manifest-descriptor:org.opencontainers.image.version=${VERSION}",
    "index,manifest-descriptor:org.opencontainers.image.description=${DESCRIPTION}",
    "index:io.hass.arch=armhf|aarch64|i386|amd64",
    "index:org.opencontainers.image.title=TSUN-Proxy",
    "index:org.opencontainers.image.authors=Stefan Allius",
    "index:org.opencontainers.image.created=${BUILD_DATE}",
    "index:org.opencontainers.image.version=${VERSION}",
    "index:org.opencontainers.image.revision=${BRANCH}",
    "index:org.opencontainers.image.description=${DESCRIPTION}",
    "index:org.opencontainers.image.licenses=BSD-3-Clause",
    "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon",
    "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon"
  ]
  labels = {
    "io.hass.version" = "${VERSION}"
    "io.hass.type" = "addon"
    "io.hass.arch" = "aarch64|amd64"
    "io.hass.arch" = "armhf|aarch64|i386|amd64"
    "org.opencontainers.image.title" = "TSUN-Proxy"
    "org.opencontainers.image.authors" = "Stefan Allius"
    "org.opencontainers.image.created" = "${BUILD_DATE}"
    "org.opencontainers.image.version" = "${VERSION}"
    "org.opencontainers.image.revision" = "${BRANCH}"
    "org.opencontainers.image.description" = "${DESCRIPTION}"
    "org.opencontainers.image.licenses" = "BSD-3-Clause"
    "org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy/ha_addonsha_addon"

@@ -55,7 +59,7 @@ target "_common" {
  ]

  no-cache = false
  platforms = ["linux/amd64", "linux/arm64"]
  platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
}

target "_debug" {
@@ -13,12 +13,12 @@
# 1 Build Base Image #
######################

ARG BUILD_FROM="ghcr.io/hassio-addons/base:18.0.3"
ARG BUILD_FROM="ghcr.io/hassio-addons/base:17.2.5"
# hadolint ignore=DL3006
FROM $BUILD_FROM AS base

# Installiere Python, pip und virtuelle Umgebungstools
RUN apk add --no-cache python3=3.12.11-r0 py3-pip=25.1.1-r0 && \
RUN apk add --no-cache python3=3.12.10-r1 py3-pip=24.3.1-r0 && \
    python -m venv /opt/venv && \
    . /opt/venv/bin/activate
@@ -4,10 +4,8 @@ bashio::log.blue "-----------------------------------------------------------"
bashio::log.blue "run.sh: info: setup Add-on environment"
bashio::cache.flush_all
MQTT_HOST=""
SLUG=""
HOSTNAME=""
if bashio::supervisor.ping; then
    bashio::log "run.sh: info: check Home Assistant bashio for config values"
    bashio::log "run.sh: info: check for Home Assistant MQTT service"
    if bashio::services.available mqtt; then
        MQTT_HOST=$(bashio::services mqtt "host")
        MQTT_PORT=$(bashio::services mqtt "port")

@@ -16,31 +14,15 @@ if bashio::supervisor.ping; then
    else
        bashio::log.yellow "run.sh: info: Home Assistant MQTT service not available!"
    fi
    SLUG=$(bashio::addon.repository)
    HOSTNAME=$(bashio::addon.hostname)
else
    bashio::log.red "run.sh: error: Home Assistant Supervisor API not available!"
fi

if [ -z "$SLUG" ]; then
    bashio::log.yellow "run.sh: info: addon slug not found"
else
    bashio::log.green "run.sh: info: found addon slug: $SLUG"
    export SLUG

fi
if [ -z "$HOSTNAME" ]; then
    bashio::log.yellow "run.sh: info: addon hostname not found"
else
    bashio::log.green "run.sh: info: found addon hostname: $HOSTNAME"
    export HOSTNAME
fi

# if a MQTT was/not found, drop a note
if [ -z "$MQTT_HOST" ]; then
    bashio::log.yellow "run.sh: info: MQTT config not found"
else
    bashio::log.green "run.sh: info: found MQTT config"
    bashio::log.green "run.sh: info: MQTT config found"
    export MQTT_HOST
    export MQTT_PORT
    export MQTT_USER
@@ -10,6 +10,8 @@ init: false
arch:
  - aarch64
  - amd64
  - armhf
  - armv7
startup: services
homeassistant_api: true
map: