Compare commits

..

18 Commits

Author | SHA1 | Message | Date
Stefan Allius | 50a7dbca41 | undo changes | 2025-05-20 23:37:37 +02:00
Stefan Allius | ef889dc53b | undo last changes | 2025-05-20 23:34:41 +02:00
Stefan Allius | c9ab95a02d | Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into s-allius/issue397 | 2025-05-20 23:29:47 +02:00
Stefan Allius | 783bb1c832 | change icon for notes | 2025-05-20 23:07:33 +02:00
Stefan Allius | f3bc22ef68 | add unit tests | 2025-05-04 18:44:01 +02:00
Stefan Allius | 5e5a2ccdd0 | add Notes page and table for important messages | 2025-05-04 18:43:52 +02:00
Stefan Allius | 09ae4f9bb8 | more translations | 2025-05-04 18:23:39 +02:00
Stefan Allius | 838f6d76d0 | translate page titles | 2025-05-04 18:23:21 +02:00
Stefan Allius | d693c0b48a | rename file | 2025-05-04 18:22:30 +02:00
Stefan Allius | 9a8ca02b47 | add well done message if no errors in the logs | 2025-05-04 18:22:00 +02:00
Stefan Allius | 7a86f682b9 | fix double defined build target | 2025-05-04 18:21:05 +02:00
Stefan Allius | 0048d71f9d | add warnings and error page | 2025-05-04 16:08:20 +02:00
Stefan Allius | c6822fd309 | add page for warnings and errors | 2025-05-04 16:06:52 +02:00
Stefan Allius | 298b72bbc9 | render warnings/errors as note list | 2025-05-04 15:29:47 +02:00
Stefan Allius | 089c35f89e | create LogHandler for the dashborad (simple memory log handler which stores the last 64 warnings/errors for the dashboard) | 2025-05-04 15:28:28 +02:00
Stefan Allius | ba025eb09b | add note list to page | 2025-05-04 15:27:45 +02:00
Stefan Allius | ddf8222003 | add list of last 3 warnings/errors to page | 2025-05-04 15:26:46 +02:00
Stefan Allius | d5b3b804bc | add Dashboards log handler to all known loggers | 2025-05-04 15:25:30 +02:00
36 changed files with 202 additions and 638 deletions

View File

@@ -5,7 +5,7 @@ name: Python application
 on:
   push:
-    branches: [ "main", "dev-*", "*/issue*", "releases/*" ]
+    branches: [ "main", "dev-*", "*/issue*" ]
     paths-ignore:
      - '**.md' # Do no build on *.md changes
      - '**.yml' # Do no build on *.yml changes
@@ -18,7 +18,7 @@ on:
      - '**.dockerfile' # Do no build on *.dockerfile changes
      - '**.sh' # Do no build on *.sh changes
   pull_request:
-    branches: [ "main", "dev-*", "releases/*" ]
+    branches: [ "main", "dev-*" ]
 permissions:
   contents: read

View File

@@ -1 +1 @@
-3.13.5
+3.13.2

View File

@@ -7,26 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [unreleased]
-- catch socket.gaierror exception and log this with info level
-- Update dependency coverage to v7.9.2
-- add-on: bump base-image to version 18.0.3
-- add-on: remove armhf and armv7 support
-- add-on: add links to config and log-file to the web-UI
-- fix some SonarQube warnings
-- remove unused 32-bit architectures
-- Babel don't build new po file if only the pot creation-date was changed
-- Improve Makefile
-- Update dependency pytest-asyncio to v1
-## [0.14.1] - 2025-05-31
-- handle missing MQTT addon [#438](https://github.com/s-allius/tsun-gen3-proxy/issues/438)
-## [0.14.0] - 2025-05-29
-- add-on: bump python to version 3.12.10-r1
-- set no of pv modules for MS800 GEN3PLUS inverters
-- fix the paths to copy the config.example.toml file during proxy start
 - add MQTT topic `dcu_power` for setting output power on DCUs
 - Update ghcr.io/hassio-addons/base Docker tag to v17.2.5
 - fix a lot of pytest-asyncio problems in the unit tests

View File

@@ -1,37 +1,27 @@
-.PHONY: help build babel clean addon-dev addon-debug addon-rc addon-rel debug dev preview rc rel check-docker-compose install
+.PHONY: build babel clean addon-dev addon-debug addon-rc addon-rel debug dev preview rc rel check-docker-compose install
-help: ## show help message
-	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[$$()% a-zA-Z0-9_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
-babel: ## build language files
+babel:
 	$(MAKE) -C app $@
 build:
 	$(MAKE) -C ha_addons $@
-clean: ## delete all built files
+clean:
 	$(MAKE) -C app $@
 	$(MAKE) -C ha_addons $@
-debug dev preview rc rel: ## build docker container in <dev|debg|rc|rel> version
+debug dev preview rc rel:
 	$(MAKE) -C app babel
 	$(MAKE) -C app $@
-addon-dev addon-debug addon-rc addon-rel: ## build HA add-on in <dev|debg|rc|rel> version
+addon-dev addon-debug addon-rc addon-rel:
 	$(MAKE) -C app babel
 	$(MAKE) -C ha_addons $(patsubst addon-%,%,$@)
-check-docker-compose: ## check the docker-compose file
+check-docker-compose:
 	docker-compose config -q
-PY_VER := $(shell cat .python-version)
-install: ## install requirements into the pyenv and switch to proper venv
-	@pyenv local $(PY_VER) || { pyenv install $(PY_VER) && pyenv local $(PY_VER) || exit 1; }
-	@pyenv exec pip install --upgrade pip
-	@pyenv exec pip install -r requirements.txt
-	@pyenv exec pip install -r requirements-test.txt
-	pyenv exec python --version
-run: ## run proxy locally out of the actual venv
-	pyenv exec python app/src/server.py -c /app/src/cnf
+install:
+	python3 -m pip install --upgrade pip
+	python3 -m pip install -r requirements.txt
+	python3 -m pip install -r requirements-test.txt

View File

@@ -1 +1 @@
-0.15.0
+0.14.0

View File

@@ -55,7 +55,7 @@ $(BABEL_TRANSLATIONS)/%.pot : $(SRC)/.babel.cfg $(BABEL_INPUT)
 $(BABEL_TRANSLATIONS)/%/LC_MESSAGES/messages.po : $(BABEL_TRANSLATIONS)/messages.pot
 	@mkdir -p $(@D)
-	@pybabel update --init-missing --ignore-pot-creation-date -i $< -d $(BABEL_TRANSLATIONS) -l $*
+	@pybabel update --init-missing -i $< -d $(BABEL_TRANSLATIONS) -l $*
 $(BABEL_TRANSLATIONS)/%/LC_MESSAGES/messages.mo : $(BABEL_TRANSLATIONS)/%/LC_MESSAGES/messages.po
 	@pybabel compile -d $(BABEL_TRANSLATIONS) -l $*

View File

@@ -29,17 +29,17 @@ target "_common" {
"type =sbom,generator=docker/scout-sbom-indexer:latest" "type =sbom,generator=docker/scout-sbom-indexer:latest"
] ]
annotations = [ annotations = [
"index,manifest-descriptor:org.opencontainers.image.title=TSUN-Proxy", "index:org.opencontainers.image.title=TSUN Gen3 Proxy",
"index,manifest-descriptor:org.opencontainers.image.authors=Stefan Allius", "index:org.opencontainers.image.authors=Stefan Allius",
"index,manifest-descriptor:org.opencontainers.image.created=${BUILD_DATE}", "index:org.opencontainers.image.created=${BUILD_DATE}",
"index,manifest-descriptor:org.opencontainers.image.version=${VERSION}", "index:org.opencontainers.image.version=${VERSION}",
"index,manifest-descriptor:org.opencontainers.image.revision=${BRANCH}", "index:org.opencontainers.image.revision=${BRANCH}",
"index,manifest-descriptor:org.opencontainers.image.description=${DESCRIPTION}", "index:org.opencontainers.image.description=${DESCRIPTION}",
"index:org.opencontainers.image.licenses=BSD-3-Clause", "index:org.opencontainers.image.licenses=BSD-3-Clause",
"index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy" "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy"
] ]
labels = { labels = {
"org.opencontainers.image.title" = "TSUN-Proxy" "org.opencontainers.image.title" = "TSUN Gen3 Proxy"
"org.opencontainers.image.authors" = "Stefan Allius" "org.opencontainers.image.authors" = "Stefan Allius"
"org.opencontainers.image.created" = "${BUILD_DATE}" "org.opencontainers.image.created" = "${BUILD_DATE}"
"org.opencontainers.image.version" = "${VERSION}" "org.opencontainers.image.version" = "${VERSION}"
@@ -53,7 +53,7 @@ target "_common" {
] ]
no-cache = false no-cache = false
platforms = ["linux/amd64", "linux/arm64"] platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
} }
target "_debug" { target "_debug" {

View File

@@ -1,8 +1,8 @@
-flake8==7.3.0
-pytest==8.4.1
-pytest-asyncio==1.0.0
-pytest-cov==6.2.1
-python-dotenv==1.1.1
+flake8==7.2.0
+pytest==8.3.5
+pytest-asyncio==0.26.0
+pytest-cov==6.1.1
+python-dotenv==1.1.0
 mock==5.2.0
-coverage==7.9.2
+coverage==7.8.0
 jinja2-cli==0.8.2

View File

@@ -162,8 +162,7 @@ class Config():
     )
     @classmethod
-    def init(cls, def_reader: ConfigIfc, log_path: str = '',
-             cnf_path: str = 'config') -> None | str:
+    def init(cls, def_reader: ConfigIfc, log_path: str = '') -> None | str:
         '''Initialise the Proxy-Config
         Copy the internal default config file into the config directory
@@ -174,13 +173,12 @@ and initialise the Config with the default configuration '''
         try:
             # make the default config transparaent by copying it
             # in the config.example file
-            logging.info(
-                f'Copy Default Config to {cnf_path}config.example.toml')
+            logging.debug('Copy Default Config to config.example.toml')
-            shutil.copy2("cnf/default_config.toml",
-                         cnf_path + "config.example.toml")
+            shutil.copy2("default_config.toml",
+                         "config/config.example.toml")
-        except Exception as e:
-            logging.error(e)
+        except Exception:
+            pass
         # read example config file as default configuration
         try:

View File

@@ -216,7 +216,7 @@ class InfosG3P(Infos):
         self.set_db_def_value(Register.MANUFACTURER, 'TSUN')
         self.set_db_def_value(Register.EQUIPMENT_MODEL, 'TSOL-MSxx00')
         self.set_db_def_value(Register.CHIP_TYPE, 'IGEN TECH')
-        self.set_db_def_value(Register.NO_INPUTS, 2)
+        self.set_db_def_value(Register.NO_INPUTS, 4)
     def __hide_topic(self, row: dict) -> bool:
         if 'dep' in row:

View File

@@ -341,9 +341,9 @@ class SolarmanV5(SolarmanBase):
         self.log_lvl.clear()
         super().close()
-    def send_start_cmd(self, snr: int, host: str,
+    async def send_start_cmd(self, snr: int, host: str,
                        forward: bool,
                        start_timeout=MB_CLIENT_DATA_UP):
         self.no_forwarding = True
         self.establish_inv_emu = forward
         self.snr = snr
@@ -562,17 +562,12 @@ class SolarmanV5(SolarmanBase):
         rated = db.get_db_value(Register.RATED_POWER, 0)
         model = None
         if max_pow == 2000:
-            db.set_db_def_value(Register.NO_INPUTS, 4)
             if rated == 800 or rated == 600:
                 model = f'TSOL-MS{max_pow}({rated})'
             else:
                 model = f'TSOL-MS{max_pow}'
         elif max_pow == 1800 or max_pow == 1600:
-            db.set_db_def_value(Register.NO_INPUTS, 4)
             model = f'TSOL-MS{max_pow}'
-        elif max_pow <= 800:
-            model = f'TSOL-MS{max_pow}'
         if model:
             logger.info(f'Model: {model}')
             self.db.set_db_def_value(Register.EQUIPMENT_MODEL, model)
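Note: to make the model naming on the '-' side of this hunk easier to follow, here is an illustration-only Python sketch of that selection logic. The helper name and signature are invented for this note; in the project the logic is inline and reads max_pow/rated from the register database.

    def model_name(max_pow: int, rated: int) -> str | None:
        """Mirror of the TSOL-MSxx00 naming rules shown in the removed lines."""
        if max_pow == 2000:
            # 2000 W hardware sold with a 600/800 W rating gets the rating appended
            return f'TSOL-MS{max_pow}({rated})' if rated in (600, 800) else f'TSOL-MS{max_pow}'
        if max_pow in (1800, 1600) or max_pow <= 800:
            return f'TSOL-MS{max_pow}'
        return None  # unknown hardware keeps the default 'TSOL-MSxx00'

    # Examples that agree with the unit tests further down in this compare:
    assert model_name(2000, 600) == 'TSOL-MS2000(600)'
    assert model_name(1600, 1600) == 'TSOL-MS1600'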

View File

@@ -4,7 +4,6 @@ import logging
 import traceback
 import json
 import gc
-import socket
 from aiomqtt import MqttCodeError
 from asyncio import StreamReader, StreamWriter
 from ipaddress import ip_address
@@ -139,9 +138,7 @@ class InverterBase(InverterIfc, Proxy):
                              f'Connected to {addr}')
                 asyncio.create_task(self.remote.ifc.client_loop(addr))
-            except (ConnectionRefusedError,
-                    TimeoutError,
-                    socket.gaierror) as error:
+            except (ConnectionRefusedError, TimeoutError) as error:
                 logging.info(f'{error}')
             except Exception:
                 Infos.inc_counter('SW_Exception')

View File

@@ -193,7 +193,7 @@ class Message(ProtocolIfc):
             return
         self.mb.build_msg(dev_id, func, addr, val, log_lvl)
-    def send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
+    async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
         self._send_modbus_cmd(Modbus.INV_ADDR, func, addr, val, log_lvl)
     def _send_modbus_scan(self):

View File

@@ -66,7 +66,7 @@ class ModbusTcp():
         try:
             async with ModbusConn(host, port) as inverter:
                 stream = inverter.local.stream
-                stream.send_start_cmd(snr, host, forward)
+                await stream.send_start_cmd(snr, host, forward)
                 await stream.ifc.loop()
                 logger.info(f'[{stream.node_id}:{stream.conn_no}] '
                             f'Connection closed - Shutdown: '

View File

@@ -112,7 +112,7 @@ class Mqtt(metaclass=Singleton):
         except asyncio.CancelledError:
             logger_mqtt.debug("MQTT task cancelled")
             self.__client = None
-            raise
+            return
         except Exception:
             # self.inc_counter('SW_Exception') # fixme
             self.ctime = None
@@ -151,7 +151,7 @@
         if self.__cb_mqtt_is_up:
             await self.__cb_mqtt_is_up()
-    def _out_coeff(self, message):
+    async def _out_coeff(self, message):
         payload = message.payload.decode("UTF-8")
         try:
             val = round(float(payload) * 1024/100)
@@ -160,9 +160,9 @@
                                     'the range 0..100,'
                                     f' got: {payload}')
             else:
-                self._modbus_cmd(message,
+                await self._modbus_cmd(message,
                                  Modbus.WRITE_SINGLE_REG,
                                  0, 0x202c, val)
         except Exception:
             pass
@@ -182,7 +182,7 @@
         else:
             logger_mqtt.warning(f'Node_id: {node_id} not found')
-    def _modbus_cmd(self, message, func, params=0, addr=0, val=0):
+    async def _modbus_cmd(self, message, func, params=0, addr=0, val=0):
         payload = message.payload.decode("UTF-8")
         for fnc in self.each_inverter(message, "send_modbus_cmd"):
             res = payload.split(',')
@@ -195,7 +195,7 @@
             elif params == 2:
                 addr = int(res[0], base=16)
                 val = int(res[1]) # lenght
-                fnc(func, addr, val, logging.INFO)
+                await fnc(func, addr, val, logging.INFO)
     async def _at_cmd(self, message):
         payload = message.payload.decode("UTF-8")
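As a worked example of the out_coeff scaling visible in this hunk: the percentage payload is mapped onto a 0..1024 register value before it is written to register 0x202c, so '100' becomes 1024 and '50' becomes 512, which is what the MQTT unit tests later in this compare assert. The helper below is invented purely for illustration; in the code the conversion happens inline in _out_coeff().

    def out_coeff_to_reg(payload: str) -> int:
        # '100' -> 1024, '50' -> 512; anything outside 0..100 % is rejected
        val = round(float(payload) * 1024 / 100)
        if not 0 <= val <= 1024:
            raise ValueError(f'out_coeff must be in the range 0..100, got: {payload}')
        return val

    assert out_coeff_to_reg('100') == 1024
    assert out_coeff_to_reg('50') == 512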

View File

@@ -12,7 +12,7 @@ class Schedule:
     count = 0
     @classmethod
-    def start(cls) -> None:  # pragma: no cover
+    def start(cls) -> None:
         '''Start the scheduler and schedule the tasks (cron jobs)'''
         logging.debug("Scheduler init")
         cls.mqtt = Mqtt(None)
@@ -20,7 +20,7 @@ class Schedule:
         crontab('0 0 * * *', func=cls.atmidnight, start=True)
     @classmethod
-    async def atmidnight(cls) -> None:  # pragma: no cover
+    async def atmidnight(cls) -> None:
         '''Clear daily counters at midnight'''
         logging.info("Clear daily counters at midnight")

View File

@@ -60,16 +60,7 @@ class Server():
         @app.context_processor
         def utility_processor():
-            var = {'version': self.version,
-                   'slug': os.getenv("SLUG"),
-                   'hostname': os.getenv("HOSTNAME"),
-                   }
-            if var['slug']:
-                var['hassio'] = True
-                slug_len = len(var['slug'])
-                var['addonname'] = var['slug'] + '_' + \
-                    var['hostname'][slug_len+1:]
-            return var
+            return dict(version=self.version)
     def parse_args(self, arg_list: list[str] | None):
         parser = argparse.ArgumentParser()
@@ -136,8 +127,7 @@ class Server():
     def build_config(self):
         # read config file
         Config.init(ConfigReadToml(self.src_dir + "cnf/default_config.toml"),
-                    log_path=self.log_path,
-                    cnf_path=self.config_path)
+                    log_path=self.log_path)
         ConfigReadEnv()
         ConfigReadJson(self.config_path + "config.json")
         ConfigReadToml(self.config_path + "config.toml")

View File

@@ -29,9 +29,9 @@ def get_tz():
 @web.context_processor
 def utility_processor():
-    return {'lang': babel_get_locale(),
-            'lang_str': LANGUAGES.get(str(babel_get_locale()), "English"),
-            'languages': LANGUAGES}
+    return dict(lang=babel_get_locale(),
+                lang_str=LANGUAGES.get(str(babel_get_locale()), "English"),
+                languages=LANGUAGES)
 @web.route('/language/<language>')

View File

@@ -22,6 +22,3 @@ class LogHandler(Handler, metaclass=Singleton):
     def get_buffer(self, elms=0) -> list:
         return list(self.buffer)[-elms:]
-    def clear(self):
-        self.buffer.clear()
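Since the LogHandler itself (added in commit 089c35f89e, "create LogHandler for the dashborad") is only partially visible in this hunk, here is a minimal sketch of the idea it describes: an in-memory logging handler that keeps the last 64 warnings/errors for the dashboard. The class name, the emit() body and the capacity argument are assumptions for illustration; only get_buffer() and clear() are taken from the hunk above.

    import logging
    from collections import deque

    class MemoryLogHandler(logging.Handler):
        """Keep the most recent warnings/errors in memory for a dashboard page."""

        def __init__(self, capacity: int = 64) -> None:
            super().__init__(level=logging.WARNING)
            # deque(maxlen=...) drops the oldest record automatically when full
            self.buffer: deque = deque(maxlen=capacity)

        def emit(self, record: logging.LogRecord) -> None:
            self.buffer.append(self.format(record))

        def get_buffer(self, elms: int = 0) -> list:
            # list[-0:] is the whole list, so elms == 0 returns every stored entry
            return list(self.buffer)[-elms:]

        def clear(self) -> None:
            self.buffer.clear()

    # Attach it to the loggers whose warnings/errors should show up on the dashboard:
    logging.getLogger().addHandler(MemoryLogHandler())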

View File

@@ -7,4 +7,3 @@
 .fa-rotate-right:before{content:"\f01e"}
 .fa-cloud-arrow-down-alt:before{content:"\f381"}
 .fa-cloud-arrow-up-alt:before{content:"\f382"}
-.fa-gear:before{content:"\f013"}

View File

@@ -59,11 +59,6 @@
<a href="{{ url_for('.mqtt')}}" class="w3-bar-item w3-button w3-padding {% block menu2_class %}{% endblock %}"><i class="fa fa-database fa-fw"></i>  MQTT</a> <a href="{{ url_for('.mqtt')}}" class="w3-bar-item w3-button w3-padding {% block menu2_class %}{% endblock %}"><i class="fa fa-database fa-fw"></i>  MQTT</a>
<a href="{{ url_for('.notes')}}" class="w3-bar-item w3-button w3-padding {% block menu3_class %}{% endblock %}"><i class="fa fa-info fa-fw"></i>  {{_('Important Messages')}}</a> <a href="{{ url_for('.notes')}}" class="w3-bar-item w3-button w3-padding {% block menu3_class %}{% endblock %}"><i class="fa fa-info fa-fw"></i>  {{_('Important Messages')}}</a>
<a href="{{ url_for('.logging')}}" class="w3-bar-item w3-button w3-padding {% block menu4_class %}{% endblock %}"><i class="fa fa-file-export fa-fw"></i>  {{_('Log Files')}}</a> <a href="{{ url_for('.logging')}}" class="w3-bar-item w3-button w3-padding {% block menu4_class %}{% endblock %}"><i class="fa fa-file-export fa-fw"></i>  {{_('Log Files')}}</a>
{% if hassio is defined %}
<br>
<a href="/hassio/addon/{{addonname}}/config" target="_top" class="w3-bar-item w3-button w3-padding"><i class="fa fa-gear fa-fw"></i>  {{_('Add-on Config')}}</a>
<a href="/hassio/addon/{{addonname}}/logs" target="_top" class="w3-bar-item w3-button w3-padding"><i class="fa fa-file fa-fw"></i>  {{_('Add-on Log')}}</a>
{% endif %}
</div> </div>
</nav> </nav>

View File

@@ -1,19 +1,19 @@
 2025-04-30 00:01:23 INFO | root | Server "proxy - unknown" will be started
-2025-04-30 00:01:24 INFO | root | current dir: /Users/sallius/tsun/tsun-gen3-proxy
+2025-04-30 00:01:23 INFO | root | current dir: /Users/sallius/tsun/tsun-gen3-proxy
-2025-04-30 00:01:25 INFO | root | config_path: ./config/
+2025-04-30 00:01:23 INFO | root | config_path: ./config/
-2025-04-30 00:01:26 INFO | root | json_config: None
+2025-04-30 00:01:23 INFO | root | json_config: None
-2025-04-30 00:01:27 INFO | root | toml_config: None
+2025-04-30 00:01:23 INFO | root | toml_config: None
-2025-04-30 00:01:28 INFO | root | trans_path: ../translations/
+2025-04-30 00:01:23 INFO | root | trans_path: ../translations/
-2025-04-30 00:01:29 INFO | root | rel_urls: False
+2025-04-30 00:01:23 INFO | root | rel_urls: False
-2025-04-30 00:01:30 INFO | root | log_path: ./log/
+2025-04-30 00:01:23 INFO | root | log_path: ./log/
-2025-04-30 00:01:31 INFO | root | log_backups: unlimited
+2025-04-30 00:01:23 INFO | root | log_backups: unlimited
-2025-04-30 00:01:32 INFO | root | LOG_LVL : None
+2025-04-30 00:01:23 INFO | root | LOG_LVL : None
-2025-04-30 00:01:33 INFO | root | ******
+2025-04-30 00:01:23 INFO | root | ******
-2025-04-30 00:01:34 INFO | root | Read from /Users/sallius/tsun/tsun-gen3-proxy/app/src/cnf/default_config.toml => ok
+2025-04-30 00:01:23 INFO | root | Read from /Users/sallius/tsun/tsun-gen3-proxy/app/src/cnf/default_config.toml => ok
-2025-04-30 00:01:35 INFO | root | Read from environment => ok
+2025-04-30 00:01:23 INFO | root | Read from environment => ok
-2025-04-30 00:01:36 INFO | root | Read from ./config/config.json => n/a
+2025-04-30 00:01:23 INFO | root | Read from ./config/config.json => n/a
-2025-04-30 00:01:37 INFO | root | Read from ./config/config.toml => n/a
+2025-04-30 00:01:23 INFO | root | Read from ./config/config.toml => n/a
-2025-04-30 00:01:38 INFO | root | ******
+2025-04-30 00:01:23 INFO | root | ******
-2025-04-30 00:01:39 INFO | root | listen on port: 5005 for inverters
+2025-04-30 00:01:23 INFO | root | listen on port: 5005 for inverters
-2025-04-30 00:01:40 INFO | root | listen on port: 10000 for inverters
+2025-04-30 00:01:23 INFO | root | listen on port: 10000 for inverters
-2025-04-30 00:01:41 INFO | root | Start Quart
+2025-04-30 00:01:23 INFO | root | Start Quart

View File

@@ -109,7 +109,7 @@ def test_default_db():
     i = InfosG3P(client_mode=False)
     assert json.dumps(i.db) == json.dumps({
-        "inverter": {"Manufacturer": "TSUN", "Equipment_Model": "TSOL-MSxx00", "No_Inputs": 2},
+        "inverter": {"Manufacturer": "TSUN", "Equipment_Model": "TSOL-MSxx00", "No_Inputs": 4},
         "collector": {"Chip_Type": "IGEN TECH"},
     })
@@ -271,7 +271,7 @@ def test_build_ha_conf1():
         elif id == 'inv_count_456':
             assert False
-    assert tests==5
+    assert tests==7
 def test_build_ha_conf2():
     i = InfosG3P(client_mode=False)
@@ -346,7 +346,7 @@ def test_build_ha_conf3():
         elif id == 'inv_count_456':
             assert False
-    assert tests==5
+    assert tests==7
 def test_build_ha_conf4():
     i = InfosG3P(client_mode=True)

View File

@@ -3,8 +3,7 @@ import pytest
 import asyncio
 import aiomqtt
 import logging
-from aiomqtt import MqttError, MessagesIterator
-from aiomqtt import Message as AiomqttMessage
+from aiomqtt import MqttError
 from mock import patch, Mock
 from async_stream import AsyncIfcImpl
@@ -35,26 +34,6 @@ def test_hostname():
     # else:
     return 'test.mosquitto.org'
-@pytest.fixture(scope="function")
-def aiomqtt_mock(monkeypatch):
-    recv_que = asyncio.Queue()
-    async def my_aenter(self):
-        return self
-    async def my_subscribe(self, *arg):
-        return
-    async def my_anext(self):
-        return await recv_que.get()
-    async def my_receive(self, topic: str, payload: bytes):
-        msg = AiomqttMessage(topic, payload,qos=0, retain=False, mid=0, properties=None)
-        await recv_que.put(msg)
-        await asyncio.sleep(0) # dispath the msg
-    monkeypatch.setattr(aiomqtt.Client, "__aenter__", my_aenter)
-    monkeypatch.setattr(aiomqtt.Client, "subscribe", my_subscribe)
-    monkeypatch.setattr(MessagesIterator, "__anext__", my_anext)
-    monkeypatch.setattr(Mqtt, "receive", my_receive, False)
 @pytest.fixture
 def config_mqtt_conn(test_hostname, test_port):
     Config.act_config = {'mqtt':{'host': test_hostname, 'port': test_port, 'user': '', 'passwd': ''},
@@ -182,17 +161,13 @@ async def test_ha_reconnect(config_mqtt_conn):
         await m.close()
 @pytest.mark.asyncio
-async def test_mqtt_no_config(config_no_conn, monkeypatch):
+async def test_mqtt_no_config(config_no_conn):
     _ = config_no_conn
     assert asyncio.get_running_loop()
     on_connect = asyncio.Event()
     async def cb():
         on_connect.set()
-    async def my_publish(*args):
-        return
-    monkeypatch.setattr(aiomqtt.Client, "publish", my_publish)
     try:
         m = Mqtt(cb)
@@ -201,9 +176,9 @@ async def test_mqtt_no_config(config_no_conn, monkeypatch):
         assert not on_connect.is_set()
         try:
             await m.publish('homeassistant/status', 'online')
-            assert m.published == 1
+            assert False
         except Exception:
-            assert False
+            pass
     except TimeoutError:
         assert False
     finally:
@@ -275,119 +250,92 @@ async def test_mqtt_except_def_config(config_def_conn, monkeypatch, caplog):
     assert 'MQTT is unconfigured; Check your config.toml!' in caplog.text
 @pytest.mark.asyncio
-async def test_mqtt_dispatch(config_mqtt_conn, aiomqtt_mock, spy_modbus_cmd):
+async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd):
     _ = config_mqtt_conn
-    _ = aiomqtt_mock
     spy = spy_modbus_cmd
     try:
         m = Mqtt(None)
-        assert m.ha_restarts == 0
+        msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None)
-        await m.receive('homeassistant/status', b'online') # send the message
+        await m.dispatch_msg(msg)
         assert m.ha_restarts == 1
-        await m.receive(topic= 'tsun/inv_1/rated_load', payload= b'2')
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
-        spy.assert_called_once_with(Modbus.WRITE_SINGLE_REG, 0x2008, 2, logging.INFO)
+        await m.dispatch_msg(msg)
+        spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x2008, 2, logging.INFO)
-        spy.reset_mock()
-        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'100')
-        spy.assert_called_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 1024, logging.INFO)
         spy.reset_mock()
-        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'50')
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'100', qos= 0, retain = False, mid= 0, properties= None)
-        spy.assert_called_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 512, logging.INFO)
+        await m.dispatch_msg(msg)
+        spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 1024, logging.INFO)
+        spy.reset_mock()
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'50', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
+        spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 512, logging.INFO)
         spy.reset_mock()
-        await m.receive(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10')
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
-        spy.assert_called_once_with(Modbus.READ_REGS, 0x3000, 10, logging.INFO)
+        await m.dispatch_msg(msg)
+        spy.assert_awaited_once_with(Modbus.READ_REGS, 0x3000, 10, logging.INFO)
         spy.reset_mock()
-        await m.receive(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10')
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
-        spy.assert_called_once_with(Modbus.READ_INPUTS, 0x3000, 10, logging.INFO)
+        await m.dispatch_msg(msg)
+        spy.assert_awaited_once_with(Modbus.READ_INPUTS, 0x3000, 10, logging.INFO)
         # test dispatching with empty mapping table
         m.topic_defs.clear()
         spy.reset_mock()
-        await m.receive(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10')
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
         spy.assert_not_called()
         # test dispatching with incomplete mapping table - invalid fnc defined
         m.topic_defs.append(
             {'prefix': 'entity_prefix', 'topic': '/+/modbus_read_inputs',
-             'full_topic': 'tsun/+/modbus_read_inputs', 'fnc': 'addr'}
+             'full_topic': 'tsun/+/modbus_read_inputs', 'fnc': 'invalid'}
         )
         spy.reset_mock()
-        await m.receive(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10')
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
         spy.assert_not_called()
-    except MqttError:
-        assert False
-    except Exception:
-        assert False
     finally:
         await m.close()
 @pytest.mark.asyncio
-async def test_mqtt_dispatch_cb(config_mqtt_conn, aiomqtt_mock):
+async def test_msg_dispatch_err(config_mqtt_conn, spy_modbus_cmd):
     _ = config_mqtt_conn
-    _ = aiomqtt_mock
-    on_connect = asyncio.Event()
-    async def cb():
-        on_connect.set()
-    try:
-        m = Mqtt(cb)
-        assert m.ha_restarts == 0
-        await m.receive('homeassistant/status', b'online') # send the message
-        assert on_connect.is_set()
-        assert m.ha_restarts == 1
-    except MqttError:
-        assert False
-    except Exception:
-        assert False
-    finally:
-        await m.close()
-@pytest.mark.asyncio
-async def test_mqtt_dispatch_err(config_mqtt_conn, aiomqtt_mock, spy_modbus_cmd, caplog):
-    _ = config_mqtt_conn
-    _ = aiomqtt_mock
     spy = spy_modbus_cmd
+    LOGGER = logging.getLogger("mqtt")
+    LOGGER.propagate = True
+    LOGGER.setLevel(logging.INFO)
     try:
         m = Mqtt(None)
         # test out of range param
-        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'-1')
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'-1', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
         spy.assert_not_called()
         # test unknown node_id
-        await m.receive(topic= 'tsun/inv_2/out_coeff', payload= b'2')
+        spy.reset_mock()
+        msg = aiomqtt.Message(topic= 'tsun/inv_2/out_coeff', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
         spy.assert_not_called()
         # test invalid fload param
-        await m.receive(topic= 'tsun/inv_1/out_coeff', payload= b'2, 3')
+        spy.reset_mock()
-        spy.assert_not_called()
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'2, 3', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
-        await m.receive(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10, 7')
         spy.assert_not_called()
-        await m.receive(topic= 'tsun/inv_1/dcu_power', payload= b'100W')
+        spy.reset_mock()
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10, 7', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
+        spy.assert_not_called()
+        spy.reset_mock()
+        msg = aiomqtt.Message(topic= 'tsun/inv_1/dcu_power', payload= b'100W', qos= 0, retain = False, mid= 0, properties= None)
+        await m.dispatch_msg(msg)
         spy.assert_not_called()
+        with caplog.at_level(logging.INFO):
+            msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
+            for _ in m.each_inverter(msg, "addr"):
+                pass # do nothing here
+            assert 'Cmd not supported by: inv_1/' in caplog.text
-    except MqttError:
-        assert False
-    except Exception:
-        assert False
     finally:
         await m.close()

View File

@@ -4,10 +4,6 @@ import logging
 import os
 from mock import patch
 from server import app, Server, ProxyState, HypercornLogHndl
-from inverter_base import InverterBase
-from gen3.talent import Talent
-from test_inverter_base import FakeReader, FakeWriter
 pytest_plugins = ('pytest_asyncio',)
@@ -112,20 +108,20 @@ class TestServerClass:
         assert logging.getLogger('hypercorn.access').level == logging.INFO
         assert logging.getLogger('hypercorn.error').level == logging.INFO
-        with patch.dict(os.environ, {'LOG_LVL': 'WARN'}):
+        os.environ["LOG_LVL"] = "WARN"
         s.parse_args(['--log_backups', '3'])
         s.init_logging_system()
         assert s.log_backups == 3
         assert s.log_level == logging.WARNING
         assert logging.handlers.log_backups == 3
         assert logging.getLogger().level == s.log_level
         assert logging.getLogger('msg').level == s.log_level
         assert logging.getLogger('conn').level == s.log_level
         assert logging.getLogger('data').level == s.log_level
         assert logging.getLogger('tracer').level == s.log_level
         assert logging.getLogger('asyncio').level == s.log_level
         assert logging.getLogger('hypercorn.access').level == logging.INFO
         assert logging.getLogger('hypercorn.error').level == logging.INFO
     def test_build_config_error(self, caplog):
         s = self.FakeServer()
@@ -191,7 +187,6 @@ class TestApp:
"""Test the ready route.""" """Test the ready route."""
ProxyState.set_up(False) ProxyState.set_up(False)
app.testing = True
client = app.test_client() client = app.test_client()
response = await client.get('/-/ready') response = await client.get('/-/ready')
assert response.status_code == 503 assert response.status_code == 503
@@ -207,84 +202,17 @@ class TestApp:
     @pytest.mark.asyncio
     async def test_healthy(self):
         """Test the healthy route."""
-        reader = FakeReader()
-        writer = FakeWriter()
-        with InverterBase(reader, writer, 'tsun', Talent):
-            ProxyState.set_up(False)
-            app.testing = True
-            client = app.test_client()
-            response = await client.get('/-/healthy')
-            assert response.status_code == 200
-            result = await response.get_data()
-            assert result == b"I'm fine"
+        ProxyState.set_up(False)
+        client = app.test_client()
+        response = await client.get('/-/healthy')
+        assert response.status_code == 200
+        result = await response.get_data()
+        assert result == b"I'm fine"
         ProxyState.set_up(True)
         response = await client.get('/-/healthy')
         assert response.status_code == 200
         result = await response.get_data()
         assert result == b"I'm fine"
@pytest.mark.asyncio
async def test_unhealthy(self, monkeypatch, caplog):
"""Test the healthy route."""
def result_false(self):
return False
LOGGER = logging.getLogger("mqtt")
LOGGER.propagate = True
LOGGER.setLevel(logging.INFO)
monkeypatch.setattr(InverterBase, "healthy", result_false)
InverterBase._registry.clear()
reader = FakeReader()
writer = FakeWriter()
with caplog.at_level(logging.INFO) and InverterBase(reader, writer, 'tsun', Talent):
ProxyState.set_up(False)
app.testing = True
client = app.test_client()
response = await client.get('/-/healthy')
assert response.status_code == 200
result = await response.get_data()
assert result == b"I'm fine"
assert "" == caplog.text
ProxyState.set_up(True)
response = await client.get('/-/healthy')
assert response.status_code == 503
result = await response.get_data()
assert result == b"I have a problem"
assert "" == caplog.text
@pytest.mark.asyncio
async def test_healthy_exception(self, monkeypatch, caplog):
"""Test the healthy route."""
def result_except(self):
raise ValueError
LOGGER = logging.getLogger("mqtt")
LOGGER.propagate = True
LOGGER.setLevel(logging.INFO)
monkeypatch.setattr(InverterBase, "healthy", result_except)
InverterBase._registry.clear()
reader = FakeReader()
writer = FakeWriter()
with caplog.at_level(logging.INFO) and InverterBase(reader, writer, 'tsun', Talent):
ProxyState.set_up(False)
app.testing = True
client = app.test_client()
response = await client.get('/-/healthy')
assert response.status_code == 200
result = await response.get_data()
assert result == b"I'm fine"
assert "" == caplog.text
ProxyState.set_up(True)
response = await client.get('/-/healthy')
assert response.status_code == 200
result = await response.get_data()
assert result == b"I'm fine"
assert "Exception:" in caplog.text

View File

@@ -462,39 +462,6 @@ def inverter_ind_msg800(): # 0x4210 rated Power 800W
    msg += b'\x15'
    return msg
@pytest.fixture
def inverter_ind_msg900(): # 0x4210 rated Power 900W
msg = b'\xa5\x99\x01\x10\x42\xe6\x9e' +get_sn() +b'\x01\xb0\x02\xbc\xc8'
msg += b'\x24\x32\x6c\x1f\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
msg += b'\x59\x31\x37\x45\x37\x41\x30\x46\x30\x31\x30\x42\x30\x31\x33\x45'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x36\x00\x00\x03\x84\x06\x7a'
msg += b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd'
msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04'
msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75'
msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00'
msg += b'\x00\x00\x00\x00\xff\xff\x03\x84\x00\x03\x04\x00\x04\x00\x04\x00'
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
msg += b'\x04\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
msg += b'\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def inverter_ind_msg_81(): # 0x4210 fcode 0x81
    msg = b'\xa5\x99\x01\x10\x42\x02\x03' +get_sn() +b'\x81\xb0\x02\xbc\xc8'
@@ -709,19 +676,6 @@ def msg_modbus_rsp(): # 0x1510
    msg += b'\x15'
    return msg
@pytest.fixture
def msg_modbus_rsp_mb_4(): # 0x1510, MODBUS Type:4
msg = b'\xa5\x3b\x00\x10\x15\x03\x03' +get_sn() +b'\x02\x01'
msg += total()
msg += hb()
msg += b'\x0a\xe2\xfa\x33\x01\x04\x28\x40\x10\x08\xd8'
msg += b'\x00\x00\x13\x87\x00\x31\x00\x68\x02\x58\x00\x00\x01\x53\x00\x02'
msg += b'\x00\x00\x01\x52\x00\x02\x00\x00\x01\x53\x00\x03\x00\x00\x00\x04'
msg += b'\x00\x01\x00\x00\x9e\xa4'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def msg_modbus_interim_rsp(): # 0x0510
    msg = b'\xa5\x3b\x00\x10\x15\x03\x03' +get_sn() +b'\x02\x01'
@@ -1481,7 +1435,6 @@ async def test_build_modell_600(my_loop, config_tsun_allow_all, inverter_ind_msg
     m.read() # read complete msg, and dispatch msg
     assert 2000 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
     assert 600 == m.db.get_db_value(Register.RATED_POWER, 0)
-    assert 4 == m.db.get_db_value(Register.NO_INPUTS, 0)
     assert 'TSOL-MS2000(600)' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
     assert '02b0' == m.db.get_db_value(Register.SENSOR_LIST, None)
     assert 0 == m.sensor_list # must not been set by an inverter data ind
@@ -1501,7 +1454,6 @@ async def test_build_modell_1600(my_loop, config_tsun_allow_all, inverter_ind_ms
     m.read() # read complete msg, and dispatch msg
     assert 1600 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
     assert 1600 == m.db.get_db_value(Register.RATED_POWER, 0)
-    assert 4 == m.db.get_db_value(Register.NO_INPUTS, 0)
     assert 'TSOL-MS1600' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
     m.close()
@@ -1515,7 +1467,6 @@ async def test_build_modell_1800(my_loop, config_tsun_allow_all, inverter_ind_ms
     m.read() # read complete msg, and dispatch msg
     assert 1800 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
     assert 1800 == m.db.get_db_value(Register.RATED_POWER, 0)
-    assert 4 == m.db.get_db_value(Register.NO_INPUTS, 0)
     assert 'TSOL-MS1800' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
     m.close()
@@ -1529,7 +1480,6 @@ async def test_build_modell_2000(my_loop, config_tsun_allow_all, inverter_ind_ms
     m.read() # read complete msg, and dispatch msg
     assert 2000 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
     assert 2000 == m.db.get_db_value(Register.RATED_POWER, 0)
-    assert 4 == m.db.get_db_value(Register.NO_INPUTS, 0)
     assert 'TSOL-MS2000' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
     m.close()
@@ -1543,21 +1493,6 @@ async def test_build_modell_800(my_loop, config_tsun_allow_all, inverter_ind_msg
     m.read() # read complete msg, and dispatch msg
     assert 800 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
     assert 800 == m.db.get_db_value(Register.RATED_POWER, 0)
-    assert 2 == m.db.get_db_value(Register.NO_INPUTS, 0)
-    assert 'TSOL-MS800' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
-    m.close()
-@pytest.mark.asyncio
-async def test_build_modell_900(my_loop, config_tsun_allow_all, inverter_ind_msg900):
-    _ = config_tsun_allow_all
-    m = MemoryStream(inverter_ind_msg900, (0,))
-    assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
-    assert None == m.db.get_db_value(Register.RATED_POWER, None)
-    assert None == m.db.get_db_value(Register.INVERTER_TEMP, None)
-    m.read() # read complete msg, and dispatch msg
-    assert 900 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
-    assert 900 == m.db.get_db_value(Register.RATED_POWER, 0)
-    assert 2 == m.db.get_db_value(Register.NO_INPUTS, 0)
     assert 'TSOL-MSxx00' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
     m.close()
@@ -1624,7 +1559,7 @@ async def test_msg_build_modbus_req(my_loop, config_tsun_inv1, device_ind_msg, d
     assert m.ifc.tx_fifo.get()==device_rsp_msg
     assert m.ifc.fwd_fifo.get()==device_ind_msg
-    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
+    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
     assert 0 == m.send_msg_ofs
     assert m.ifc.fwd_fifo.get() == b''
     assert m.sent_pdu == b'' # modbus command must be ignore, cause connection is still not up
@@ -1642,7 +1577,7 @@ async def test_msg_build_modbus_req(my_loop, config_tsun_inv1, device_ind_msg, d
     assert m.ifc.tx_fifo.get()==inverter_rsp_msg
     assert m.ifc.fwd_fifo.get()==inverter_ind_msg
-    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
+    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
     assert 0 == m.send_msg_ofs
     assert m.ifc.fwd_fifo.get() == b''
     assert m.sent_pdu == msg_modbus_cmd
@@ -2254,61 +2189,6 @@ async def test_modbus_scaning(config_tsun_scan, heartbeat_ind_msg, heartbeat_rsp
    assert next(m.mb_timer.exp_count) == 3
    m.close()
@pytest.mark.asyncio
async def test_modbus_scaning_inv_rsp(config_tsun_scan, heartbeat_ind_msg, heartbeat_rsp_msg, msg_modbus_rsp_mb_4):
_ = config_tsun_scan
assert asyncio.get_running_loop()
m = MemoryStream(heartbeat_ind_msg, (0x15,0x56,0))
m.append_msg(msg_modbus_rsp_mb_4)
assert m.mb_scan == False
assert asyncio.get_running_loop() == m.mb_timer.loop
m.db.stat['proxy']['Unknown_Ctrl'] = 0
assert m.mb_timer.tim == None
m.read() # read complete msg, and dispatch msg
assert m.mb_scan == True
assert m.mb_start_reg == 0xff80
assert m.mb_step == 0x40
assert m.mb_bytes == 0x14
assert asyncio.get_running_loop() == m.mb_timer.loop
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.snr == 2070233889
assert m.control == 0x4710
assert m.msg_recvd[0]['control']==0x4710
assert m.msg_recvd[0]['seq']=='84:11'
assert m.msg_recvd[0]['data_len']==0x1
assert m.ifc.tx_fifo.get()==heartbeat_rsp_msg
assert m.ifc.fwd_fifo.get()==heartbeat_ind_msg
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.ifc.tx_clear() # clear send buffer for next test
assert isclose(m.mb_timeout, 0.5)
assert next(m.mb_timer.exp_count) == 0
await asyncio.sleep(0.5)
assert m.sent_pdu==b'\xa5\x17\x00\x10E\x12\x84!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00' \
b'\x00\x00\x00\x00\x00\x00\x01\x03\xff\xc0\x00\x14\x75\xed\x33\x15'
assert m.ifc.tx_fifo.get()==b''
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.msg_recvd[1]['control']==0x1510
assert m.msg_recvd[1]['seq']=='03:03'
assert m.msg_recvd[1]['data_len']==0x3b
assert m.mb.last_addr == 1
assert m.mb.last_fcode == 3
assert m.mb.last_reg == 0xffc0 # mb_start_reg + mb_step
assert m.mb.last_len == 20
assert m.mb.err == 3
assert next(m.mb_timer.exp_count) == 2
m.close()
@pytest.mark.asyncio
async def test_start_client_mode(my_loop, config_tsun_inv1, str_test_ip):
    _ = config_tsun_inv1
@@ -2318,7 +2198,7 @@ async def test_start_client_mode(my_loop, config_tsun_inv1, str_test_ip):
     assert m.no_forwarding == False
     assert m.mb_timer.tim == None
     assert asyncio.get_running_loop() == m.mb_timer.loop
-    m.send_start_cmd(get_sn_int(), str_test_ip, False, m.mb_first_timeout)
+    await m.send_start_cmd(get_sn_int(), str_test_ip, False, m.mb_first_timeout)
     assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15')
     assert m.db.get_db_value(Register.IP_ADDRESS) == str_test_ip
     assert isclose(m.db.get_db_value(Register.POLLING_INTERVAL), 0.5)
@@ -2351,7 +2231,7 @@ async def test_start_client_mode_scan(config_tsun_scan_dcu, str_test_ip, dcu_mod
     assert m.no_forwarding == False
     assert m.mb_timer.tim == None
     assert asyncio.get_running_loop() == m.mb_timer.loop
-    m.send_start_cmd(get_dcu_sn_int(), str_test_ip, False, m.mb_first_timeout)
+    await m.send_start_cmd(get_dcu_sn_int(), str_test_ip, False, m.mb_first_timeout)
     assert m.mb_start_reg == 0x0000
     assert m.mb_step == 0x100
     assert m.mb_bytes == 0x2d
@@ -2662,7 +2542,6 @@ async def test_proxy_dcu_cmd(my_loop, config_tsun_dcu1, patch_open_connection, d
     assert l.db.stat['proxy']['AT_Command'] == 0
     assert l.db.stat['proxy']['AT_Command_Blocked'] == 0
     assert l.db.stat['proxy']['Modbus_Command'] == 0
-    assert 2 == l.db.get_db_value(Register.NO_INPUTS, 0)
     l.append_msg(dcu_command_rsp_msg)
     l.read() # read at resp

View File

@@ -144,7 +144,7 @@ async def test_emu_start(my_loop, config_tsun_inv1, msg_modbus_rsp, str_test_ip,
     inv = InvStream(msg_modbus_rsp)
     assert asyncio.get_running_loop() == inv.mb_timer.loop
-    inv.send_start_cmd(get_sn_int(), str_test_ip, True, inv.mb_first_timeout)
+    await inv.send_start_cmd(get_sn_int(), str_test_ip, True, inv.mb_first_timeout)
     inv.read() # read complete msg, and dispatch msg
     assert not inv.header_valid # must be invalid, since msg was handled and buffer flushed
     assert inv.msg_count == 1
@@ -161,7 +161,7 @@ async def test_snd_hb(my_loop, config_tsun_inv1, heartbeat_ind):
     inv = InvStream()
     cld = CldStream(inv)
-    # inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
+    # await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
     cld.send_heartbeat_cb(0)
     assert cld.ifc.tx_fifo.peek() == heartbeat_ind
     cld.close()
@@ -178,7 +178,7 @@ async def test_snd_inv_data(my_loop, config_tsun_inv1, inverter_ind_msg, inverte
     inv.db.set_db_def_value(Register.GRID_FREQUENCY, 50.05)
     inv.db.set_db_def_value(Register.PROD_COMPL_TYPE, 6)
     assert asyncio.get_running_loop() == inv.mb_timer.loop
-    inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
+    await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
     inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value
     cld = CldStream(inv)
@@ -213,7 +213,7 @@ async def test_rcv_invalid(my_loop, config_tsun_inv1, inverter_ind_msg, inverter
     _ = config_tsun_inv1
     inv = InvStream()
     assert asyncio.get_running_loop() == inv.mb_timer.loop
-    inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
+    await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
     inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value
     cld = CldStream(inv)

View File

@@ -2411,14 +2411,14 @@ async def test_msg_build_modbus_req(config_tsun_inv1, msg_modbus_cmd):
     _ = config_tsun_inv1
     m = MemoryStream(b'', (0,), True)
     m.id_str = b"R170000000000001"
-    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
+    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
     assert 0 == m.send_msg_ofs
     assert m.ifc.fwd_fifo.get() == b''
     assert m.ifc.tx_fifo.get() == b''
     assert m.sent_pdu == b''
     m.state = State.up
-    m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
+    await m.send_modbus_cmd(Modbus.WRITE_SINGLE_REG, 0x2008, 0, logging.DEBUG)
     assert 0 == m.send_msg_ofs
     assert m.ifc.fwd_fifo.get() == b''
     assert m.ifc.tx_fifo.get() == b''

View File

@@ -1,37 +1,22 @@
 # test_with_pytest.py
 import pytest
-import logging
+from server import app
-import os, errno
+from web import Web, web
-import datetime
-from os import DirEntry, stat_result
-from quart import current_app
-from mock import patch
-from server import app as my_app
-from server import Server
-from web import web
 from async_stream import AsyncStreamClient
 from gen3plus.inverter_g3p import InverterG3P
-from web.log_handler import LogHandler
 from test_inverter_g3p import FakeReader, FakeWriter, config_conn
 from cnf.config import Config
+from mock import patch
 from proxy import Proxy
+import os, errno
+from os import DirEntry, stat_result
-class FakeServer(Server):
+import datetime
-    def __init__(self):
-        pass # don't call the suoer(.__init__ for unit tests
 pytest_plugins = ('pytest_asyncio',)
-@pytest.fixture(scope="session")
-def app():
-    yield my_app
 @pytest.fixture(scope="session")
-def client(app):
+def client():
     app.secret_key = 'super secret key'
-    app.testing = True
     return app.test_client()
@pytest.fixture @pytest.fixture
@@ -67,7 +52,6 @@ async def test_home(client):
response = await client.get('/') response = await client.get('/')
assert response.status_code == 200 assert response.status_code == 200
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b"<title>TSUN Proxy - Connections</title>" in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_page(client): async def test_page(client):
@@ -75,17 +59,14 @@ async def test_page(client):
response = await client.get('/mqtt') response = await client.get('/mqtt')
assert response.status_code == 200 assert response.status_code == 200
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b"<title>TSUN Proxy - MQTT Status</title>" in await response.data
assert b'fetch("/mqtt-fetch")' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_rel_page(client): async def test_rel_page(client):
"""Test the mqtt route with relative paths.""" """Test the mqtt route."""
web.build_relative_urls = True web.build_relative_urls = True
response = await client.get('/mqtt') response = await client.get('/mqtt')
assert response.status_code == 200 assert response.status_code == 200
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b'fetch("./mqtt-fetch")' in await response.data
web.build_relative_urls = False web.build_relative_urls = False
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -94,7 +75,6 @@ async def test_notes(client):
response = await client.get('/notes') response = await client.get('/notes')
assert response.status_code == 200 assert response.status_code == 200
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b"<title>TSUN Proxy - Important Messages</title>" in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_logging(client): async def test_logging(client):
@@ -102,7 +82,6 @@ async def test_logging(client):
response = await client.get('/logging') response = await client.get('/logging')
assert response.status_code == 200 assert response.status_code == 200
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b"<title>TSUN Proxy - Log Files</title>" in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_favicon96(client): async def test_favicon96(client):
@@ -140,37 +119,37 @@ async def test_manifest(client):
assert response.mimetype == 'application/manifest+json' assert response.mimetype == 'application/manifest+json'
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_data_fetch(client, create_inverter): async def test_data_fetch(create_inverter):
"""Test the data-fetch route.""" """Test the data-fetch route."""
_ = create_inverter _ = create_inverter
client = app.test_client()
response = await client.get('/data-fetch') response = await client.get('/data-fetch')
assert response.status_code == 200 assert response.status_code == 200
response = await client.get('/data-fetch') response = await client.get('/data-fetch')
assert response.status_code == 200 assert response.status_code == 200
assert b'<h5>Connections</h5>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_data_fetch1(client, create_inverter_server): async def test_data_fetch1(create_inverter_server):
"""Test the data-fetch route with server connection.""" """Test the data-fetch route with server connection."""
_ = create_inverter_server _ = create_inverter_server
client = app.test_client()
response = await client.get('/data-fetch') response = await client.get('/data-fetch')
assert response.status_code == 200 assert response.status_code == 200
response = await client.get('/data-fetch') response = await client.get('/data-fetch')
assert response.status_code == 200 assert response.status_code == 200
assert b'<h5>Connections</h5>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_data_fetch2(client, create_inverter_client): async def test_data_fetch2(create_inverter_client):
"""Test the data-fetch route with client connection.""" """Test the data-fetch route with client connection."""
_ = create_inverter_client _ = create_inverter_client
client = app.test_client()
response = await client.get('/data-fetch') response = await client.get('/data-fetch')
assert response.status_code == 200 assert response.status_code == 200
response = await client.get('/data-fetch') response = await client.get('/data-fetch')
assert response.status_code == 200 assert response.status_code == 200
assert b'<h5>Connections</h5>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_language_en(client): async def test_language_en(client):
@@ -180,44 +159,21 @@ async def test_language_en(client):
assert response.content_language.pop() == 'en' assert response.content_language.pop() == 'en'
assert response.location == '/index' assert response.location == '/index'
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b'<html lang=en' in await response.data
assert b'<title>Redirecting...</title>' in await response.data
client.set_cookie('test', key='language', value='de') client.set_cookie('test', key='language', value='de')
response = await client.get('/') response = await client.get('/mqtt')
assert response.status_code == 200 assert response.status_code == 200
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b'<html lang="en"' in await response.data
assert b'<title>TSUN Proxy - Connections</title>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_language_de(client): async def test_language_de(client):
"""Test the language/de route.""" """Test the language/de route."""
response = await client.get('/language/de', headers={'referer': '/'}) response = await client.get('/language/de', headers={'referer': '/'})
assert response.status_code == 302 assert response.status_code == 302
assert response.content_language.pop() == 'de' assert response.content_language.pop() == 'de'
assert response.location == '/' assert response.location == '/'
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
assert b'<html lang=en>' in await response.data
assert b'<title>Redirecting...</title>' in await response.data
client.set_cookie('test', key='language', value='en')
response = await client.get('/')
assert response.status_code == 200
assert response.mimetype == 'text/html'
assert b'<html lang="de"' in await response.data
# the following assert fails on the GitHub runner, since the translation to German fails
# assert b'<title>TSUN Proxy - Verbindungen</title>' in await response.data
"""Switch back to English"""
response = await client.get('/language/en', headers={'referer': '/index'})
assert response.status_code == 302
assert response.content_language.pop() == 'en'
assert response.location == '/index'
assert response.mimetype == 'text/html'
assert b'<html lang=en>' in await response.data
assert b'<title>Redirecting...</title>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_language_unknown(client): async def test_language_unknown(client):
@@ -226,12 +182,6 @@ async def test_language_unknown(client):
assert response.status_code == 404 assert response.status_code == 404
assert response.mimetype == 'text/html' assert response.mimetype == 'text/html'
client.set_cookie('test', key='language', value='en')
response = await client.get('/')
assert response.status_code == 200
assert response.mimetype == 'text/html'
assert b'<title>TSUN Proxy - Connections</title>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_mqtt_fetch(client, create_inverter): async def test_mqtt_fetch(client, create_inverter):
@@ -241,47 +191,15 @@ async def test_mqtt_fetch(client, create_inverter):
response = await client.get('/mqtt-fetch') response = await client.get('/mqtt-fetch')
assert response.status_code == 200 assert response.status_code == 200
assert b'<h5>MQTT devices</h5>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_notes_fetch(client, config_conn): async def test_notes_fetch(client, config_conn):
"""Test the notes-fetch route.""" """Test the notes-fetch route."""
_ = config_conn _ = create_inverter
s = FakeServer()
s.src_dir = 'app/src/'
s.init_logging_system()
# First clear log and test Well done message
logh = LogHandler()
logh.clear()
response = await client.get('/notes-fetch') response = await client.get('/notes-fetch')
assert response.status_code == 200 assert response.status_code == 200
assert b'<h2>Well done!</h2>' in await response.data
# Check info logs which must be ignored here
logging.info('config_info')
logh.flush()
response = await client.get('/notes-fetch')
assert response.status_code == 200
assert b'<h2>Well done!</h2>' in await response.data
# Check warning logs which must be added to the note list
logging.warning('config_warning')
logh.flush()
response = await client.get('/notes-fetch')
assert response.status_code == 200
assert b'WARNING' in await response.data
assert b'config_warning' in await response.data
# Check error logs which must be added to the note list
logging.error('config_err')
logh.flush()
response = await client.get('/notes-fetch')
assert response.status_code == 200
assert b'ERROR' in await response.data
assert b'config_err' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -311,7 +229,6 @@ async def test_file_fetch(client, config_conn, monkeypatch):
monkeypatch.delattr(stat_result, "st_birthtime") monkeypatch.delattr(stat_result, "st_birthtime")
response = await client.get('/file-fetch') response = await client.get('/file-fetch')
assert response.status_code == 200 assert response.status_code == 200
assert b'<h4>test.txt</h4>' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_send_file(client, config_conn): async def test_send_file(client, config_conn):
@@ -320,7 +237,6 @@ async def test_send_file(client, config_conn):
assert Config.log_path == 'app/tests/log/' assert Config.log_path == 'app/tests/log/'
response = await client.get('/send-file/test.txt') response = await client.get('/send-file/test.txt')
assert response.status_code == 200 assert response.status_code == 200
assert b'2025-04-30 00:01:23' in await response.data
@pytest.mark.asyncio @pytest.mark.asyncio
@@ -375,20 +291,3 @@ async def test_del_file_err(client, config_conn, patch_os_remove_err):
assert Config.log_path == 'app/tests/log/' assert Config.log_path == 'app/tests/log/'
response = await client.delete ('/del-file/test.txt') response = await client.delete ('/del-file/test.txt')
assert response.status_code == 404 assert response.status_code == 404
@pytest.mark.asyncio
async def test_addon_links(client):
"""Test links to HA add-on config/log in UI"""
with patch.dict(os.environ, {'SLUG': 'c676133d', 'HOSTNAME': 'c676133d-tsun-proxy'}):
response = await client.get('/')
assert response.status_code == 200
assert response.mimetype == 'text/html'
assert b'Add-on Config' in await response.data
assert b'href="/hassio/addon/c676133d_tsun-proxy/logs' in await response.data
assert b'href="/hassio/addon/c676133d_tsun-proxy/config' in await response.data
# check that links are not available if env vars SLUG and HOSTNAME are not defined (docker version)
response = await client.get('/')
assert response.status_code == 200
assert response.mimetype == 'text/html'
assert b'Add-on Config' not in await response.data
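
The rewritten test_web.py above builds the Quart test client from session-scoped fixtures instead of creating one per test; condensed, the fixture wiring and a typical page test look roughly like this (a sketch reconstructed from the hunks above, not the complete file):

import pytest
from server import app as my_app

pytest_plugins = ('pytest_asyncio',)

@pytest.fixture(scope="session")
def app():
    yield my_app

@pytest.fixture(scope="session")
def client(app):
    app.secret_key = 'super secret key'
    app.testing = True          # surface route errors in the tests instead of generic 500 pages
    return app.test_client()

@pytest.mark.asyncio
async def test_home(client):
    response = await client.get('/')
    assert response.status_code == 200
    assert response.mimetype == 'text/html'
    assert b"<title>TSUN Proxy - Connections</title>" in await response.data

Reusing one client for the whole session also lets the language tests set a cookie once (client.set_cookie('test', key='language', value='de')) and observe it on later requests, which is what the test_language_en/test_language_de additions rely on.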

View File

@@ -75,14 +75,6 @@ msgstr "Wichtige Hinweise"
msgid "Log Files" msgid "Log Files"
msgstr "Log Dateien" msgstr "Log Dateien"
#: src/web/templates/base.html.j2:64
msgid "Add-on Config"
msgstr "Add-on Konfiguration"
#: src/web/templates/base.html.j2:65
msgid "Add-on Log"
msgstr "Add-on Protokoll"
#: src/web/templates/page_index.html.j2:3 #: src/web/templates/page_index.html.j2:3
msgid "TSUN Proxy - Connections" msgid "TSUN Proxy - Connections"
msgstr "TSUN Proxy - Verbindungen" msgstr "TSUN Proxy - Verbindungen"
@@ -128,7 +120,6 @@ msgid "TSUN Proxy - Log Files"
msgstr "TSUN Proxy - Log Dateien" msgstr "TSUN Proxy - Log Dateien"
#: src/web/templates/page_logging.html.j2:10 #: src/web/templates/page_logging.html.j2:10
#, python-format
msgid "Do you really want to delete the log file: <br>%(file)s ?" msgid "Do you really want to delete the log file: <br>%(file)s ?"
msgstr "Soll die Datei: <br>%(file)s<br>wirklich gelöscht werden?" msgstr "Soll die Datei: <br>%(file)s<br>wirklich gelöscht werden?"

View File

@@ -192,7 +192,7 @@ $(repro_all_subdirs) :
mkdir -p $@ mkdir -p $@
$(repro_all_templates) : $(INST_BASE)/ha_addon_%/config.yaml: $(TEMPL)/config.jinja $(TEMPL)/%_data.json $(SRC)/.version FORCE $(repro_all_templates) : $(INST_BASE)/ha_addon_%/config.yaml: $(TEMPL)/config.jinja $(TEMPL)/%_data.json $(SRC)/.version FORCE
$(JINJA) --strict -D AppVersion=$(VERSION)-$*$(RC) -D BuildID=$(BUILD_ID) $< $(filter %.json,$^) -o $@ $(JINJA) --strict -D AppVersion=$(VERSION)-$* -D BuildID=$(BUILD_ID) $< $(filter %.json,$^) -o $@
$(repro_all_apparmor) : $(INST_BASE)/ha_addon_%/apparmor.txt: $(TEMPL)/apparmor.jinja $(TEMPL)/%_data.json $(repro_all_apparmor) : $(INST_BASE)/ha_addon_%/apparmor.txt: $(TEMPL)/apparmor.jinja $(TEMPL)/%_data.json
$(JINJA) --strict $< $(filter %.json,$^) -o $@ $(JINJA) --strict $< $(filter %.json,$^) -o $@

View File

@@ -29,23 +29,27 @@ target "_common" {
"type =sbom,generator=docker/scout-sbom-indexer:latest" "type =sbom,generator=docker/scout-sbom-indexer:latest"
] ]
annotations = [ annotations = [
"index:io.hass.version=${VERSION}",
"index:io.hass.type=addon", "index:io.hass.type=addon",
"index:io.hass.arch=aarch64|amd64", "index:io.hass.arch=armhf|aarch64|i386|amd64",
"index,manifest-descriptor:org.opencontainers.image.title=TSUN-Proxy", "index:org.opencontainers.image.title=TSUN-Proxy",
"index,manifest-descriptor:org.opencontainers.image.authors=Stefan Allius", "index:org.opencontainers.image.authors=Stefan Allius",
"index,manifest-descriptor:org.opencontainers.image.created=${BUILD_DATE}", "index:org.opencontainers.image.created=${BUILD_DATE}",
"index,manifest-descriptor:org.opencontainers.image.version=${VERSION}", "index:org.opencontainers.image.version=${VERSION}",
"index,manifest-descriptor:org.opencontainers.image.description=${DESCRIPTION}", "index:org.opencontainers.image.revision=${BRANCH}",
"index:org.opencontainers.image.description=${DESCRIPTION}",
"index:org.opencontainers.image.licenses=BSD-3-Clause", "index:org.opencontainers.image.licenses=BSD-3-Clause",
"index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon", "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon"
] ]
labels = { labels = {
"io.hass.version" = "${VERSION}"
"io.hass.type" = "addon" "io.hass.type" = "addon"
"io.hass.arch" = "aarch64|amd64" "io.hass.arch" = "armhf|aarch64|i386|amd64"
"org.opencontainers.image.title" = "TSUN-Proxy" "org.opencontainers.image.title" = "TSUN-Proxy"
"org.opencontainers.image.authors" = "Stefan Allius" "org.opencontainers.image.authors" = "Stefan Allius"
"org.opencontainers.image.created" = "${BUILD_DATE}" "org.opencontainers.image.created" = "${BUILD_DATE}"
"org.opencontainers.image.version" = "${VERSION}" "org.opencontainers.image.version" = "${VERSION}"
"org.opencontainers.image.revision" = "${BRANCH}"
"org.opencontainers.image.description" = "${DESCRIPTION}" "org.opencontainers.image.description" = "${DESCRIPTION}"
"org.opencontainers.image.licenses" = "BSD-3-Clause" "org.opencontainers.image.licenses" = "BSD-3-Clause"
"org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy/ha_addonsha_addon" "org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy/ha_addonsha_addon"
@@ -55,7 +59,7 @@ target "_common" {
] ]
no-cache = false no-cache = false
platforms = ["linux/amd64", "linux/arm64"] platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
} }
target "_debug" { target "_debug" {

View File

@@ -13,12 +13,12 @@
# 1 Build Base Image # # 1 Build Base Image #
###################### ######################
ARG BUILD_FROM="ghcr.io/hassio-addons/base:18.0.3" ARG BUILD_FROM="ghcr.io/hassio-addons/base:17.2.5"
# hadolint ignore=DL3006 # hadolint ignore=DL3006
FROM $BUILD_FROM AS base FROM $BUILD_FROM AS base
# Install Python, pip and virtual environment tools # Install Python, pip and virtual environment tools
RUN apk add --no-cache python3=3.12.11-r0 py3-pip=25.1.1-r0 && \ RUN apk add --no-cache python3=3.12.10-r0 py3-pip=24.3.1-r0 && \
python -m venv /opt/venv && \ python -m venv /opt/venv && \
. /opt/venv/bin/activate . /opt/venv/bin/activate

View File

@@ -1,46 +1,18 @@
#!/usr/bin/with-contenv bashio #!/usr/bin/with-contenv bashio
bashio::log.blue "-----------------------------------------------------------" echo "Add-on environment started"
bashio::log.blue "run.sh: info: setup Add-on environment"
bashio::cache.flush_all
MQTT_HOST=""
SLUG=""
HOSTNAME=""
if bashio::supervisor.ping; then
bashio::log "run.sh: info: check Home Assistant bashio for config values"
if bashio::services.available mqtt; then
MQTT_HOST=$(bashio::services mqtt "host")
MQTT_PORT=$(bashio::services mqtt "port")
MQTT_USER=$(bashio::services mqtt "username")
MQTT_PASSWORD=$(bashio::services mqtt "password")
else
bashio::log.yellow "run.sh: info: Home Assistant MQTT service not available!"
fi
SLUG=$(bashio::addon.repository)
HOSTNAME=$(bashio::addon.hostname)
else
bashio::log.red "run.sh: error: Home Assistant Supervisor API not available!"
fi
if [ -z "$SLUG" ]; then echo "check for Home Assistant MQTT"
bashio::log.yellow "run.sh: info: addon slug not found" MQTT_HOST=$(bashio::services mqtt "host")
else MQTT_PORT=$(bashio::services mqtt "port")
bashio::log.green "run.sh: info: found addon slug: $SLUG" MQTT_USER=$(bashio::services mqtt "username")
export SLUG MQTT_PASSWORD=$(bashio::services mqtt "password")
fi
if [ -z "$HOSTNAME" ]; then
bashio::log.yellow "run.sh: info: addon hostname not found"
else
bashio::log.green "run.sh: info: found addon hostname: $HOSTNAME"
export HOSTNAME
fi
# if an MQTT config was or was not found, drop a note # if an MQTT config was or was not found, drop a note
if [ -z "$MQTT_HOST" ]; then if [ -z "$MQTT_HOST" ]; then
bashio::log.yellow "run.sh: info: MQTT config not found" echo "MQTT not found"
else else
bashio::log.green "run.sh: info: found MQTT config" echo "MQTT found"
export MQTT_HOST export MQTT_HOST
export MQTT_PORT export MQTT_PORT
export MQTT_USER export MQTT_USER
@@ -57,6 +29,5 @@ cd /home/proxy || exit
export VERSION=$(cat /proxy-version.txt) export VERSION=$(cat /proxy-version.txt)
bashio::log.blue "run.sh: info: Start Proxyserver..." echo "Start Proxyserver..."
bashio::log.blue "-----------------------------------------------------------"
python3 server.py --rel_urls --json_config=/data/options.json --log_path=/homeassistant/tsun-proxy/logs/ --config_path=/homeassistant/tsun-proxy/ --log_backups=2 python3 server.py --rel_urls --json_config=/data/options.json --log_path=/homeassistant/tsun-proxy/logs/ --config_path=/homeassistant/tsun-proxy/ --log_backups=2
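
run.sh only exports SLUG (the repository slug from bashio::addon.repository) and HOSTNAME; the web UI then derives the Home Assistant links to the add-on's config and log pages from them, as exercised by test_addon_links further up. A hedged sketch of that derivation (the helper name and the exact mapping are assumptions inferred from the test data, not the proxy's real code):

import os

def addon_links() -> dict:
    """Return links to the HA add-on config/log pages, or {} for the plain docker variant."""
    slug = os.environ.get('SLUG', '')
    hostname = os.environ.get('HOSTNAME', '')
    if not slug or not hostname:
        return {}
    # 'c676133d-tsun-proxy' -> 'c676133d_tsun-proxy': repository slug + '_' + add-on name
    addon_id = hostname.replace('-', '_', 1)
    return {
        'config': f'/hassio/addon/{addon_id}/config',
        'logs': f'/hassio/addon/{addon_id}/logs',
    }

With SLUG=c676133d and HOSTNAME=c676133d-tsun-proxy this yields exactly the hrefs asserted in test_addon_links; without the two variables the links are omitted, matching the docker case.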

View File

@@ -10,6 +10,8 @@ init: false
arch: arch:
- aarch64 - aarch64
- amd64 - amd64
- armhf
- armv7
startup: services startup: services
homeassistant_api: true homeassistant_api: true
map: map:

View File

@@ -2,6 +2,7 @@
{ {
"name": "TSUN-Proxy (Release Candidate)", "name": "TSUN-Proxy (Release Candidate)",
"description": "MQTT Proxy for TSUN Photovoltaic Inverters", "description": "MQTT Proxy for TSUN Photovoltaic Inverters",
"version": "rc",
"image": "ghcr.io/s-allius/tsun-gen3-addon", "image": "ghcr.io/s-allius/tsun-gen3-addon",
"slug": "tsun-proxy-rc", "slug": "tsun-proxy-rc",
"advanced": true, "advanced": true,