s-allius/issue206 (#213)

* update changelog

* add addon-dev target

* initial version

* use prebuilt docker image

* initial version for multi arch images

* fix missing label latest

* create log and config folders first

* clean up and translate to english

* set labels with docker bake

* add addon-debug and addon-dev targets

* pass version number to proxy at runtime

* add two more callbacks

* get addon version from app

* deploy rc addon container to ghcr

* move ha_addon test into subdir

* fix crash on container restart

- mkdir -p returns no error even if the directory exists
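
  For illustration, a minimal Python sketch of the same idempotent pattern (the actual fix is in the shell run script; the paths here are illustrative only):

  ```python
  import os

  # Like `mkdir -p`, os.makedirs with exist_ok=True creates missing
  # parents and does not fail if the directory already exists, so a
  # container restart cannot crash on pre-existing folders.
  os.makedirs('/home/proxy/config', exist_ok=True)
  os.makedirs('/home/proxy/log', exist_ok=True)
  ```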

* preparation for unit testing

- move script into a method

* added further config to schema

* typo fixed

* added monitor_sn + PV-strings 3-6 to create toml

* added options.json for testing

* prepare pytest and coverage for addons

* fix missing values in resulting config.toml
- define mqtt default values
- convert filter configuration

* first running unittest for addons

* add ha_addons

* increase test coverage

* test empty options.json file for HA AddOn

* fix pytest call in terminal

* improve test coverage

* remove unneeded options.json

* move config.py into subdir cnf

---------

Co-authored-by: Michael Metz <michael.metz@siemens.com>
Author: Stefan Allius
Date: 2024-12-03 22:22:50 +01:00
Committed by: GitHub
Parent: a5b2b4b7c2
Commit: be3b4d6df0
52 changed files with 660 additions and 5323 deletions


@@ -54,7 +54,7 @@ jobs:
flake8 --exit-zero --ignore=C901,E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505 --format=pylint --output-file=output_flake.txt --exclude=*.pyc app/src/
- name: Test with pytest
run: |
python -m pytest app ha_addon --cov=app/src --cov=ha_addon/rootfs/home --cov-report=xml
python -m pytest app ha_addons --cov=app/src --cov=ha_addons/ha_addon/rootfs/home --cov-report=xml
coverage report
- name: Analyze with SonarCloud
if: ${{ env.SONAR_TOKEN != 0 }}

.gitignore (vendored): 4 lines changed

@@ -4,8 +4,8 @@ __pycache__
bin/**
mosquitto/**
homeassistant/**
ha_addon/rootfs/home/proxy/*
ha_addon/rootfs/requirements.txt
ha_addons/ha_addon/rootfs/home/proxy/*
ha_addons/ha_addon/rootfs/requirements.txt
tsun_proxy/**
Doku/**
.DS_Store

.vscode/settings.json (vendored): 15 lines changed

@@ -1,20 +1,22 @@
{
"python.analysis.extraPaths": [
"app/src",
".venv/lib" ],
"app/tests",
".venv/lib",
"ha_addons/ha_addon/rootfs" ],
"python.testing.pytestArgs": [
"-v",
"-vvv",
"--cov=app/src",
"--cov=ha_addon/rootfs/home",
"--cov=ha_addons/ha_addon/rootfs",
"--cov-report=xml",
"app",
"system_tests",
"ha_addon"
"ha_addons"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"flake8.args": [
"--extend-exclude=app/tests/*.py system_tests/*.py"
"--extend-exclude=app/tests/*.py system_tests/*.py ha_addons/ha_addon/tests/*.py"
],
"sonarlint.connectedMode.project": {
"connectionId": "s-allius",
@@ -27,5 +29,6 @@
"python.autoComplete.extraPaths": [
".venv/lib"
],
"coverage-gutters.coverageBaseDir": "tsun"
"coverage-gutters.coverageBaseDir": "tsun",
"makefile.configureOnOpen": false
}


@@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [unreleased]
- add initial support for home assistant add-ons from @mime24
- github action: use ubuntu 24.04 and sonar-scanner-action 4 [#222](https://github.com/s-allius/tsun-gen3-proxy/issues/222)
- migrate paho.mqtt CallbackAPIVersion to VERSION2 [#224](https://github.com/s-allius/tsun-gen3-proxy/issues/224)
- add PROD_COMPL_TYPE to trace


@@ -1,7 +1,10 @@
.PHONY: build clean
.PHONY: build clean addon-dev addon-debug addon-rc
# debug dev:
# $(MAKE) -C app $@
clean build:
$(MAKE) -C ha_addon $@
$(MAKE) -C ha_addons/ha_addon $@
addon-dev addon-debug addon-rc:
$(MAKE) -C ha_addons/ha_addon $(patsubst addon-%,%,$@)

app/.version (new file): 1 line changed

@@ -0,0 +1 @@
0.12.0


@@ -1,4 +1,5 @@
# test_with_pytest.py
import pytest
import tomllib
from schema import SchemaMissingKeyError
from cnf.config import Config, ConfigIfc
@@ -22,6 +23,77 @@ def test_empty_config():
except SchemaMissingKeyError:
pass
@pytest.fixture
def ConfigComplete():
return {
'gen3plus': {
'at_acl': {
'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']},
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'],
'block': ['AT+SUPDATE']}
}
},
'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com',
'port': 5005},
'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com',
'port': 10000},
'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None},
'ha': {'auto_conf_prefix': 'homeassistant',
'discovery_prefix': 'homeassistant',
'entity_prefix': 'tsun',
'proxy_node_id': 'proxy',
'proxy_unique_id': 'P170000000000001'},
'inverters': {
'allow_all': False,
'R170000000000001': {'node_id': 'PV-Garage/',
'modbus_polling': False,
'monitor_sn': 0,
'pv1': {'manufacturer': 'man1',
'type': 'type1'},
'pv2': {'manufacturer': 'man2',
'type': 'type2'},
'suggested_area': 'Garage',
'sensor_list': 688},
'Y170000000000001': {'modbus_polling': True,
'monitor_sn': 2000000000,
'node_id': 'PV-Garage2/',
'pv1': {'manufacturer': 'man1',
'type': 'type1'},
'pv2': {'manufacturer': 'man2',
'type': 'type2'},
'pv3': {'manufacturer': 'man3',
'type': 'type3'},
'pv4': {'manufacturer': 'man4',
'type': 'type4'},
'suggested_area': 'Garage2',
'sensor_list': 688}
}
}
@pytest.fixture
def ConfigMinimum():
return {
'gen3plus': {
'at_acl': {
'mqtt': {'allow': ['AT+'], 'block': []},
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'],
'block': []}
}
},
'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com',
'port': 5005},
'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000},
'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None},
'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
'inverters': {
'allow_all': True,
'R170000000000001': {'node_id': '',
'modbus_polling': True,
'monitor_sn': 0,
'suggested_area': '',
'sensor_list': 688}}}
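These fixtures contain keys that the raw test inputs below never set (e.g. 'block': [] and 'monitor_sn': 0); the schema library fills them in from Optional(..., default=...) during validation. A minimal standalone sketch of that behaviour:

```python
from schema import Schema, Optional

# Optional keys with a default are added to the result
# when they are missing from the validated input.
acl = Schema({'allow': [str], Optional('block', default=[]): [str]})
assert acl.validate({'allow': ['AT+']}) == {'allow': ['AT+'], 'block': []}
```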
def test_default_config():
with open("app/config/default_config.toml", "rb") as f:
cnf = tomllib.load(f)
@@ -58,23 +130,23 @@ def test_default_config():
'suggested_area': '',
'sensor_list': 688}}}
def test_full_config():
def test_full_config(ConfigComplete):
cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005},
'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []},
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}},
'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']},
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': ['AT+SUPDATE']}}},
'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000},
'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''},
'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
'inverters': {'allow_all': True,
'R170000000000001': {'modbus_polling': True, 'node_id': '', 'sensor_list': 0, 'suggested_area': '', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}},
'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'sensor_list': 0x1511, 'suggested_area': ''}}}
'inverters': {'allow_all': False,
'R170000000000001': {'modbus_polling': False, 'node_id': 'PV-Garage/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}},
'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': 'PV-Garage2/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage2', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}, 'pv4': {'type': 'type4', 'manufacturer': 'man4'}}}}
try:
validated = Config.conf_schema.validate(cnf)
except Exception:
assert False
assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'pv1': {'manufacturer': 'man1','type': 'type1'},'pv2': {'manufacturer': 'man2','type': 'type2'},'pv3': {'manufacturer': 'man3','type': 'type3'}, 'suggested_area': '', 'sensor_list': 0}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': '', 'sensor_list': 5393}}}
assert validated == ConfigComplete
def test_mininum_config():
def test_mininum_config(ConfigMinimum):
cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005},
'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+']},
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE']}}},
@@ -89,7 +161,7 @@ def test_mininum_config():
validated = Config.conf_schema.validate(cnf)
except Exception:
assert False
assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': '', 'sensor_list': 688}}}
assert validated == ConfigMinimum
def test_read_empty():
cnf = {}


@@ -1,45 +0,0 @@
SHELL = /bin/sh
# Folders
SRC=../app
SRC_PROXY=$(SRC)/src
CNF_PROXY=$(SRC)/config
DST=rootfs
DST_PROXY=$(DST)/home/proxy
# collect source files
SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\
$(wildcard $(SRC_PROXY)/*.ini)\
$(wildcard $(SRC_PROXY)/gen3/*.py)\
$(wildcard $(SRC_PROXY)/gen3plus/*.py)
CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml)
# determine destination files
TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%)
CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%)
build: rootfs
clean:
rm -r -f $(DST_PROXY)
rm -f $(DST)/requirements.txt
rootfs: $(TARGET_FILES) $(CONFIG_FILES) $(DST)/requirements.txt
.PHONY: build clean rootfs
$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/%
@echo Copy $< to $@
@mkdir -p $(@D)
@cp $< $@
$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/%
@echo Copy $< to $@
@mkdir -p $(@D)
@cp $< $@
$(DST)/requirements.txt : $(SRC)/requirements.txt
@echo Copy $< to $@
@cp $< $@


@@ -1,65 +0,0 @@
import json
import os
# This file takes over the add-on configuration and writes it into the
# configuration file of the tsun-proxy
# The add-on configuration is provided in the file /data/options.json
# The resulting configuration is stored in the file
# /home/proxy/config/config.toml
# Take over the environment variables
# (alternatively, the homeassistant supervisor API could be accessed)
data = {}
data['mqtt.host'] = os.getenv('MQTT_HOST')
data['mqtt.port'] = os.getenv('MQTT_PORT')
data['mqtt.user'] = os.getenv('MQTT_USER')
data['mqtt.passwd'] = os.getenv('MQTT_PASSWORD')
# Read the add-on configuration from the file /data/options.json
with open('/data/options.json') as json_file:
# with open('options.json') as json_file:
options_data = json.load(json_file)
data.update(options_data)
# Write the add-on configuration to the file /home/proxy/config/config.toml # noqa: E501
with open('/home/proxy/config/config.toml', 'w+') as f:
# with open('./config/config.toml', 'w+') as f:
f.write(f"""
mqtt.host = '{data.get('mqtt.host')}' # URL or IP address of the mqtt broker
mqtt.port = {data.get('mqtt.port')}
mqtt.user = '{data.get('mqtt.user')}'
mqtt.passwd = '{data.get('mqtt.passwd')}'
ha.auto_conf_prefix = '{data.get('ha.auto_conf_prefix', 'homeassistant')}' # MQTT prefix for subscribing for homeassistant status updates # noqa: E501
ha.discovery_prefix = '{data.get('ha.discovery_prefix', 'homeassistant')}' # MQTT prefix for discovery topic # noqa: E501
ha.entity_prefix = '{data.get('ha.entity_prefix', 'tsun')}' # MQTT topic prefix for publishing inverter values # noqa: E501
ha.proxy_node_id = '{data.get('ha.proxy_node_id', 'proxy')}' # MQTT node id, for the proxy_node_id
ha.proxy_unique_id = '{data.get('ha.proxy_unique_id', 'P170000000000001')}' # MQTT unique id, to identify a proxy instance
tsun.enabled = {str(data.get('tsun.enabled', True)).lower()}
tsun.host = '{data.get('tsun.host', 'logger.talent-monitoring.com')}'
tsun.port = {data.get('tsun.port', 5005)}
solarman.enabled = {str(data.get('solarman.enabled', True)).lower()}
solarman.host = '{data.get('solarman.host', 'iot.talent-monitoring.com')}'
solarman.port = {data.get('solarman.port', 10000)}
inverters.allow_all = {str(data.get('inverters.allow_all', False)).lower()}
""")
for inverter in data['inverters']:
f.write(f"""
[inverters."{inverter['serial']}"]
node_id = '{inverter['node_id']}'
suggested_area = '{inverter['suggested_area']}'
modbus_polling = {str(inverter['modbus_polling']).lower()}
pv1 = {{type = '{inverter['pv1_type']}', manufacturer = '{inverter['pv1_manufacturer']}'}} # Optional, PV module descr # noqa: E501
pv2 = {{type = '{inverter['pv2_type']}', manufacturer = '{inverter['pv2_manufacturer']}'}} # Optional, PV module descr # noqa: E501
""")


@@ -1,19 +0,0 @@
{
"inverters": [
{
"serial": "R17E760702080400",
"node_id": "PV-Garage",
"suggested_area": "Garage",
"modbus_polling": false,
"pv1_manufacturer": "Shinefar",
"pv1_type": "SF-M18/144550",
"pv2_manufacturer": "Shinefar",
"pv2_type": "SF-M18/144550"
}
],
"tsun.enabled": false,
"solarman.enabled": false,
"inverters.allow_all": false
}


@@ -1,6 +0,0 @@
# test_with_pytest.py
# import ha_addon.rootfs.home.create_config_toml
def test_config():
pass


@@ -13,10 +13,7 @@
# 1 Build Image #
######################
# opt for suitable build base. I opted for the recommended hassio-addon base
#ARG BUILD_FROM="ghcr.io/hassio-addons/debian-base:latest"
ARG BUILD_FROM="ghcr.io/hassio-addons/base:latest"
ARG BUILD_FROM="ghcr.io/hassio-addons/base:stable"
FROM $BUILD_FROM
@@ -70,18 +67,16 @@ COPY rootfs/ /
RUN chmod a+x /run.sh
# no idea whether needed or not
ENV SERVICE_NAME="tsun-proxy"
ENV UID=1000
ENV GID=1000
ENV VERSION="0.0"
#######################
# 6 run app #
#######################
ARG SERVICE_NAME
ARG VERSION
ENV SERVICE_NAME=${SERVICE_NAME}
RUN echo ${VERSION} > /proxy-version.txt
# command to run on container start
CMD [ "/run.sh" ]
@@ -90,8 +85,3 @@ CMD [ "/run.sh" ]
#######################
# Labels
LABEL \
io.hass.version="VERSION" \
io.hass.type="addon" \
io.hass.arch="armhf|aarch64|i386|amd64"
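
The build stage above writes the add-on version to /proxy-version.txt. A hypothetical sketch of how the proxy side might read it back at runtime (the reading code is not part of this diff; the fallback value is an assumption based on the removed `ENV VERSION="0.0"`):

```python
from pathlib import Path

def read_addon_version(path: str = '/proxy-version.txt') -> str:
    """Return the version written at image build time, or a fallback."""
    try:
        return Path(path).read_text().strip()
    except OSError:
        return '0.0'
```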


@@ -0,0 +1,74 @@
#!make
include ../../.env
SHELL = /bin/sh
IMAGE = tsun-gen3-addon
# Folders
SRC=../../app
SRC_PROXY=$(SRC)/src
CNF_PROXY=$(SRC)/config
DST=rootfs
DST_PROXY=$(DST)/home/proxy
# collect source files
SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\
$(wildcard $(SRC_PROXY)/*.ini)\
$(wildcard $(SRC_PROXY)/cnf/*.py)\
$(wildcard $(SRC_PROXY)/gen3/*.py)\
$(wildcard $(SRC_PROXY)/gen3plus/*.py)
CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml)
# determine destination files
TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%)
CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%)
export BUILD_DATE := ${shell date -Iminutes}
VERSION := $(shell cat $(SRC)/.version)
export MAJOR := $(shell echo $(VERSION) | cut -f1 -d.)
PUBLIC_URL := $(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f1 -d/)
PUBLIC_USER :=$(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f2 -d/)
dev debug: build
@echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PRIVAT_CONTAINER_REGISTRY)$(IMAGE)
export VERSION=$(VERSION)-$@ && \
export IMAGE=$(PRIVAT_CONTAINER_REGISTRY)$(IMAGE) && \
docker buildx bake -f docker-bake.hcl $@
rc: build
@echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PUBLIC_CONTAINER_REGISTRY)$(IMAGE)
@echo login at $(PUBLIC_URL) as $(PUBLIC_USER)
@DO_LOGIN="$(shell echo $(PUBLIC_CR_KEY) | docker login $(PUBLIC_URL) -u $(PUBLIC_USER) --password-stdin)"
export VERSION=$(VERSION)-$@ && \
export IMAGE=$(PUBLIC_CONTAINER_REGISTRY)$(IMAGE) && \
docker buildx bake -f docker-bake.hcl $@
build: rootfs
clean:
rm -r -f $(DST_PROXY)
rm -f $(DST)/requirements.txt
rootfs: $(TARGET_FILES) $(CONFIG_FILES) $(DST)/requirements.txt
.PHONY: debug dev build clean rootfs
$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/%
@echo Copy $< to $@
@mkdir -p $(@D)
@cp $< $@
$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/%
@echo Copy $< to $@
@mkdir -p $(@D)
@cp $< $@
$(DST)/requirements.txt : $(SRC)/requirements.txt
@echo Copy $< to $@
@cp $< $@


@@ -1,6 +1,8 @@
name: "TSUN-Proxy"
description: "MQTT Proxy for TSUN Photovoltaic Inverters"
version: "0.0.7"
version: "dev"
image: docker.io/sallius/tsun-gen3-addon
url: https://github.com/s-allius/tsun-gen3-proxy
slug: "tsun-proxy"
init: false
arch:
@@ -20,24 +22,35 @@ ports:
# Definition of parameters in the configuration tab of the addon
# parameters are available within the container as /data/options.json
# and should become picked up by the proxy - current workarround as a transfer script
# TODO: add further schema for remaining config parameters
# and should become picked up by the proxy - current workaround as a transfer script
# TODO: check again for multi-hierarchy parameters
# TODO: implement direct reading of the configuration file
schema:
inverters:
- serial: str
monitor_sn: int?
node_id: str
suggested_area: str
modbus_polling: bool
#strings: # unfortunately, listing the following 3 parameters in the schema does not work. possibly nesting is not supported.
client_mode_host: str?
client_mode_port: int?
#strings: # unfortunately, listing the following 3 parameters in the schema does not work. possibly nesting is not supported.
# - string: str
# type: str
# manufacturer: str
# hence this variant
pv1_manufacturer: str
pv1_type: str
pv2_manufacturer: str
pv2_type: str
pv1_manufacturer: str?
pv1_type: str?
pv2_manufacturer: str?
pv2_type: str?
pv3_manufacturer: str?
pv3_type: str?
pv4_manufacturer: str?
pv4_type: str?
pv5_manufacturer: str?
pv5_type: str?
pv6_manufacturer: str?
pv6_type: str?
tsun.enabled: bool
solarman.enabled: bool
inverters.allow_all: bool
@@ -52,6 +65,16 @@ schema:
ha.entity_prefix: str? # ditto
ha.proxy_node_id: str? # ditto
ha.proxy_unique_id: str? # ditto
tsun.host: str?
solarman.host: str?
gen3plus.at_acl.tsun.allow:
- str
gen3plus.at_acl.tsun.block:
- str?
gen3plus.at_acl.mqtt.allow:
- str
gen3plus.at_acl.mqtt.block:
- str?
# set default options for mandatory parameters
# for optional parameters do not define any default value in the options dictionary.
@@ -62,7 +85,7 @@ options:
node_id: PV-Garage
suggested_area: Garage
modbus_polling: false
#strings:
# strings:
# - string: PV1
# type: SF-M18/144550
# manufacturer: Shinefar
@@ -76,3 +99,5 @@ options:
tsun.enabled: true # set default
solarman.enabled: true # set default
inverters.allow_all: false # set default
gen3plus.at_acl.tsun.allow: ["AT+Z", "AT+UPURL", "AT+SUPDATE"]
gen3plus.at_acl.mqtt.allow: ["AT+"]


@@ -0,0 +1,99 @@
variable "IMAGE" {
default = "tsun-gen3-addon"
}
variable "VERSION" {
default = "0.0.0"
}
variable "MAJOR" {
default = "0"
}
variable "BUILD_DATE" {
default = "dev"
}
variable "BRANCH" {
default = ""
}
variable "DESCRIPTION" {
default = "This proxy enables a reliable connection between TSUN third generation inverters (eg. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations."
}
target "_common" {
context = "."
dockerfile = "Dockerfile"
args = {
VERSION = "${VERSION}"
environment = "production"
}
attest = [
"type =provenance,mode=max",
"type =sbom,generator=docker/scout-sbom-indexer:latest"
]
annotations = [
"index:io.hass.version=${VERSION}",
"index:io.hass.type=addon",
"index:io.hass.arch=armhf|aarch64|i386|amd64",
"index:org.opencontainers.image.title=TSUN-Proxy",
"index:org.opencontainers.image.authors=Stefan Allius",
"index:org.opencontainers.image.created=${BUILD_DATE}",
"index:org.opencontainers.image.version=${VERSION}",
"index:org.opencontainers.image.revision=${BRANCH}",
"index:org.opencontainers.image.description=${DESCRIPTION}",
"index:org.opencontainers.image.licenses=BSD-3-Clause",
"index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon"
]
labels = {
"io.hass.version" = "${VERSION}"
"io.hass.type" = "addon"
"io.hass.arch" = "armhf|aarch64|i386|amd64"
"org.opencontainers.image.title" = "TSUN-Proxy"
"org.opencontainers.image.authors" = "Stefan Allius"
"org.opencontainers.image.created" = "${BUILD_DATE}"
"org.opencontainers.image.version" = "${VERSION}"
"org.opencontainers.image.revision" = "${BRANCH}"
"org.opencontainers.image.description" = "${DESCRIPTION}"
"org.opencontainers.image.licenses" = "BSD-3-Clause"
"org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy/ha_addonsha_addon"
}
output = [
"type=image,push=true"
]
no-cache = false
platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
}
target "_debug" {
args = {
LOG_LVL = "DEBUG"
environment = "dev"
}
}
target "_prod" {
args = {
}
}
target "debug" {
inherits = ["_common", "_debug"]
tags = ["${IMAGE}:debug"]
}
target "dev" {
inherits = ["_common"]
tags = ["${IMAGE}:dev"]
}
target "preview" {
inherits = ["_common", "_prod"]
tags = ["${IMAGE}:preview", "${IMAGE}:${VERSION}"]
}
target "rc" {
inherits = ["_common", "_prod"]
tags = ["${IMAGE}:rc", "${IMAGE}:${VERSION}"]
}
target "rel" {
inherits = ["_common", "_prod"]
tags = ["${IMAGE}:latest", "${IMAGE}:${MAJOR}", "${IMAGE}:${VERSION}"]
no-cache = true
}

(binary image file changed: 113 KiB before and after)

(binary image file changed: 113 KiB before and after)


@@ -0,0 +1,115 @@
import json
import os
# This file takes over the add-on configuration and writes it into the
# configuration file of the tsun-proxy
# The add-on configuration is provided in the file /data/options.json
# The resulting configuration is stored in the file
# /home/proxy/config/config.toml
# Take over the environment variables
# (alternatively, the homeassistant supervisor API could be accessed)
def create_config():
data = {}
data['mqtt.host'] = os.getenv('MQTT_HOST', "mqtt")
data['mqtt.port'] = os.getenv('MQTT_PORT', 1883)
data['mqtt.user'] = os.getenv('MQTT_USER', "")
data['mqtt.passwd'] = os.getenv('MQTT_PASSWORD', "")
# Read the add-on configuration from the file /data/options.json
# with open('data/options.json') as json_file:
with open('/data/options.json') as json_file:
try:
options_data = json.load(json_file)
data.update(options_data)
except json.JSONDecodeError:
pass
# Write the add-on configuration to the file /home/proxy/config/config.toml # noqa: E501
# with open('./config/config.toml', 'w+') as f:
with open('/home/proxy/config/config.toml', 'w+') as f:
f.write(f"""
mqtt.host = '{data.get('mqtt.host')}' # URL or IP address of the mqtt broker
mqtt.port = {data.get('mqtt.port')}
mqtt.user = '{data.get('mqtt.user')}'
mqtt.passwd = '{data.get('mqtt.passwd')}'
ha.auto_conf_prefix = '{data.get('ha.auto_conf_prefix', 'homeassistant')}' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = '{data.get('ha.discovery_prefix', 'homeassistant')}' # MQTT prefix for discovery topic
ha.entity_prefix = '{data.get('ha.entity_prefix', 'tsun')}' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = '{data.get('ha.proxy_node_id', 'proxy')}' # MQTT node id, for the proxy_node_id
ha.proxy_unique_id = '{data.get('ha.proxy_unique_id', 'P170000000000001')}' # MQTT unique id, to identify a proxy instance
tsun.enabled = {str(data.get('tsun.enabled', True)).lower()}
tsun.host = '{data.get('tsun.host', 'logger.talent-monitoring.com')}'
tsun.port = {data.get('tsun.port', 5005)}
solarman.enabled = {str(data.get('solarman.enabled', True)).lower()}
solarman.host = '{data.get('solarman.host', 'iot.talent-monitoring.com')}'
solarman.port = {data.get('solarman.port', 10000)}
inverters.allow_all = {str(data.get('inverters.allow_all', False)).lower()}
""") # noqa: E501
if 'inverters' in data:
for inverter in data['inverters']:
f.write(f"""
[inverters."{inverter['serial']}"]
node_id = '{inverter['node_id']}'
suggested_area = '{inverter['suggested_area']}'
modbus_polling = {str(inverter['modbus_polling']).lower()}
# check if inverter has monitor_sn key. if not, skip monitor_sn
{f"monitor_sn = '{inverter['monitor_sn']}'" if 'monitor_sn' in inverter else ''}
# check if inverter has 'pv1_type' and 'pv1_manufacturer' keys. if not, skip pv1
{f"pv1 = {{type = '{inverter['pv1_type']}', manufacturer = '{inverter['pv1_manufacturer']}'}}" if 'pv1_type' in inverter and 'pv1_manufacturer' in inverter else ''}
# check if inverter has 'pv2_type' and 'pv2_manufacturer' keys. if not, skip pv2
{f"pv2 = {{type = '{inverter['pv2_type']}', manufacturer = '{inverter['pv2_manufacturer']}'}}" if 'pv2_type' in inverter and 'pv2_manufacturer' in inverter else ''}
# check if inverter has 'pv3_type' and 'pv3_manufacturer' keys. if not, skip pv3
{f"pv3 = {{type = '{inverter['pv3_type']}', manufacturer = '{inverter['pv3_manufacturer']}'}}" if 'pv3_type' in inverter and 'pv3_manufacturer' in inverter else ''}
# check if inverter has 'pv4_type' and 'pv4_manufacturer' keys. if not, skip pv4
{f"pv4 = {{type = '{inverter['pv4_type']}', manufacturer = '{inverter['pv4_manufacturer']}'}}" if 'pv4_type' in inverter and 'pv4_manufacturer' in inverter else ''}
# check if inverter has 'pv5_type' and 'pv5_manufacturer' keys. if not, skip pv5
{f"pv5 = {{type = '{inverter['pv5_type']}', manufacturer = '{inverter['pv5_manufacturer']}'}}" if 'pv5_type' in inverter and 'pv5_manufacturer' in inverter else ''}
# check if inverter has 'pv6_type' and 'pv6_manufacturer' keys. if not, skip pv6
{f"pv6 = {{type = '{inverter['pv6_type']}', manufacturer = '{inverter['pv6_manufacturer']}'}}" if 'pv6_type' in inverter and 'pv6_manufacturer' in inverter else ''}
""") # noqa: E501
# add filters
f.write("""
[gen3plus.at_acl]
# filter for received commands from the internet
tsun.allow = [""")
if 'gen3plus.at_acl.tsun.allow' in data:
for rule in data['gen3plus.at_acl.tsun.allow']:
f.write(f"'{rule}',")
f.write("]\ntsun.block = [")
if 'gen3plus.at_acl.tsun.block' in data:
for rule in data['gen3plus.at_acl.tsun.block']:
f.write(f"'{rule}',")
f.write("""]
# filter for received commands from the MQTT broker
mqtt.allow = [""")
if 'gen3plus.at_acl.mqtt.allow' in data:
for rule in data['gen3plus.at_acl.mqtt.allow']:
f.write(f"'{rule}',")
f.write("]\nmqtt.block = [")
if 'gen3plus.at_acl.mqtt.block' in data:
for rule in data['gen3plus.at_acl.mqtt.block']:
f.write(f"'{rule}',")
f.write("]")
if __name__ == "__main__": # pragma: no cover
create_config()


@@ -1,104 +0,0 @@
from abc import ABC, abstractmethod
class AsyncIfc(ABC):
@abstractmethod
def get_conn_no(self):
pass # pragma: no cover
@abstractmethod
def set_node_id(self, value: str):
pass # pragma: no cover
#
# TX - QUEUE
#
@abstractmethod
def tx_add(self, data: bytearray):
''' add data to transmit queue'''
pass # pragma: no cover
@abstractmethod
def tx_flush(self):
''' send the transmit queue and clear it'''
pass # pragma: no cover
@abstractmethod
def tx_peek(self, size: int = None) -> bytearray:
'''returns size bytes without removing them'''
pass # pragma: no cover
@abstractmethod
def tx_log(self, level, info):
''' log the transmit queue'''
pass # pragma: no cover
@abstractmethod
def tx_clear(self):
''' clear transmit queue'''
pass # pragma: no cover
@abstractmethod
def tx_len(self):
''' get number of bytes in the transmit queue'''
pass # pragma: no cover
#
# FORWARD - QUEUE
#
@abstractmethod
def fwd_add(self, data: bytearray):
''' add data to forward queue'''
pass # pragma: no cover
@abstractmethod
def fwd_log(self, level, info):
''' log the forward queue'''
pass # pragma: no cover
#
# RX - QUEUE
#
@abstractmethod
def rx_get(self, size: int = None) -> bytearray:
'''removes size bytes and returns them'''
pass # pragma: no cover
@abstractmethod
def rx_peek(self, size: int = None) -> bytearray:
'''returns size bytes without removing them'''
pass # pragma: no cover
@abstractmethod
def rx_log(self, level, info):
''' logs the receive queue'''
pass # pragma: no cover
@abstractmethod
def rx_clear(self):
''' clear receive queue'''
pass # pragma: no cover
@abstractmethod
def rx_len(self):
''' get number of bytes in the receive queue'''
pass # pragma: no cover
@abstractmethod
def rx_set_cb(self, callback):
pass # pragma: no cover
#
# Protocol Callbacks
#
@abstractmethod
def prot_set_timeout_cb(self, callback):
pass # pragma: no cover
@abstractmethod
def prot_set_init_new_client_conn_cb(self, callback):
pass # pragma: no cover
@abstractmethod
def prot_set_update_header_cb(self, callback):
pass # pragma: no cover


@@ -1,397 +0,0 @@
import asyncio
import logging
import traceback
import time
from asyncio import StreamReader, StreamWriter
from typing import Self
from itertools import count
from proxy import Proxy
from byte_fifo import ByteFifo
from async_ifc import AsyncIfc
from infos import Infos
import gc
logger = logging.getLogger('conn')
class AsyncIfcImpl(AsyncIfc):
_ids = count(0)
def __init__(self) -> None:
logger.debug('AsyncIfcImpl.__init__')
self.fwd_fifo = ByteFifo()
self.tx_fifo = ByteFifo()
self.rx_fifo = ByteFifo()
self.conn_no = next(self._ids)
self.node_id = ''
self.timeout_cb = None
self.init_new_client_conn_cb = None
self.update_header_cb = None
def close(self):
self.timeout_cb = None
self.fwd_fifo.reg_trigger(None)
self.tx_fifo.reg_trigger(None)
self.rx_fifo.reg_trigger(None)
def set_node_id(self, value: str):
self.node_id = value
def get_conn_no(self):
return self.conn_no
def tx_add(self, data: bytearray):
''' add data to transmit queue'''
self.tx_fifo += data
def tx_flush(self):
''' send the transmit queue and clear it'''
self.tx_fifo()
def tx_peek(self, size: int = None) -> bytearray:
'''returns size bytes without removing them'''
return self.tx_fifo.peek(size)
def tx_log(self, level, info):
''' log the transmit queue'''
self.tx_fifo.logging(level, info)
def tx_clear(self):
''' clear transmit queue'''
self.tx_fifo.clear()
def tx_len(self):
''' get number of bytes in the transmit queue'''
return len(self.tx_fifo)
def fwd_add(self, data: bytearray):
''' add data to forward queue'''
self.fwd_fifo += data
def fwd_log(self, level, info):
''' log the forward queue'''
self.fwd_fifo.logging(level, info)
def rx_get(self, size: int = None) -> bytearray:
'''removes size bytes and returns them'''
return self.rx_fifo.get(size)
def rx_peek(self, size: int = None) -> bytearray:
'''returns size bytes without removing them'''
return self.rx_fifo.peek(size)
def rx_log(self, level, info):
''' logs the receive queue'''
self.rx_fifo.logging(level, info)
def rx_clear(self):
''' clear receive queue'''
self.rx_fifo.clear()
def rx_len(self):
''' get number of bytes in the receive queue'''
return len(self.rx_fifo)
def rx_set_cb(self, callback):
self.rx_fifo.reg_trigger(callback)
def prot_set_timeout_cb(self, callback):
self.timeout_cb = callback
def prot_set_init_new_client_conn_cb(self, callback):
self.init_new_client_conn_cb = callback
def prot_set_update_header_cb(self, callback):
self.update_header_cb = callback
class StreamPtr():
'''Descr StreamPtr'''
def __init__(self, _stream, _ifc=None):
self.stream = _stream
self.ifc = _ifc
@property
def ifc(self):
return self._ifc
@ifc.setter
def ifc(self, value):
self._ifc = value
@property
def stream(self):
return self._stream
@stream.setter
def stream(self, value):
self._stream = value
class AsyncStream(AsyncIfcImpl):
MAX_PROC_TIME = 2
'''maximum processing time for a received msg in sec'''
MAX_START_TIME = 400
'''maximum time without a received msg in sec'''
MAX_INV_IDLE_TIME = 120
'''maximum time without a received msg from the inverter in sec'''
MAX_DEF_IDLE_TIME = 360
'''maximum default time without a received msg in sec'''
def __init__(self, reader: StreamReader, writer: StreamWriter,
rstream: "StreamPtr") -> None:
AsyncIfcImpl.__init__(self)
logger.debug('AsyncStream.__init__')
self.remote = rstream
self.tx_fifo.reg_trigger(self.__write_cb)
self._reader = reader
self._writer = writer
self.r_addr = writer.get_extra_info('peername')
self.l_addr = writer.get_extra_info('sockname')
self.proc_start = None # timestamp when processing of a received msg started
self.proc_max = 0
self.async_publ_mqtt = None # will be set by AsyncStreamServer only
def __write_cb(self):
self._writer.write(self.tx_fifo.get())
def __timeout(self) -> int:
if self.timeout_cb:
return self.timeout_cb()
return 360
async def loop(self) -> Self:
"""Async loop handler for precessing all received messages"""
self.proc_start = time.time()
while True:
try:
self.__calc_proc_time()
dead_conn_to = self.__timeout()
await asyncio.wait_for(self.__async_read(),
dead_conn_to)
await self.__async_write()
await self.__async_forward()
if self.async_publ_mqtt:
await self.async_publ_mqtt()
except asyncio.TimeoutError:
logger.warning(f'[{self.node_id}:{self.conn_no}] Dead '
f'connection timeout ({dead_conn_to}s) '
f'for {self.l_addr}')
await self.disc()
return self
except OSError as error:
logger.error(f'[{self.node_id}:{self.conn_no}] '
f'{error} for l{self.l_addr} | '
f'r{self.r_addr}')
await self.disc()
return self
except RuntimeError as error:
logger.info(f'[{self.node_id}:{self.conn_no}] '
f'{error} for {self.l_addr}')
await self.disc()
return self
except Exception:
Infos.inc_counter('SW_Exception')
logger.error(
f"Exception for {self.r_addr}:\n"
f"{traceback.format_exc()}")
await asyncio.sleep(0) # be cooperative to other task
def __calc_proc_time(self):
if self.proc_start:
proc = time.time() - self.proc_start
if proc > self.proc_max:
self.proc_max = proc
self.proc_start = None
async def disc(self) -> None:
"""Async disc handler for graceful disconnect"""
if self._writer.is_closing():
return
logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}')
self._writer.close()
await self._writer.wait_closed()
def close(self) -> None:
logging.debug(f'AsyncStream.close() l{self.l_addr} | r{self.r_addr}')
"""close handler for a no waiting disconnect
hint: must be called before releasing the connection instance
"""
super().close()
self._reader.feed_eof() # abort awaited read
if self._writer.is_closing():
return
self._writer.close()
def healthy(self) -> bool:
elapsed = 0
if self.proc_start is not None:
elapsed = time.time() - self.proc_start
if elapsed > self.MAX_PROC_TIME:
logging.debug(f'[{self.node_id}:{self.conn_no}:'
f'{type(self).__name__}]'
f' act:{round(1000*elapsed)}ms'
f' max:{round(1000*self.proc_max)}ms')
logging.debug(f'Healthy() refs: {gc.get_referrers(self)}')
return elapsed < 5
'''
Our private methods
'''
async def __async_read(self) -> None:
"""Async read handler to read received data from TCP stream"""
data = await self._reader.read(4096)
if data:
self.proc_start = time.time()
self.rx_fifo += data
wait = self.rx_fifo() # call read in parent class
if wait and wait > 0:
await asyncio.sleep(wait)
else:
raise RuntimeError("Peer closed.")
async def __async_write(self, headline: str = 'Transmit to ') -> None:
"""Async write handler to transmit the send_buffer"""
if len(self.tx_fifo) > 0:
self.tx_fifo.logging(logging.INFO, f'{headline}{self.r_addr}:')
self._writer.write(self.tx_fifo.get())
await self._writer.drain()
async def __async_forward(self) -> None:
"""forward handler transmits data over the remote connection"""
if len(self.fwd_fifo) == 0:
return
try:
await self._async_forward()
except OSError as error:
if self.remote.stream:
rmt = self.remote
logger.error(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] '
f'Fwd: {error} for '
f'l{rmt.ifc.l_addr} | r{rmt.ifc.r_addr}')
await rmt.ifc.disc()
if rmt.ifc.close_cb:
rmt.ifc.close_cb()
except RuntimeError as error:
if self.remote.stream:
rmt = self.remote
logger.info(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] '
f'Fwd: {error} for {rmt.ifc.l_addr}')
await rmt.ifc.disc()
if rmt.ifc.close_cb:
rmt.ifc.close_cb()
except Exception:
Infos.inc_counter('SW_Exception')
logger.error(
f"Fwd Exception for {self.r_addr}:\n"
f"{traceback.format_exc()}")
async def publish_outstanding_mqtt(self):
'''Publish all outstanding MQTT topics'''
try:
await self.async_publ_mqtt()
await Proxy._async_publ_mqtt_proxy_stat('proxy')
except Exception:
pass
class AsyncStreamServer(AsyncStream):
def __init__(self, reader: StreamReader, writer: StreamWriter,
async_publ_mqtt, create_remote,
rstream: "StreamPtr") -> None:
AsyncStream.__init__(self, reader, writer, rstream)
self.create_remote = create_remote
self.async_publ_mqtt = async_publ_mqtt
def close(self) -> None:
logging.debug('AsyncStreamServer.close()')
self.create_remote = None
self.async_publ_mqtt = None
super().close()
async def server_loop(self) -> None:
'''Loop for receiving messages from the inverter (server-side)'''
logger.info(f'[{self.node_id}:{self.conn_no}] '
f'Accept connection from {self.r_addr}')
Infos.inc_counter('Inverter_Cnt')
await self.publish_outstanding_mqtt()
await self.loop()
Infos.dec_counter('Inverter_Cnt')
await self.publish_outstanding_mqtt()
logger.info(f'[{self.node_id}:{self.conn_no}] Server loop stopped for'
f' r{self.r_addr}')
# if the server connection closes, we also have to disconnect
# the connection to the TSUN cloud
if self.remote and self.remote.stream:
logger.info(f'[{self.node_id}:{self.conn_no}] disc client '
f'connection: [{self.remote.ifc.node_id}:'
f'{self.remote.ifc.conn_no}]')
await self.remote.ifc.disc()
async def _async_forward(self) -> None:
"""forward handler transmits data over the remote connection"""
if not self.remote.stream:
await self.create_remote()
if self.remote.stream and \
self.remote.ifc.init_new_client_conn_cb():
await self.remote.ifc._AsyncStream__async_write()
if self.remote.stream:
self.remote.ifc.update_header_cb(self.fwd_fifo.peek())
self.fwd_fifo.logging(logging.INFO, 'Forward to '
f'{self.remote.ifc.r_addr}:')
self.remote.ifc._writer.write(self.fwd_fifo.get())
await self.remote.ifc._writer.drain()
class AsyncStreamClient(AsyncStream):
def __init__(self, reader: StreamReader, writer: StreamWriter,
rstream: "StreamPtr", close_cb) -> None:
AsyncStream.__init__(self, reader, writer, rstream)
self.close_cb = close_cb
async def disc(self) -> None:
logging.debug('AsyncStreamClient.disc()')
self.remote = None
await super().disc()
def close(self) -> None:
logging.debug('AsyncStreamClient.close()')
self.close_cb = None
super().close()
async def client_loop(self, _: str) -> None:
'''Loop for receiving messages from the TSUN cloud (client-side)'''
Infos.inc_counter('Cloud_Conn_Cnt')
await self.publish_outstanding_mqtt()
await self.loop()
Infos.dec_counter('Cloud_Conn_Cnt')
await self.publish_outstanding_mqtt()
logger.info(f'[{self.node_id}:{self.conn_no}] '
'Client loop stopped for'
f' l{self.l_addr}')
if self.close_cb:
self.close_cb()
async def _async_forward(self) -> None:
"""forward handler transmits data over the remote connection"""
if self.remote.stream:
self.remote.ifc.update_header_cb(self.fwd_fifo.peek())
self.fwd_fifo.logging(logging.INFO, 'Forward to '
f'{self.remote.ifc.r_addr}:')
self.remote.ifc._writer.write(self.fwd_fifo.get())
await self.remote.ifc._writer.drain()


@@ -1,52 +0,0 @@
from messages import hex_dump_str, hex_dump_memory
class ByteFifo:
""" a byte FIFO buffer with trigger callback """
__slots__ = ('__buf', '__trigger_cb')
def __init__(self):
self.__buf = bytearray()
self.__trigger_cb = None
def reg_trigger(self, cb) -> None:
self.__trigger_cb = cb
def __iadd__(self, data):
self.__buf.extend(data)
return self
def __call__(self):
'''triggers the observer'''
if callable(self.__trigger_cb):
return self.__trigger_cb()
return None
def get(self, size: int = None) -> bytearray:
'''removes size bytes and returns them'''
if not size:
data = self.__buf
self.clear()
else:
data = self.__buf[:size]
# The fast delete syntax
self.__buf[:size] = b''
return data
def peek(self, size: int = None) -> bytearray:
'''returns size bytes without removing them'''
if not size:
return self.__buf
return self.__buf[:size]
def clear(self):
self.__buf = bytearray()
def __len__(self) -> int:
return len(self.__buf)
def __str__(self) -> str:
return hex_dump_str(self.__buf, self.__len__())
def logging(self, level, info):
hex_dump_memory(level, info, self.__buf, self.__len__())
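
A short usage sketch of the trigger mechanism above (assuming ByteFifo and its messages helpers are importable; the callback is a stand-in for the real stream writer):

```python
fifo = ByteFifo()
received = []
fifo.reg_trigger(lambda: received.append(bytes(fifo.get())))

fifo += b'\x01\x02'   # __iadd__ appends to the internal buffer
fifo()                # __call__ fires the registered trigger callback
assert received == [b'\x01\x02'] and len(fifo) == 0
```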


@@ -1,181 +0,0 @@
'''Config module handles the proxy configuration in the config.toml file'''
import shutil
import tomllib
import logging
from schema import Schema, And, Or, Use, Optional
class Config():
'''Static class Config reads and sanitizes the config.
Read config.toml file and sanitize it with read().
Get named parts of the config with get()'''
act_config = {}
def_config = {}
conf_schema = Schema({
'tsun': {
'enabled': Use(bool),
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535)
},
'solarman': {
'enabled': Use(bool),
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535)
},
'mqtt': {
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535),
'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)),
'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None))
},
'ha': {
'auto_conf_prefix': Use(str),
'discovery_prefix': Use(str),
'entity_prefix': Use(str),
'proxy_node_id': Use(str),
'proxy_unique_id': Use(str)
},
'gen3plus': {
'at_acl': {
Or('mqtt', 'tsun'): {
'allow': [str],
Optional('block', default=[]): [str]
}
}
},
'inverters': {
'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): {
Optional('monitor_sn', default=0): Use(int),
Optional('node_id', default=""): And(Use(str),
Use(lambda s: s + '/'
if len(s) > 0
and s[-1] != '/'
else s)),
Optional('client_mode'): {
'host': Use(str),
Optional('port', default=8899):
And(Use(int), lambda n: 1024 <= n <= 65535),
Optional('forward', default=False): Use(bool),
},
Optional('modbus_polling', default=True): Use(bool),
Optional('suggested_area', default=""): Use(str),
Optional('sensor_list', default=0x2b0): Use(int),
Optional('pv1'): {
Optional('type'): Use(str),
Optional('manufacturer'): Use(str),
},
Optional('pv2'): {
Optional('type'): Use(str),
Optional('manufacturer'): Use(str),
},
Optional('pv3'): {
Optional('type'): Use(str),
Optional('manufacturer'): Use(str),
},
Optional('pv4'): {
Optional('type'): Use(str),
Optional('manufacturer'): Use(str),
},
Optional('pv5'): {
Optional('type'): Use(str),
Optional('manufacturer'): Use(str),
},
Optional('pv6'): {
Optional('type'): Use(str),
Optional('manufacturer'): Use(str),
}
}
}
}, ignore_extra_keys=True
)
@classmethod
def class_init(cls) -> None | str: # pragma: no cover
try:
# make the default config transparent by copying it
# into the config.example file
logging.debug('Copy Default Config to config.example.toml')
shutil.copy2("default_config.toml",
"config/config.example.toml")
except Exception:
pass
err_str = cls.read()
del cls.conf_schema
return err_str
@classmethod
def _read_config_file(cls) -> dict: # pragma: no cover
usr_config = {}
try:
with open("config/config.toml", "rb") as f:
usr_config = tomllib.load(f)
except Exception as error:
err = f'Config.read: {error}'
logging.error(err)
logging.info(
'\n To create the missing config.toml file, '
'you can rename the template config.example.toml\n'
' and customize it for your scenario.\n')
return usr_config
@classmethod
def read(cls, path='') -> None | str:
'''Read config file, merge it with the default config
and sanitize the result'''
err = None
config = {}
logger = logging.getLogger('data')
try:
# read example config file as default configuration
cls.def_config = {}
with open(f"{path}default_config.toml", "rb") as f:
def_config = tomllib.load(f)
cls.def_config = cls.conf_schema.validate(def_config)
# overwrite the default values, with values from
# the config.toml file
usr_config = cls._read_config_file()
# merge the default and the user config
config = def_config.copy()
for key in ['tsun', 'solarman', 'mqtt', 'ha', 'inverters',
'gen3plus']:
if key in usr_config:
config[key] |= usr_config[key]
try:
cls.act_config = cls.conf_schema.validate(config)
except Exception as error:
err = f'Config.read: {error}'
logging.error(err)
# logging.debug(f'Read config: "{cls.act_config}" ')
except Exception as error:
err = f'Config.read: {error}'
logger.error(err)
cls.act_config = {}
return err
@classmethod
def get(cls, member: str = None):
'''Get a named attribute from the proxy config. If member ==
None it returns the complete config dict'''
if member:
return cls.act_config.get(member, {})
else:
return cls.act_config
@classmethod
def is_default(cls, member: str) -> bool:
'''Check if the member is the default value'''
return cls.act_config.get(member) == cls.def_config.get(member)
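
Note that `config[key] |= usr_config[key]` in read() is a shallow merge per top-level section (Python 3.9+ dict union). A minimal sketch of the semantics this relies on:

```python
def_mqtt = {'host': 'mqtt', 'port': 1883, 'user': None}
usr_mqtt = {'host': 'broker.local'}

merged = dict(def_mqtt)
merged |= usr_mqtt  # user keys win, missing keys keep their defaults
assert merged == {'host': 'broker.local', 'port': 1883, 'user': None}
```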


@@ -1,177 +0,0 @@
##########################################################################################
###
### T S U N - G E N 3 - P R O X Y
###
### from Stefan Allius
###
##########################################################################################
###
### The readme will give you an overview of the project:
### https://s-allius.github.io/tsun-gen3-proxy/
###
### The proxy supports different operation modes. Select the proper mode
### which depends on your inverter type and your inverter firmware.
### Please read:
### https://github.com/s-allius/tsun-gen3-proxy/wiki/Operation-Modes-Overview
###
### Here you will find a description of all configuration options:
### https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details
###
### The configuration uses the TOML format, which aims to be easy to read due to
### obvious semantics. You find more details here: https://toml.io/en/v1.0.0
###
##########################################################################################
##########################################################################################
##
## MQTT broker configuration
##
## In this block, you must configure the connection to your MQTT broker and specify the
## required credentials. As the proxy does not currently support an encrypted connection
## to the MQTT broker, it is strongly recommended that you do not use a public broker.
##
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#mqtt-broker-account
##
mqtt.host = 'mqtt' # URL or IP address of the mqtt broker
mqtt.port = 1883
mqtt.user = ''
mqtt.passwd = ''
##########################################################################################
##
## HOME ASSISTANT
##
## The proxy supports the MQTT autoconfiguration of Home Assistant (HA). The default
## values match the HA default configuration. If you need to change these or want to use
## a different MQTT client, you can adjust the prefixes of the MQTT topics below.
##
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#home-assistant
##
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
##########################################################################################
##
## GEN3 Proxy Mode Configuration
##
## In this block, you can configure an optional connection to the TSUN cloud for GEN3
## inverters. This connection is only required if you want to send data to the TSUN cloud
## to use the TSUN APPs or receive firmware updates.
##
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#tsun-cloud-for-gen3-inverter-only
##
tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
tsun.host = 'logger.talent-monitoring.com'
tsun.port = 5005
##########################################################################################
##
## GEN3PLUS Proxy Mode Configuration
##
## In this block, you can configure an optional connection to the TSUN cloud for GEN3PLUS
## inverters. This connection is only required if you want to send data to the TSUN cloud
## to use the TSUN APPs or receive firmware updates.
##
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-details#solarman-cloud-for-gen3plus-inverter-only
##
solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
solarman.host = 'iot.talent-monitoring.com'
solarman.port = 10000
##########################################################################################
###
### Inverter Definitions
###
### The proxy supports the simultaneous operation of several inverters, even of different
### types. A configuration block must be defined for each inverter, in which all necessary
### parameters must be specified. These depend on the operation mode used and also differ
### slightly depending on the inverter type.
###
### In addition, the PV modules connected to the individual inputs can be defined for
### documentation purposes; these are then displayed in Home Assistant.
###
### The proxy only accepts connections from known inverters. For test purposes this
### check can be switched off so that unknown serial numbers are accepted as well.
###
inverters.allow_all = false # only allow known inverters
##########################################################################################
##
## For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT
## definition. To do this, the corresponding configuration block is started with
## `[inverters."<16-digit serial number>"]` so that all subsequent parameters are assigned
## to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set
## in the configuration block
##
## The serial numbers of all GEN3 inverters start with `R17`!
##
[inverters."R170000000000001"]
node_id = '' # MQTT replacement for inverters serial number
suggested_area = '' # suggested installation area for home-assistant
modbus_polling = false # Disable optional MODBUS polling
pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
##########################################################################################
##
## For each GEN3PLUS inverter, the serial number of the inverter must be mapped to an MQTT
## definition. To do this, the corresponding configuration block is started with
## `[inverters."<16-digit serial number>"]` so that all subsequent parameters are assigned
## to this inverter. Further inverter-specific parameters (e.g. polling mode, client mode)
## can be set in the configuration block
##
## The serial numbers of all GEN3PLUS inverters start with `Y17` or `Y47`! Each GEN3PLUS
## inverter is supplied with a "Monitoring SN:". This can be found on a sticker enclosed
## with the inverter.
##
[inverters."Y170000000000001"]
monitor_sn = 2000000000 # The GEN3PLUS "Monitoring SN:"
node_id = '' # MQTT replacement for inverters serial number
suggested_area = '' # suggested installation place for home-assistant
modbus_polling = true # Enable optional MODBUS polling
# if your inverter supports SSL connections you must use the client_mode. Please uncomment
# the next line and configure the fixed IP of your inverter
#client_mode = {host = '192.168.0.1', port = 8899}
pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
##########################################################################################
###
### If the proxy mode is configured, commands from TSUN can be sent to the inverter via
### this connection or parameters (e.g. network credentials) can be queried. Filters can
### then be configured for the AT+ commands from the TSUN Cloud so that only certain
### accesses are permitted.
###
### An overview of all known AT+ commands can be found here:
### https://github.com/s-allius/tsun-gen3-proxy/wiki/AT--commands
###
[gen3plus.at_acl]
# filter for received commands from the internet
tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE']
tsun.block = []
# filter for received commands from the MQTT broker
mqtt.allow = ['AT+']
mqtt.block = []


@@ -1,194 +0,0 @@
import struct
import logging
from typing import Generator
from infos import Infos, Register
class RegisterMap:
__slots__ = ()
map = {
0x00092ba8: {'reg': Register.COLLECTOR_FW_VERSION},
0x000927c0: {'reg': Register.CHIP_TYPE},
0x00092f90: {'reg': Register.CHIP_MODEL},
0x00094ae8: {'reg': Register.MAC_ADDR},
0x00095a88: {'reg': Register.TRACE_URL},
0x00095aec: {'reg': Register.LOGGER_URL},
0x0000000a: {'reg': Register.PRODUCT_NAME},
0x00000014: {'reg': Register.MANUFACTURER},
0x0000001e: {'reg': Register.VERSION},
0x00000028: {'reg': Register.SERIAL_NUMBER},
0x00000032: {'reg': Register.EQUIPMENT_MODEL},
0x00013880: {'reg': Register.NO_INPUTS},
0xffffff00: {'reg': Register.INVERTER_CNT},
0xffffff01: {'reg': Register.UNKNOWN_SNR},
0xffffff02: {'reg': Register.UNKNOWN_MSG},
0xffffff03: {'reg': Register.INVALID_DATA_TYPE},
0xffffff04: {'reg': Register.INTERNAL_ERROR},
0xffffff05: {'reg': Register.UNKNOWN_CTRL},
0xffffff06: {'reg': Register.OTA_START_MSG},
0xffffff07: {'reg': Register.SW_EXCEPTION},
0xffffff08: {'reg': Register.POLLING_INTERVAL},
0xfffffffe: {'reg': Register.TEST_REG1},
0xffffffff: {'reg': Register.TEST_REG2},
0x00000640: {'reg': Register.OUTPUT_POWER},
0x000005dc: {'reg': Register.RATED_POWER},
0x00000514: {'reg': Register.INVERTER_TEMP},
0x000006a4: {'reg': Register.PV1_VOLTAGE},
0x00000708: {'reg': Register.PV1_CURRENT},
0x0000076c: {'reg': Register.PV1_POWER},
0x000007d0: {'reg': Register.PV2_VOLTAGE},
0x00000834: {'reg': Register.PV2_CURRENT},
0x00000898: {'reg': Register.PV2_POWER},
0x000008fc: {'reg': Register.PV3_VOLTAGE},
0x00000960: {'reg': Register.PV3_CURRENT},
0x000009c4: {'reg': Register.PV3_POWER},
0x00000a28: {'reg': Register.PV4_VOLTAGE},
0x00000a8c: {'reg': Register.PV4_CURRENT},
0x00000af0: {'reg': Register.PV4_POWER},
0x00000c1c: {'reg': Register.PV1_DAILY_GENERATION},
0x00000c80: {'reg': Register.PV1_TOTAL_GENERATION},
0x00000ce4: {'reg': Register.PV2_DAILY_GENERATION},
0x00000d48: {'reg': Register.PV2_TOTAL_GENERATION},
0x00000dac: {'reg': Register.PV3_DAILY_GENERATION},
0x00000e10: {'reg': Register.PV3_TOTAL_GENERATION},
0x00000e74: {'reg': Register.PV4_DAILY_GENERATION},
0x00000ed8: {'reg': Register.PV4_TOTAL_GENERATION},
0x00000b54: {'reg': Register.DAILY_GENERATION},
0x00000bb8: {'reg': Register.TOTAL_GENERATION},
0x000003e8: {'reg': Register.GRID_VOLTAGE},
0x0000044c: {'reg': Register.GRID_CURRENT},
0x000004b0: {'reg': Register.GRID_FREQUENCY},
0x000cfc38: {'reg': Register.CONNECT_COUNT},
0x000c3500: {'reg': Register.SIGNAL_STRENGTH},
0x000c96a8: {'reg': Register.POWER_ON_TIME},
0x000d0020: {'reg': Register.COLLECT_INTERVAL},
0x000cf850: {'reg': Register.DATA_UP_INTERVAL},
0x000c7f38: {'reg': Register.COMMUNICATION_TYPE},
0x00000190: {'reg': Register.EVENT_ALARM},
0x000001f4: {'reg': Register.EVENT_FAULT},
0x00000258: {'reg': Register.EVENT_BF1},
0x000002bc: {'reg': Register.EVENT_BF2},
0x00000064: {'reg': Register.INVERTER_STATUS},
0x00000fa0: {'reg': Register.BOOT_STATUS},
0x00001004: {'reg': Register.DSP_STATUS},
0x000010cc: {'reg': Register.WORK_MODE},
0x000011f8: {'reg': Register.OUTPUT_SHUTDOWN},
0x0000125c: {'reg': Register.MAX_DESIGNED_POWER},
0x000012c0: {'reg': Register.RATED_LEVEL},
0x00001324: {'reg': Register.INPUT_COEFFICIENT, 'ratio': 100/1024},
0x00001388: {'reg': Register.GRID_VOLT_CAL_COEF},
0x00002710: {'reg': Register.PROD_COMPL_TYPE},
0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024},
}
class InfosG3(Infos):
__slots__ = ()
def ha_confs(self, ha_prfx: str, node_id: str, snr: str,
sug_area: str = '') \
-> Generator[tuple[dict, str], None, None]:
'''Generator function yields a json register struct for home-assistant
auto configuration and a unique entity string
arguments:
prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
snr:str ==> serial number of the inverter, used to build unique
entity strings
sug_area:str ==> suggested area string from the config file'''
# iterate over RegisterMap.map and get the register values
for row in RegisterMap.map.values():
reg = row['reg']
res = self.ha_conf(reg, ha_prfx, node_id, snr, False, sug_area) # noqa: E501
if res:
yield res
def parse(self, buf, ind=0, node_id: str = '') -> \
Generator[tuple[str, bool], None, None]:
'''parses a data sequence received from the inverter and
stores the values in Infos.db
buf: buffer of the sequence to parse'''
result = struct.unpack_from('!l', buf, ind)
elms = result[0]
i = 0
ind += 4
while i < elms:
result = struct.unpack_from('!lB', buf, ind)
addr = result[0]
if addr not in RegisterMap.map:
row = None
info_id = -1
else:
row = RegisterMap.map[addr]
info_id = row['reg']
data_type = result[1]
ind += 5
if data_type == 0x54: # 'T' -> Pascal-String
str_len = buf[ind]
result = struct.unpack_from(f'!{str_len+1}p', buf,
ind)[0].decode(encoding='ascii',
errors='replace')
ind += str_len+1
elif data_type == 0x00: # 'Nul' -> end
i = elms # abort the loop
elif data_type == 0x41: # 'A' -> Nop ??
ind += 0
i += 1
continue
elif data_type == 0x42: # 'B' -> byte, int8
result = struct.unpack_from('!B', buf, ind)[0]
ind += 1
elif data_type == 0x49: # 'I' -> int32
result = struct.unpack_from('!l', buf, ind)[0]
ind += 4
elif data_type == 0x53: # 'S' -> short, int16
result = struct.unpack_from('!h', buf, ind)[0]
ind += 2
elif data_type == 0x46: # 'F' -> float32
result = round(struct.unpack_from('!f', buf, ind)[0], 2)
ind += 4
elif data_type == 0x4c: # 'L' -> long, int64
result = struct.unpack_from('!q', buf, ind)[0]
ind += 8
else:
self.inc_counter('Invalid_Data_Type')
logging.error(f"Infos.parse: data_type: {data_type}"
f" @0x{addr:04x} No:{i}"
" not supported")
return
result = self.__modify_val(row, result)
yield from self.__store_result(addr, result, info_id, node_id)
i += 1
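# A minimal sketch of the TLV stream parsed above (values hypothetical):
# a 4-byte element count, then per element a 4-byte address, a type byte
# and the typed payload. One float32 entry for OUTPUT_POWER (0x640):
#   buf = struct.pack('!llBf', 1, 0x640, 0x46, 123.4)
#   for key, update in InfosG3().parse(buf):
#       ...  # stores OUTPUT_POWER as float 123.4 in Infos.db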
def __modify_val(self, row, result):
if row and 'ratio' in row:
result = round(result * row['ratio'], 2)
return result
def __store_result(self, addr, result, info_id, node_id):
keys, level, unit, must_incr = self._key_obj(info_id)
if keys:
name, update = self.update_db(keys, must_incr, result)
yield keys[0], update
else:
update = False
name = str(f'info-id.0x{addr:x}')
if update:
self.tracer.log(level, f'[{node_id}] GEN3: {name} :'
f' {result}{unit}')

View File

@@ -1,9 +0,0 @@
from asyncio import StreamReader, StreamWriter
from inverter_base import InverterBase
from gen3.talent import Talent
class InverterG3(InverterBase):
def __init__(self, reader: StreamReader, writer: StreamWriter):
super().__init__(reader, writer, 'tsun', Talent)

View File

@@ -1,569 +0,0 @@
import struct
import logging
from zoneinfo import ZoneInfo
from datetime import datetime
from tzlocal import get_localzone
from async_ifc import AsyncIfc
from messages import Message, State
from modbus import Modbus
from config import Config
from gen3.infos_g3 import InfosG3
from infos import Register
logger = logging.getLogger('msg')
class Control:
def __init__(self, ctrl: int):
self.ctrl = ctrl
def __int__(self) -> int:
return self.ctrl
def is_ind(self) -> bool:
return (self.ctrl == 0x91)
def is_req(self) -> bool:
return (self.ctrl == 0x70)
def is_resp(self) -> bool:
return (self.ctrl == 0x99)
class Talent(Message):
TXT_UNKNOWN_CTRL = 'Unknown Ctrl'
def __init__(self, addr, ifc: "AsyncIfc", server_side: bool,
client_mode: bool = False, id_str=b''):
super().__init__('G3', ifc, server_side, self.send_modbus_cb,
mb_timeout=15)
ifc.rx_set_cb(self.read)
ifc.prot_set_timeout_cb(self._timeout)
ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn)
ifc.prot_set_update_header_cb(self._update_header)
self.addr = addr
self.conn_no = ifc.get_conn_no()
self.await_conn_resp_cnt = 0
self.id_str = id_str
self.contact_name = b''
self.contact_mail = b''
self.ts_offset = 0 # time offset between tsun cloud and local
self.db = InfosG3()
self.switch = {
0x00: self.msg_contact_info,
0x13: self.msg_ota_update,
0x22: self.msg_get_time,
0x99: self.msg_heartbeat,
0x71: self.msg_collector_data,
# 0x76:
0x77: self.msg_modbus,
# 0x78:
0x87: self.msg_modbus2,
0x04: self.msg_inverter_data,
}
self.log_lvl = {
0x00: logging.INFO,
0x13: logging.INFO,
0x22: logging.INFO,
0x99: logging.INFO,
0x71: logging.INFO,
# 0x76:
0x77: self.get_modbus_log_lvl,
# 0x78:
0x87: self.get_modbus_log_lvl,
0x04: logging.INFO,
}
'''
Our public methods
'''
def close(self) -> None:
logging.debug('Talent.close()')
# we have references to methods of this class in self.switch
# so we have to erase self.switch, otherwise this instance can't be
# deallocated by the garbage collector ==> we get a memory leak
self.switch.clear()
self.log_lvl.clear()
super().close()
def __set_serial_no(self, serial_no: str):
if self.unique_id == serial_no:
logger.debug(f'SerialNo: {serial_no}')
else:
inverters = Config.get('inverters')
# logger.debug(f'Inverters: {inverters}')
if serial_no in inverters:
inv = inverters[serial_no]
self.node_id = inv['node_id']
self.sug_area = inv['suggested_area']
self.modbus_polling = inv['modbus_polling']
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501
self.db.set_pv_module_details(inv)
if self.mb:
self.mb.set_node_id(self.node_id)
else:
self.node_id = ''
self.sug_area = ''
if 'allow_all' not in inverters or not inverters['allow_all']:
self.inc_counter('Unknown_SNR')
self.unique_id = None
logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})') # noqa: E501
return
logger.debug(f'SerialNo {serial_no} not known but accepted!')
self.unique_id = serial_no
self.db.set_db_def_value(Register.COLLECTOR_SNR, serial_no)
def read(self) -> float:
'''process all received messages in the _recv_buffer'''
self._read()
while True:
if not self.header_valid:
self.__parse_header(self.ifc.rx_peek(), self.ifc.rx_len())
if self.header_valid and \
self.ifc.rx_len() >= (self.header_len + self.data_len):
if self.state == State.init:
self.state = State.received # received 1st package
log_lvl = self.log_lvl.get(self.msg_id, logging.WARNING)
if callable(log_lvl):
log_lvl = log_lvl()
self.ifc.rx_log(log_lvl, f'Received from {self.addr}:'
f' BufLen: {self.ifc.rx_len()}'
f' HdrLen: {self.header_len}'
f' DtaLen: {self.data_len}')
self.__set_serial_no(self.id_str.decode("utf-8"))
self.__dispatch_msg()
self.__flush_recv_msg()
else:
return 0 # do not wait before sending a response
def forward(self) -> None:
'''add the current received msg to the forwarding queue'''
tsun = Config.get('tsun')
if tsun['enabled']:
buflen = self.header_len+self.data_len
buffer = self.ifc.rx_peek(buflen)
self.ifc.fwd_add(buffer)
self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:')
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'forwrd') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
def send_modbus_cb(self, modbus_pdu: bytearray, log_lvl: int, state: str):
if self.state != State.up:
logger.warning(f'[{self.node_id}] ignore MODBUS cmd,'
' because the state is not UP anymore')
return
self.__build_header(0x70, 0x77)
self.ifc.tx_add(b'\x00\x01\xa3\x28') # magic ?
self.ifc.tx_add(struct.pack('!B', len(modbus_pdu)))
self.ifc.tx_add(modbus_pdu)
self.__finish_send_msg()
self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:')
self.ifc.tx_flush()
def mb_timout_cb(self, exp_cnt):
self.mb_timer.start(self.mb_timeout)
if 2 == (exp_cnt % 30):
# logging.info("Regular Modbus Status request")
self._send_modbus_cmd(Modbus.READ_REGS, 0x2000, 96, logging.DEBUG)
else:
self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG)
def _init_new_client_conn(self) -> bool:
contact_name = self.contact_name
contact_mail = self.contact_mail
logger.info(f'name: {contact_name} mail: {contact_mail}')
self.msg_id = 0
self.await_conn_resp_cnt += 1
self.__build_header(0x91)
self.ifc.tx_add(struct.pack(f'!{len(contact_name)+1}p'
f'{len(contact_mail)+1}p',
contact_name, contact_mail))
self.__finish_send_msg()
return True
'''
Our private methods
'''
def __flow_str(self, server_side: bool, type: str): # noqa: F821
switch = {
'rx': ' <',
'tx': ' >',
'forwrd': '<< ',
'drop': ' xx',
'rxS': '> ',
'txS': '< ',
'forwrdS': ' >>',
'dropS': 'xx ',
}
if server_side:
type += 'S'
return switch.get(type, '???')
def _timestamp(self): # pragma: no cover
'''returns the timestamp of the inverter as local time
since 1.1.1970 in msec'''
# convert local time to epoch
ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
return round(ts*1000)
def _utcfromts(self, ts: float):
'''converts inverter timestamp into unix time (epoch)'''
dt = datetime.fromtimestamp(ts/1000, tz=ZoneInfo("UTC")). \
replace(tzinfo=get_localzone())
return dt.timestamp()
def _utc(self): # pragma: no cover
'''returns unix time (epoch)'''
return datetime.now().timestamp()
def _update_header(self, _forward_buffer):
'''update header for message before forwarding,
add time offset to timestamp'''
_len = len(_forward_buffer)
ofs = 0
while ofs < _len:
result = struct.unpack_from('!lB', _forward_buffer, 0)
msg_len = 4 + result[0]
id_len = result[1] # len of variable id string
if _len < 2*id_len + 21:
return
result = struct.unpack_from('!B', _forward_buffer, id_len+6)
msg_code = result[0]
if msg_code == 0x71 or msg_code == 0x04:
result = struct.unpack_from('!q', _forward_buffer, 13+2*id_len)
ts = result[0] + self.ts_offset
logger.debug(f'offset: {self.ts_offset:08x}'
f' proxy-time: {ts:08x}')
struct.pack_into('!q', _forward_buffer, 13+2*id_len, ts)
ofs += msg_len
# check if there is a complete header in the buffer, parse it
# and set
# self.header_len
# self.data_len
# self.id_str
# self.ctrl
# self.msg_id
#
# if the header is incomplete, then self.header_len is still 0
#
def __parse_header(self, buf: bytes, buf_len: int) -> None:
if (buf_len < 5): # enough bytes to read len and id_len?
return
result = struct.unpack_from('!lB', buf, 0)
msg_len = result[0] # len of complete message
id_len = result[1] # len of variable id string
if id_len > 17:
logger.warning(f'len of ID string must == 16 but is {id_len}')
self.inc_counter('Invalid_Msg_Format')
# erase broken recv buffer
self.ifc.rx_clear()
return
hdr_len = 5+id_len+2
if (buf_len < hdr_len): # enough bytes for complete header?
return
result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4)
# store parsed header values in the class
self.id_str = result[0]
self.ctrl = Control(result[1])
self.msg_id = result[2]
self.data_len = msg_len-id_len-3
self.header_len = hdr_len
self.header_valid = True
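# Frame layout sketch for the header parsed above (the id string is a
# hypothetical 16-byte logger serial; 0x91/0x22 are ctrl and msg id):
#   id_str = b'R170000000000001'
#   hdr = struct.pack(f'!l{len(id_str)+1}pBB',
#                     len(id_str) + 3, id_str, 0x91, 0x22)
#   struct.unpack_from('!lB', hdr, 0)   # -> (19, 16)
# msg_len counts everything after the 4-byte length field, so
# data_len = msg_len - id_len - 3 as computed here.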
def __build_header(self, ctrl, msg_id=None) -> None:
if not msg_id:
msg_id = self.msg_id
self.send_msg_ofs = self.ifc.tx_len()
self.ifc.tx_add(struct.pack(f'!l{len(self.id_str)+1}pBB',
0, self.id_str, ctrl, msg_id))
fnc = self.switch.get(msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'tx') +
f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}')
def __finish_send_msg(self) -> None:
_len = self.ifc.tx_len() - self.send_msg_ofs
struct.pack_into('!l', self.ifc.tx_peek(), self.send_msg_ofs,
_len-4)
def __dispatch_msg(self) -> None:
fnc = self.switch.get(self.msg_id, self.msg_unknown)
if self.unique_id:
logger.info(self.__flow_str(self.server_side, 'rx') +
f' Ctl: {int(self.ctrl):#02x} ({self.state}) '
f'Msg: {fnc.__name__!r}')
fnc()
else:
logger.info(self.__flow_str(self.server_side, 'drop') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
def __flush_recv_msg(self) -> None:
self.ifc.rx_get(self.header_len+self.data_len)
self.header_valid = False
'''
Message handler methods
'''
def msg_contact_info(self):
if self.ctrl.is_ind():
if self.server_side and self.__process_contact_info():
self.__build_header(0x91)
self.ifc.tx_add(b'\x01')
self.__finish_send_msg()
# don't forward this contact info here, we will build one
# when the remote connection is established
elif self.await_conn_resp_cnt > 0:
self.await_conn_resp_cnt -= 1
else:
self.forward()
else:
logger.warning(self.TXT_UNKNOWN_CTRL)
self.inc_counter('Unknown_Ctrl')
self.forward()
def __process_contact_info(self) -> bool:
buf = self.ifc.rx_peek()
result = struct.unpack_from('!B', buf, self.header_len)
name_len = result[0]
if self.data_len == 1: # this is a response with one status byte
return False
if self.data_len >= name_len+2:
result = struct.unpack_from(f'!{name_len+1}pB', buf,
self.header_len)
self.contact_name = result[0]
mail_len = result[1]
logger.info(f'name: {self.contact_name}')
result = struct.unpack_from(f'!{mail_len+1}p', buf,
self.header_len+name_len+1)
self.contact_mail = result[0]
logger.info(f'mail: {self.contact_mail}')
return True
def msg_get_time(self):
if self.ctrl.is_ind():
if self.data_len == 0:
if self.state == State.up:
self.state = State.pend # block MODBUS cmds
ts = self._timestamp()
logger.debug(f'time: {ts:08x}')
self.__build_header(0x91)
self.ifc.tx_add(struct.pack('!q', ts))
self.__finish_send_msg()
elif self.data_len >= 8:
ts = self._timestamp()
result = struct.unpack_from('!q', self.ifc.rx_peek(),
self.header_len)
self.ts_offset = result[0]-ts
if self.ifc.remote.stream:
self.ifc.remote.stream.ts_offset = self.ts_offset
logger.debug(f'tsun-time: {int(result[0]):08x}'
f' proxy-time: {ts:08x}'
f' offset: {self.ts_offset}')
return # ignore received response
else:
logger.warning(self.TXT_UNKNOWN_CTRL)
self.inc_counter('Unknown_Ctrl')
self.forward()
def msg_heartbeat(self):
if self.ctrl.is_ind():
if self.data_len == 9:
self.state = State.up # allow MODBUS cmds
if (self.modbus_polling):
self.mb_timer.start(self.mb_first_timeout)
self.db.set_db_def_value(Register.POLLING_INTERVAL,
self.mb_timeout)
self.__build_header(0x99)
self.ifc.tx_add(b'\x02')
self.__finish_send_msg()
result = struct.unpack_from('!Bq', self.ifc.rx_peek(),
self.header_len)
resp_code = result[0]
ts = result[1]+self.ts_offset
logger.debug(f'inv-time: {int(result[1]):08x}'
f' tsun-time: {ts:08x}'
f' offset: {self.ts_offset}')
struct.pack_into('!Bq', self.ifc.rx_peek(),
self.header_len, resp_code, ts)
elif self.ctrl.is_resp():
result = struct.unpack_from('!B', self.ifc.rx_peek(),
self.header_len)
resp_code = result[0]
logging.debug(f'Heartbeat-RespCode: {resp_code}')
return
else:
logger.warning(self.TXT_UNKNOWN_CTRL)
self.inc_counter('Unknown_Ctrl')
self.forward()
def parse_msg_header(self):
result = struct.unpack_from('!lB', self.ifc.rx_peek(),
self.header_len)
data_id = result[0] # data ID of the message
id_len = result[1] # len of variable id string
logger.debug(f'Data_ID: 0x{data_id:08x} id_len: {id_len}')
msg_hdr_len = 5+id_len+9
result = struct.unpack_from(f'!{id_len+1}pBq', self.ifc.rx_peek(),
self.header_len + 4)
timestamp = result[2]
logger.debug(f'ID: {result[0]} B: {result[1]}')
logger.debug(f'time: {timestamp:08x}')
# logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime(
# "%Y-%m-%d %H:%M:%S")}')
return msg_hdr_len, timestamp
def msg_collector_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self.ifc.tx_add(b'\x01')
self.__finish_send_msg()
self.__process_data()
elif self.ctrl.is_resp():
return # ignore received response
else:
logger.warning(self.TXT_UNKNOWN_CTRL)
self.inc_counter('Unknown_Ctrl')
self.forward()
def msg_inverter_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self.ifc.tx_add(b'\x01')
self.__finish_send_msg()
self.__process_data()
self.state = State.up # allow MODBUS cmds
if (self.modbus_polling):
self.mb_timer.start(self.mb_first_timeout)
self.db.set_db_def_value(Register.POLLING_INTERVAL,
self.mb_timeout)
elif self.ctrl.is_resp():
return # ignore received response
else:
logger.warning(self.TXT_UNKNOWN_CTRL)
self.inc_counter('Unknown_Ctrl')
self.forward()
def __process_data(self):
msg_hdr_len, ts = self.parse_msg_header()
for key, update in self.db.parse(self.ifc.rx_peek(), self.header_len
+ msg_hdr_len, self.node_id):
if update:
self._set_mqtt_timestamp(key, self._utcfromts(ts))
self.new_data[key] = True
def msg_ota_update(self):
if self.ctrl.is_req():
self.inc_counter('OTA_Start_Msg')
elif self.ctrl.is_ind():
pass # Ok, nothing to do
else:
logger.warning(self.TXT_UNKNOWN_CTRL)
self.inc_counter('Unknown_Ctrl')
self.forward()
def parse_modbus_header(self):
msg_hdr_len = 5
result = struct.unpack_from('!lBB', self.ifc.rx_peek(),
self.header_len)
modbus_len = result[1]
return msg_hdr_len, modbus_len
def parse_modbus_header2(self):
msg_hdr_len = 6
result = struct.unpack_from('!lBBB', self.ifc.rx_peek(),
self.header_len)
modbus_len = result[2]
return msg_hdr_len, modbus_len
def get_modbus_log_lvl(self) -> int:
if self.ctrl.is_req():
return logging.INFO
elif self.ctrl.is_ind() and self.server_side:
return self.mb.last_log_lvl
return logging.WARNING
def msg_modbus(self):
hdr_len, _ = self.parse_modbus_header()
self.__msg_modbus(hdr_len)
def msg_modbus2(self):
hdr_len, _ = self.parse_modbus_header2()
self.__msg_modbus(hdr_len)
def __msg_modbus(self, hdr_len):
data = self.ifc.rx_peek()[self.header_len:
self.header_len+self.data_len]
if self.ctrl.is_req():
rstream = self.ifc.remote.stream
if rstream.mb.recv_req(data[hdr_len:], rstream.msg_forward):
self.inc_counter('Modbus_Command')
else:
self.inc_counter('Invalid_Msg_Format')
elif self.ctrl.is_ind():
self.modbus_elms = 0
# logger.debug(f'Modbus Ind MsgLen: {modbus_len}')
if not self.server_side:
logger.warning('Unknown Message')
self.inc_counter('Unknown_Msg')
return
for key, update, _ in self.mb.recv_resp(self.db, data[
hdr_len:]):
if update:
self._set_mqtt_timestamp(key, self._utc())
self.new_data[key] = True
self.modbus_elms += 1 # count for unit tests
else:
logger.warning(self.TXT_UNKNOWN_CTRL)
self.inc_counter('Unknown_Ctrl')
self.forward()
def msg_forward(self):
self.forward()
def msg_unknown(self):
logger.warning(f"Unknow Msg: ID:{self.msg_id}")
self.inc_counter('Unknown_Msg')
self.forward()

View File

@@ -1,204 +0,0 @@
from typing import Generator
from infos import Infos, Register, ProxyMode, Fmt
class RegisterMap:
# make the class read/only by using __slots__
__slots__ = ()
FMT_2_16BIT_VAL = '!HH'
FMT_3_16BIT_VAL = '!HHH'
FMT_4_16BIT_VAL = '!HHHH'
map = {
# 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': '<L'}, # noqa: E501
0x41020018: {'reg': Register.DATA_UP_INTERVAL, 'fmt': '<B', 'ratio': 60, 'dep': ProxyMode.SERVER}, # noqa: E501
0x41020019: {'reg': Register.COLLECT_INTERVAL, 'fmt': '<B', 'quotient': 60, 'dep': ProxyMode.SERVER}, # noqa: E501
0x4102001a: {'reg': Register.HEARTBEAT_INTERVAL, 'fmt': '<B', 'ratio': 1}, # noqa: E501
0x4102001b: {'reg': None, 'fmt': '<B', 'const': 1}, # noqa: E501 Max No Of Connected Devices
0x4102001c: {'reg': Register.SIGNAL_STRENGTH, 'fmt': '<B', 'ratio': 1, 'dep': ProxyMode.SERVER}, # noqa: E501
0x4102001d: {'reg': None, 'fmt': '<B', 'const': 1}, # noqa: E501
0x4102001e: {'reg': Register.CHIP_MODEL, 'fmt': '!40s'}, # noqa: E501
0x41020046: {'reg': Register.MAC_ADDR, 'fmt': '!6B', 'func': Fmt.mac}, # noqa: E501
0x4102004c: {'reg': Register.IP_ADDRESS, 'fmt': '!16s'}, # noqa: E501
0x4102005c: {'reg': None, 'fmt': '<B', 'const': 15}, # noqa: E501
0x4102005e: {'reg': None, 'fmt': '<B', 'const': 1}, # noqa: E501 No Of Sensors (ListLen)
0x4102005f: {'reg': Register.SENSOR_LIST, 'fmt': '<H', 'func': Fmt.hex4}, # noqa: E501
0x41020061: {'reg': None, 'fmt': '<HB', 'const': (15, 255)}, # noqa: E501
0x41020064: {'reg': Register.COLLECTOR_FW_VERSION, 'fmt': '!40s'}, # noqa: E501
0x4102008c: {'reg': None, 'fmt': '<BB', 'const': (254, 254)}, # noqa: E501
0x4102008e: {'reg': None, 'fmt': '<B'}, # noqa: E501 Encryption Certificate File Status
0x4102008f: {'reg': None, 'fmt': '!40s'}, # noqa: E501
0x410200b7: {'reg': Register.SSID, 'fmt': '!40s'}, # noqa: E501
0x4201000c: {'reg': Register.SENSOR_LIST, 'fmt': '<H', 'func': Fmt.hex4}, # noqa: E501
0x4201001c: {'reg': Register.POWER_ON_TIME, 'fmt': '<H', 'ratio': 1, 'dep': ProxyMode.SERVER}, # noqa: E501, or packet number
0x42010020: {'reg': Register.SERIAL_NUMBER, 'fmt': '!16s'}, # noqa: E501
# Start MODBUS Block: 0x3000 (R/O Measurements)
0x420100c0: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501
0x420100c2: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501
0x420100c4: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501
0x420100c6: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501
0x420100c8: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501
0x420100ca: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501
0x420100cc: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501
# 0x420100ce
0x420100d0: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501
0x420100d2: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100d4: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100d6: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100d8: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501
# 0x420100da
0x420100dc: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501
0x420100de: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e0: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e2: {'reg': Register.PV1_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100e4: {'reg': Register.PV1_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e6: {'reg': Register.PV2_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e8: {'reg': Register.PV2_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100ea: {'reg': Register.PV2_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100ec: {'reg': Register.PV3_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100ee: {'reg': Register.PV3_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100f0: {'reg': Register.PV3_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100f2: {'reg': Register.PV4_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100f4: {'reg': Register.PV4_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100f6: {'reg': Register.PV4_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100f8: {'reg': Register.DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100fa: {'reg': Register.TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x420100fe: {'reg': Register.PV1_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x42010100: {'reg': Register.PV1_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x42010104: {'reg': Register.PV2_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x42010106: {'reg': Register.PV2_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x4201010a: {'reg': Register.PV3_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x4201010c: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x42010110: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x42010112: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x42010116: {'reg': Register.INV_UNKNOWN_1, 'fmt': '!H'}, # noqa: E501
# Start MODBUS Block: 0x2000 (R/W Config Parameters)
0x42010118: {'reg': Register.BOOT_STATUS, 'fmt': '!H'},
0x4201011a: {'reg': Register.DSP_STATUS, 'fmt': '!H'},
0x4201011c: {'reg': None, 'fmt': '!H', 'const': 1}, # noqa: E501
0x4201011e: {'reg': Register.WORK_MODE, 'fmt': '!H'},
0x42010124: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'},
0x42010126: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H'},
0x42010128: {'reg': Register.RATED_LEVEL, 'fmt': '!H'},
0x4201012a: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501
0x4201012c: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'},
0x4201012e: {'reg': None, 'fmt': '!H', 'const': 1024}, # noqa: E501
0x42010130: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1024, 1, 0xffff, 1)}, # noqa: E501
0x42010138: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'},
0x4201013a: {'reg': None, 'fmt': FMT_3_16BIT_VAL, 'const': (0x68, 0x68, 0x500)}, # noqa: E501
0x42010140: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x9cd, 0x7b6, 0x139c, 0x1324)}, # noqa: E501
0x42010148: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1, 0x7ae, 0x40f, 0x41)}, # noqa: E501
0x42010150: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0xf, 0xa64, 0xa64, 0x6)}, # noqa: E501
0x42010158: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x6, 0x9f6, 0x128c, 0x128c)}, # noqa: E501
0x42010160: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x10, 0x10, 0x1452, 0x1452)}, # noqa: E501
0x42010168: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x10, 0x10, 0x151, 0x5)}, # noqa: E501
0x42010170: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501
0x42010172: {'reg': None, 'fmt': FMT_3_16BIT_VAL, 'const': (0x1, 0x139c, 0xfa0)}, # noqa: E501
0x42010178: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x4e, 0x66, 0x3e8, 0x400)}, # noqa: E501
0x42010180: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x9ce, 0x7a8, 0x139c, 0x1326)}, # noqa: E501
0x42010188: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x0, 0x0, 0x0, 0)}, # noqa: E501
0x42010190: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x0, 0x0, 1024, 1024)}, # noqa: E501
0x42010198: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0, 0, 0xffff, 0)}, # noqa: E501
0x420101a0: {'reg': None, 'fmt': FMT_2_16BIT_VAL, 'const': (0x0, 0x0)}, # noqa: E501
0xffffff02: {'reg': Register.POLLING_INTERVAL},
# 0x4281001c: {'reg': Register.POWER_ON_TIME, 'fmt': '<H', 'ratio': 1}, # noqa: E501
}
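# Each map key packs the message context into one 32-bit value; a sketch
# of the split used by parse() and build() below:
#   idx = 0x420100d2
#   addr = idx & 0xffff          # 0x00d2 -> offset in the data frame
#   ftype = (idx >> 16) & 0xff   # 0x01   -> frame type
#   mtype = (idx >> 24) & 0xff   # 0x42   -> message type (e.g. 0x4210 data)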
class InfosG3P(Infos):
__slots__ = ('client_mode', )
def __init__(self, client_mode: bool):
super().__init__()
self.client_mode = client_mode
self.set_db_def_value(Register.MANUFACTURER, 'TSUN')
self.set_db_def_value(Register.EQUIPMENT_MODEL, 'TSOL-MSxx00')
self.set_db_def_value(Register.CHIP_TYPE, 'IGEN TECH')
self.set_db_def_value(Register.NO_INPUTS, 4)
def __hide_topic(self, row: dict) -> bool:
if 'dep' in row:
mode = row['dep']
if self.client_mode:
return mode != ProxyMode.CLIENT
else:
return mode != ProxyMode.SERVER
return False
def ha_confs(self, ha_prfx: str, node_id: str, snr: str,
sug_area: str = '') \
-> Generator[tuple[dict, str], None, None]:
'''Generator function yields a json register struct for home-assistant
auto configuration and a unique entity string
arguments:
prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
snr:str ==> serial number of the inverter, used to build unique
entity strings
sug_area:str ==> suggested area string from the config file'''
# iterate over RegisterMap.map and get the register values
for row in RegisterMap.map.values():
info_id = row['reg']
if self.__hide_topic(row):
res = self.ha_remove(info_id, node_id, snr) # noqa: E501
else:
res = self.ha_conf(info_id, ha_prfx, node_id, snr, False, sug_area) # noqa: E501
if res:
yield res
def parse(self, buf, msg_type: int, rcv_ftype: int, node_id: str = '') \
-> Generator[tuple[str, bool], None, None]:
'''parses a data sequence received from the inverter and
stores the values in Infos.db
buf: buffer of the sequence to parse'''
for idx, row in RegisterMap.map.items():
addr = idx & 0xffff
ftype = (idx >> 16) & 0xff
mtype = (idx >> 24) & 0xff
if ftype != rcv_ftype or mtype != msg_type:
continue
if not isinstance(row, dict):
continue
info_id = row['reg']
result = Fmt.get_value(buf, addr, row)
keys, level, unit, must_incr = self._key_obj(info_id)
if keys:
name, update = self.update_db(keys, must_incr, result)
yield keys[0], update
else:
name = str(f'info-id.0x{addr:x}')
update = False
if update:
self.tracer.log(level, f'[{node_id}] GEN3PLUS: {name}'
f' : {result}{unit}')
def build(self, len, msg_type: int, rcv_ftype: int):
buf = bytearray(len)
for idx, row in RegisterMap.map.items():
addr = idx & 0xffff
ftype = (idx >> 16) & 0xff
mtype = (idx >> 24) & 0xff
if ftype != rcv_ftype or mtype != msg_type:
continue
if not isinstance(row, dict):
continue
if 'const' in row:
val = row['const']
else:
info_id = row['reg']
val = self.get_db_value(info_id)
if not val:
continue
Fmt.set_value(buf, addr, row, val)
return buf

View File

@@ -1,15 +0,0 @@
from asyncio import StreamReader, StreamWriter
from inverter_base import InverterBase
from gen3plus.solarman_v5 import SolarmanV5
from gen3plus.solarman_emu import SolarmanEmu
class InverterG3P(InverterBase):
def __init__(self, reader: StreamReader, writer: StreamWriter,
client_mode: bool = False):
remote_prot = None
if client_mode:
remote_prot = SolarmanEmu
super().__init__(reader, writer, 'solarman',
SolarmanV5, client_mode, remote_prot)

View File

@@ -1,138 +0,0 @@
import logging
import struct
from async_ifc import AsyncIfc
from gen3plus.solarman_v5 import SolarmanBase
from my_timer import Timer
from infos import Register
logger = logging.getLogger('msg')
class SolarmanEmu(SolarmanBase):
def __init__(self, addr, ifc: "AsyncIfc",
server_side: bool, client_mode: bool):
super().__init__(addr, ifc, server_side=False,
_send_modbus_cb=None,
mb_timeout=8)
logging.debug('SolarmanEmu.init()')
self.db = ifc.remote.stream.db
self.snr = ifc.remote.stream.snr
self.hb_timeout = 60
'''current heartbeat timeout from the last response message'''
self.data_up_inv = self.db.get_db_value(Register.DATA_UP_INTERVAL)
'''time interval for getting new MQTT data messages'''
self.hb_timer = Timer(self.send_heartbeat_cb, self.node_id)
self.data_timer = Timer(self.send_data_cb, self.node_id)
self.last_sync = self._emu_timestamp()
'''timestamp when we sent the last sync message (0x4110)'''
self.pkt_cnt = 0
'''last sent packet number'''
self.switch = {
0x4210: 'msg_data_ind', # real time data
0x1210: self.msg_response, # at least every 5 minutes
0x4710: 'msg_hbeat_ind', # heartbeat
0x1710: self.msg_response, # every 2 minutes
0x4110: 'msg_dev_ind', # device data, sync start
0x1110: self.msg_response, # every 3 hours
}
self.log_lvl = {
0x4110: logging.INFO, # device data, sync start
0x1110: logging.INFO, # every 3 hours
0x4210: logging.INFO, # real time data
0x1210: logging.INFO, # at least every 5 minutes
0x4710: logging.DEBUG, # heartbeat
0x1710: logging.DEBUG, # every 2 minutes
}
'''
Our public methods
'''
def close(self) -> None:
logging.info('SolarmanEmu.close()')
# we have references to methods of this class in self.switch
# so we have to erase self.switch, otherwise this instance can't be
# deallocated by the garbage collector ==> we get a memory leak
self.switch.clear()
self.log_lvl.clear()
self.hb_timer.close()
self.data_timer.close()
self.db = None
super().close()
def _set_serial_no(self, snr: int):
logging.debug(f'SolarmanEmu._set_serial_no, snr: {snr}')
self.unique_id = str(snr)
def _init_new_client_conn(self) -> bool:
logging.debug('SolarmanEmu.init_new()')
self.data_timer.start(self.data_up_inv)
return False
def next_pkt_cnt(self):
'''get the next packet number'''
self.pkt_cnt = (self.pkt_cnt + 1) & 0xffffffff
return self.pkt_cnt
def seconds_since_last_sync(self):
'''get seconds since last 0x4110 message was sent'''
return self._emu_timestamp() - self.last_sync
def send_heartbeat_cb(self, exp_cnt):
'''send a heartbeat to the TSUN cloud'''
self._build_header(0x4710)
self.ifc.tx_add(struct.pack('<B', 0))
self._finish_send_msg()
log_lvl = self.log_lvl.get(0x4710, logging.WARNING)
self.ifc.tx_log(log_lvl, 'Send heartbeat:')
self.ifc.tx_flush()
def send_data_cb(self, exp_cnt):
'''send an inverter data message to the TSUN cloud'''
self.hb_timer.start(self.hb_timeout)
self.data_timer.start(self.data_up_inv)
_len = 420
ftype = 1
build_msg = self.db.build(_len, 0x42, ftype)
self._build_header(0x4210)
self.ifc.tx_add(
struct.pack(
'<BHLLLHL', ftype, 0x02b0,
self._emu_timestamp(),
self.seconds_since_last_sync(),
self.time_ofs,
1, # offset 0x1a
self.next_pkt_cnt()))
self.ifc.tx_add(build_msg[0x20:])
self._finish_send_msg()
log_lvl = self.log_lvl.get(0x4210, logging.WARNING)
self.ifc.tx_log(log_lvl, 'Send inv-data:')
self.ifc.tx_flush()
'''
Message handler methods
'''
def msg_response(self):
'''handle a received response from the TSUN cloud'''
logger.debug("EMU received rsp:")
_, _, ts, hb = super().msg_response()
logger.debug(f"EMU ts:{ts} hb:{hb}")
self.hb_timeout = hb
self.time_ofs = ts - self._emu_timestamp()
self.hb_timer.start(self.hb_timeout)
def msg_unknown(self):
'''counts an unknown or unexpected message from the TSUN cloud'''
logger.warning(f"EMU Unknown Msg: ID:{int(self.control):#04x}")
self.inc_counter('Unknown_Msg')

View File

@@ -1,706 +0,0 @@
import struct
import logging
import time
import asyncio
from datetime import datetime
from async_ifc import AsyncIfc
from messages import hex_dump_memory, Message, State
from config import Config
from modbus import Modbus
from gen3plus.infos_g3p import InfosG3P
from infos import Register, Fmt
logger = logging.getLogger('msg')
class Sequence():
def __init__(self, server_side: bool):
self.rcv_idx = 0
self.snd_idx = 0
self.server_side = server_side
def set_recv(self, val: int):
if self.server_side:
self.rcv_idx = val >> 8
self.snd_idx = val & 0xff
else:
self.rcv_idx = val & 0xff
self.snd_idx = val >> 8
def get_send(self):
self.snd_idx += 1
self.snd_idx &= 0xff
if self.server_side:
return (self.rcv_idx << 8) | self.snd_idx
else:
return (self.snd_idx << 8) | self.rcv_idx
def __str__(self):
return f'{self.rcv_idx:02x}:{self.snd_idx:02x}'
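# Usage sketch (hypothetical values): on the server side the high byte of
# the received word is our rcv_idx, the low byte the peer's counter.
#   seq = Sequence(server_side=True)
#   seq.set_recv(0x1234)   # rcv_idx = 0x12, snd_idx = 0x34
#   hex(seq.get_send())    # '0x1235' - echo peer idx, bump our own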
class SolarmanBase(Message):
def __init__(self, addr, ifc: "AsyncIfc", server_side: bool,
_send_modbus_cb, mb_timeout: int):
super().__init__('G3P', ifc, server_side, _send_modbus_cb,
mb_timeout)
ifc.rx_set_cb(self.read)
ifc.prot_set_timeout_cb(self._timeout)
ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn)
ifc.prot_set_update_header_cb(self.__update_header)
self.addr = addr
self.conn_no = ifc.get_conn_no()
self.header_len = 11 # overwrite constructor in class Message
self.control = 0
self.seq = Sequence(server_side)
self.snr = 0
self.time_ofs = 0
def read(self) -> float:
'''process all received messages in the _recv_buffer'''
self._read()
while True:
if not self.header_valid:
self.__parse_header(self.ifc.rx_peek(),
self.ifc.rx_len())
if self.header_valid and self.ifc.rx_len() >= \
(self.header_len + self.data_len+2):
self.__process_complete_received_msg()
self.__flush_recv_msg()
else:
return 0 # wait 0s before sending a response
'''
Our public methods
'''
def _flow_str(self, server_side: bool, type: str): # noqa: F821
switch = {
'rx': ' <',
'tx': ' >',
'forwrd': '<< ',
'drop': ' xx',
'rxS': '> ',
'txS': '< ',
'forwrdS': ' >>',
'dropS': 'xx ',
}
if server_side:
type += 'S'
return switch.get(type, '???')
def get_fnc_handler(self, ctrl):
fnc = self.switch.get(ctrl, self.msg_unknown)
if callable(fnc):
return fnc, repr(fnc.__name__)
else:
return self.msg_unknown, repr(fnc)
def _build_header(self, ctrl) -> None:
'''build header for new transmit message'''
self.send_msg_ofs = self.ifc.tx_len()
self.ifc.tx_add(struct.pack(
'<BHHHL', 0xA5, 0, ctrl, self.seq.get_send(), self.snr))
_fnc, _str = self.get_fnc_handler(ctrl)
logger.info(self._flow_str(self.server_side, 'tx') +
f' Ctl: {int(ctrl):#04x} Msg: {_str}')
def _finish_send_msg(self) -> None:
'''finish the transmit message, set length and checksum'''
_len = self.ifc.tx_len() - self.send_msg_ofs
struct.pack_into('<H', self.ifc.tx_peek(), self.send_msg_ofs+1,
_len-11)
check = sum(self.ifc.tx_peek()[
self.send_msg_ofs+1:self.send_msg_ofs + _len]) & 0xff
self.ifc.tx_add(struct.pack('<BB', check, 0x15)) # crc & stop
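# Trailer sketch for the V5 frame built above (seq and snr hypothetical):
# the 8-bit sum runs from the length field up to the checksum byte.
#   frame = bytearray(struct.pack('<BHHHL', 0xA5, 0, 0x4710, 0x0100, 1))
#   frame += struct.pack('<B', 0)                  # 1-byte payload
#   struct.pack_into('<H', frame, 1, len(frame) - 11)
#   check = sum(frame[1:]) & 0xff                  # sum excludes start byte
#   frame += struct.pack('<BB', check, 0x15)       # checksum & stop byte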
def _timestamp(self):
# UTC as epoch
return int(time.time()) # pragma: no cover
def _emu_timestamp(self):
'''timestamp for an emulated inverter (realtime - 1 day)'''
one_day = 24*60*60
return self._timestamp()-one_day
'''
Our private methods
'''
def __update_header(self, _forward_buffer):
'''update header for message before forwarding,
set sequence and checksum'''
_len = len(_forward_buffer)
ofs = 0
while ofs < _len:
result = struct.unpack_from('<BH', _forward_buffer, ofs)
data_len = result[1] # payload length of this frame
struct.pack_into('<H', _forward_buffer, ofs+5,
self.seq.get_send())
check = sum(_forward_buffer[ofs+1:ofs+data_len+11]) & 0xff
struct.pack_into('<B', _forward_buffer, ofs+data_len+11, check)
ofs += (13 + data_len)
def __process_complete_received_msg(self):
log_lvl = self.log_lvl.get(self.control, logging.WARNING)
if callable(log_lvl):
log_lvl = log_lvl()
self.ifc.rx_log(log_lvl, f'Received from {self.addr}:')
# self._recv_buffer, self.header_len +
# self.data_len+2)
if self.__trailer_is_ok(self.ifc.rx_peek(), self.header_len
+ self.data_len + 2):
if self.state == State.init:
self.state = State.received
self._set_serial_no(self.snr)
self.__dispatch_msg()
def __parse_header(self, buf: bytes, buf_len: int) -> None:
if (buf_len < self.header_len): # enough bytes for complete header?
return
result = struct.unpack_from('<BHHHL', buf, 0)
# store parsed header values in the class
start = result[0] # start byte
self.data_len = result[1] # payload length of this frame
self.control = result[2]
self.seq.set_recv(result[3])
self.snr = result[4]
if start != 0xA5:
hex_dump_memory(logging.ERROR,
'Drop packet w invalid start byte from'
f' {self.addr}:', buf, buf_len)
self.inc_counter('Invalid_Msg_Format')
# erase broken recv buffer
self.ifc.rx_clear()
return
self.header_valid = True
def __trailer_is_ok(self, buf: bytes, buf_len: int) -> bool:
crc = buf[self.data_len+11]
stop = buf[self.data_len+12]
if stop != 0x15:
hex_dump_memory(logging.ERROR,
'Drop packet w invalid stop byte from '
f'{self.addr}:', buf, buf_len)
self.inc_counter('Invalid_Msg_Format')
if self.ifc.rx_len() > (self.data_len+13):
next_start = buf[self.data_len+13]
if next_start != 0xa5:
# erase broken recv buffer
self.ifc.rx_clear()
return False
check = sum(buf[1:buf_len-2]) & 0xff
if check != crc:
self.inc_counter('Invalid_Msg_Format')
logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}'
f' Stop:{int(stop):#02x}')
# start & stop byte are valid, discard only this message
return False
return True
def __flush_recv_msg(self) -> None:
self.ifc.rx_get(self.header_len + self.data_len+2)
self.header_valid = False
def __dispatch_msg(self) -> None:
_fnc, _str = self.get_fnc_handler(self.control)
if self.unique_id:
logger.info(self._flow_str(self.server_side, 'rx') +
f' Ctl: {int(self.control):#04x}' +
f' Msg: {_str}')
_fnc()
else:
logger.info(self._flow_str(self.server_side, 'drop') +
f' Ctl: {int(self.control):#04x}' +
f' Msg: {_str}')
'''
Message handler methods
'''
def msg_response(self):
data = self.ifc.rx_peek()[self.header_len:]
result = struct.unpack_from('<BBLL', data, 0)
ftype = result[0] # always 2
valid = result[1] == 1 # status
ts = result[2]
set_hb = result[3] # always 60 or 120
logger.debug(f'ftype:{ftype} accepted:{valid}'
f' ts:{ts:08x} nextHeartbeat: {set_hb}s')
dt = datetime.fromtimestamp(ts)
logger.debug(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
return ftype, valid, ts, set_hb
class SolarmanV5(SolarmanBase):
AT_CMD = 1
MB_RTU_CMD = 2
MB_CLIENT_DATA_UP = 30
'''Data up time in client mode'''
HDR_FMT = '<BLLL'
'''format string for packing of the header'''
def __init__(self, addr, ifc: "AsyncIfc",
server_side: bool, client_mode: bool):
super().__init__(addr, ifc, server_side, self.send_modbus_cb,
mb_timeout=8)
self.db = InfosG3P(client_mode)
self.forward_at_cmd_resp = False
self.no_forwarding = False
'''this connection type must not connect to the TSUN cloud'''
self.establish_inv_emu = False
'''create a Solarman EMU instance to send data to the TSUN cloud'''
self.switch = {
0x4210: self.msg_data_ind, # real time data
0x1210: self.msg_response, # at least every 5 minutes
0x4710: self.msg_hbeat_ind, # heartbeat
0x1710: self.msg_response, # every 2 minutes
# every 3 hours comes a sync sequence:
# 00:00:00 0x4110 device data ftype: 0x02
# 00:00:02 0x4210 real time data ftype: 0x01
# 00:00:03 0x4210 real time data ftype: 0x81
# 00:00:05 0x4310 wifi data ftype: 0x81 sub-id 0x0018: 0c # noqa: E501
# 00:00:06 0x4310 wifi data ftype: 0x81 sub-id 0x0018: 1c # noqa: E501
# 00:00:07 0x4310 wifi data ftype: 0x01 sub-id 0x0018: 0c # noqa: E501
# 00:00:08 0x4810 options? ftype: 0x01
0x4110: self.msg_dev_ind, # device data, sync start
0x1110: self.msg_response, # every 3 hours
0x4310: self.msg_sync_start, # regularly after 3-6 hours
0x1310: self.msg_response,
0x4810: self.msg_sync_end, # sync end
0x1810: self.msg_response,
#
# MODbus or AT cmd
0x4510: self.msg_command_req, # from server
0x1510: self.msg_command_rsp, # from inverter
# 0x0510: self.msg_command_rsp, # from inverter
}
self.log_lvl = {
0x4210: logging.INFO, # real time data
0x1210: logging.INFO, # at least every 5 minutes
0x4710: logging.DEBUG, # heartbeat
0x1710: logging.DEBUG, # every 2 minutes
0x4110: logging.INFO, # device data, sync start
0x1110: logging.INFO, # every 3 hours
0x4310: logging.INFO, # regularly after 3-6 hours
0x1310: logging.INFO,
0x4810: logging.INFO, # sync end
0x1810: logging.INFO,
#
# MODbus or AT cmd
0x4510: logging.INFO, # from server
0x1510: self.get_cmd_rsp_log_lvl,
}
g3p_cnf = Config.get('gen3plus')
if 'at_acl' in g3p_cnf: # pragma: no cover
self.at_acl = g3p_cnf['at_acl']
self.sensor_list = 0
'''
Our public methods
'''
def close(self) -> None:
logging.debug('Solarman.close()')
# we have references to methods of this class in self.switch
# so we have to erase self.switch, otherwise this instance can't be
# deallocated by the garbage collector ==> we get a memory leak
self.switch.clear()
self.log_lvl.clear()
super().close()
async def send_start_cmd(self, snr: int, host: str,
forward: bool,
start_timeout=MB_CLIENT_DATA_UP):
self.no_forwarding = True
self.establish_inv_emu = forward
self.snr = snr
self._set_serial_no(snr)
self.mb_timeout = start_timeout
self.db.set_db_def_value(Register.IP_ADDRESS, host)
self.db.set_db_def_value(Register.POLLING_INTERVAL,
self.mb_timeout)
self.db.set_db_def_value(Register.DATA_UP_INTERVAL,
300)
self.db.set_db_def_value(Register.COLLECT_INTERVAL,
1)
self.db.set_db_def_value(Register.HEARTBEAT_INTERVAL,
120)
self.db.set_db_def_value(Register.SENSOR_LIST,
Fmt.hex4((self.sensor_list, )))
self.new_data['controller'] = True
self.state = State.up
self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG)
self.mb_timer.start(self.mb_timeout)
def new_state_up(self):
if self.state is not State.up:
self.state = State.up
if (self.modbus_polling):
self.mb_timer.start(self.mb_first_timeout)
self.db.set_db_def_value(Register.POLLING_INTERVAL,
self.mb_timeout)
def establish_emu(self):
_len = 223
build_msg = self.db.build(_len, 0x41, 2)
struct.pack_into(
'<BHHHLBL', build_msg, 0, 0xA5, _len-11, 0x4110,
0, self.snr, 2, self._emu_timestamp())
self.ifc.fwd_add(build_msg)
self.ifc.fwd_add(struct.pack('<BB', 0, 0x15)) # crc & stop
def __set_config_parms(self, inv: dict):
'''init connection with params from the configuration'''
self.node_id = inv['node_id']
self.sug_area = inv['suggested_area']
self.modbus_polling = inv['modbus_polling']
self.sensor_list = inv['sensor_list']
if self.mb:
self.mb.set_node_id(self.node_id)
def _set_serial_no(self, snr: int):
'''check the serial number and configure the inverter connection'''
serial_no = str(snr)
if self.unique_id == serial_no:
logger.debug(f'SerialNo: {serial_no}')
else:
inverters = Config.get('inverters')
# logger.debug(f'Inverters: {inverters}')
for key, inv in inverters.items():
# logger.debug(f'key: {key} -> {inv}')
if (type(inv) is dict and 'monitor_sn' in inv
and inv['monitor_sn'] == snr):
self.__set_config_parms(inv)
self.db.set_pv_module_details(inv)
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501
self.db.set_db_def_value(Register.COLLECTOR_SNR, snr)
self.db.set_db_def_value(Register.SERIAL_NUMBER, key)
break
else:
self.node_id = ''
self.sug_area = ''
if 'allow_all' not in inverters or not inverters['allow_all']:
self.inc_counter('Unknown_SNR')
self.unique_id = None
logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})') # noqa: E501
return
logger.warning(f'SerialNo {serial_no} not known but accepted!')
self.unique_id = serial_no
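# A matching config sketch (all values hypothetical; field names taken
# from __set_config_parms() above), assuming a TOML inverters table keyed
# by the inverter serial number:
#   [inverters."R17xxxxxxxxxxxx1"]
#   monitor_sn = 2000000000
#   node_id = "inv_1"
#   suggested_area = "roof"
#   modbus_polling = true
#   sensor_list = 0x02b0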
def forward(self, buffer, buflen) -> None:
'''add the current received msg to the forwarding queue'''
if self.no_forwarding:
return
tsun = Config.get('solarman')
if tsun['enabled']:
self.ifc.fwd_add(buffer[:buflen])
self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:')
_, _str = self.get_fnc_handler(self.control)
logger.info(self._flow_str(self.server_side, 'forwrd') +
f' Ctl: {int(self.control):#04x}'
f' Msg: {_str}')
def _init_new_client_conn(self) -> bool:
return False
def _heartbeat(self) -> int:
return 60 # pragma: no cover
def __send_ack_rsp(self, msgtype, ftype, ack=1):
self._build_header(msgtype)
self.ifc.tx_add(struct.pack('<BBLL', ftype, ack,
self._timestamp(),
self._heartbeat()))
self._finish_send_msg()
def send_modbus_cb(self, pdu: bytearray, log_lvl: int, state: str):
if self.state != State.up:
logger.warning(f'[{self.node_id}] ignore MODBUS cmd,'
' because the state is not UP anymore')
return
self._build_header(0x4510)
self.ifc.tx_add(struct.pack('<BHLLL', self.MB_RTU_CMD,
self.sensor_list, 0, 0, 0))
self.ifc.tx_add(pdu)
self._finish_send_msg()
self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:')
self.ifc.tx_flush()
def mb_timout_cb(self, exp_cnt):
self.mb_timer.start(self.mb_timeout)
self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG)
if 1 == (exp_cnt % 30):
# logging.info("Regular Modbus Status request")
self._send_modbus_cmd(Modbus.READ_REGS, 0x2000, 96, logging.DEBUG)
def at_cmd_forbidden(self, cmd: str, connection: str) -> bool:
return not cmd.startswith(tuple(self.at_acl[connection]['allow'])) or \
cmd.startswith(tuple(self.at_acl[connection]['block']))
async def send_at_cmd(self, at_cmd: str) -> None:
if self.state != State.up:
logger.warning(f'[{self.node_id}] ignore AT+ cmd,'
' as the state is not UP')
return
at_cmd = at_cmd.strip()
if self.at_cmd_forbidden(cmd=at_cmd, connection='mqtt'):
data_json = f'\'{at_cmd}\' is forbidden'
node_id = self.node_id
key = 'at_resp'
logger.info(f'{key}: {data_json}')
await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501
return
self.forward_at_cmd_resp = False
self._build_header(0x4510)
self.ifc.tx_add(struct.pack(f'<BHLLL{len(at_cmd)}sc', self.AT_CMD,
0x0002, 0, 0, 0,
at_cmd.encode('utf-8'), b'\r'))
self._finish_send_msg()
self.ifc.tx_log(logging.INFO, 'Send AT Command:')
try:
self.ifc.tx_flush()
except Exception:
self.ifc.tx_clear()
def __forward_msg(self):
self.forward(self.ifc.rx_peek(), self.header_len+self.data_len+2)
def __build_model_name(self):
db = self.db
max_pow = db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
rated = db.get_db_value(Register.RATED_POWER, 0)
model = None
if max_pow == 2000:
if rated == 800 or rated == 600:
model = f'TSOL-MS{max_pow}({rated})'
else:
model = f'TSOL-MS{max_pow}'
elif max_pow == 1800 or max_pow == 1600:
model = f'TSOL-MS{max_pow}'
if model:
logger.info(f'Model: {model}')
self.db.set_db_def_value(Register.EQUIPMENT_MODEL, model)
def __process_data(self, ftype, ts):
inv_update = False
msg_type = self.control >> 8
for key, update in self.db.parse(self.ifc.rx_peek(), msg_type, ftype,
self.node_id):
if update:
if key == 'inverter':
inv_update = True
self._set_mqtt_timestamp(key, ts)
self.new_data[key] = True
if inv_update:
self.__build_model_name()
'''
Message handler methods
'''
def msg_unknown(self):
logger.warning(f"Unknow Msg: ID:{int(self.control):#04x}")
self.inc_counter('Unknown_Msg')
self.__forward_msg()
def msg_dev_ind(self):
data = self.ifc.rx_peek()[self.header_len:]
result = struct.unpack_from(self.HDR_FMT, data, 0)
ftype = result[0] # always 2
total = result[1]
tim = result[2]
res = result[3] # always zero
logger.info(f'frame type:{ftype:02x}'
f' timer:{tim:08x}s null:{res}')
if self.time_ofs:
# dt = datetime.fromtimestamp(total + self.time_ofs)
# logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
ts = total + self.time_ofs
else:
ts = None
self.__process_data(ftype, ts)
self.sensor_list = int(self.db.get_db_value(Register.SENSOR_LIST, 0),
16)
self.__forward_msg()
self.__send_ack_rsp(0x1110, ftype)
def msg_data_ind(self):
data = self.ifc.rx_peek()
result = struct.unpack_from('<BHLLLHL', data, self.header_len)
ftype = result[0] # 1 or 0x81
sensor = result[1]
total = result[2]
tim = result[3]
if 1 == ftype:
self.time_ofs = result[4]
unkn = result[5]
cnt = result[6]
if sensor != self.sensor_list:
logger.warning(f'Unexpected Sensor-List:{sensor:04x}'
f' (!={self.sensor_list:04x})')
logger.info(f'ftype:{ftype:02x} timer:{tim:08x}s'
f' ??: {unkn:04x} cnt:{cnt}')
if self.time_ofs:
# dt = datetime.fromtimestamp(total + self.time_ofs)
# logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
ts = total + self.time_ofs
else:
ts = None
self.__process_data(ftype, ts)
self.__forward_msg()
self.__send_ack_rsp(0x1210, ftype)
self.new_state_up()
def msg_sync_start(self):
data = self.ifc.rx_peek()[self.header_len:]
result = struct.unpack_from(self.HDR_FMT, data, 0)
ftype = result[0]
total = result[1]
self.time_ofs = result[3]
dt = datetime.fromtimestamp(total + self.time_ofs)
logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
self.__forward_msg()
self.__send_ack_rsp(0x1310, ftype)
def msg_command_req(self):
data = self.ifc.rx_peek()[self.header_len:
self.header_len+self.data_len]
result = struct.unpack_from('<B', data, 0)
ftype = result[0]
if ftype == self.AT_CMD:
at_cmd = data[15:].decode()
if self.at_cmd_forbidden(cmd=at_cmd, connection='tsun'):
self.inc_counter('AT_Command_Blocked')
return
self.inc_counter('AT_Command')
self.forward_at_cmd_resp = True
elif ftype == self.MB_RTU_CMD:
rstream = self.ifc.remote.stream
if rstream.mb.recv_req(data[15:],
rstream.__forward_msg):
self.inc_counter('Modbus_Command')
else:
logger.error('Invalid Modbus Msg')
self.inc_counter('Invalid_Msg_Format')
return
self.__forward_msg()
def publish_mqtt(self, key, data): # pragma: no cover
asyncio.ensure_future(
self.mqtt.publish(key, data))
def get_cmd_rsp_log_lvl(self) -> int:
ftype = self.ifc.rx_peek()[self.header_len]
if ftype == self.AT_CMD:
if self.forward_at_cmd_resp:
return logging.INFO
return logging.DEBUG
elif ftype == self.MB_RTU_CMD \
and self.server_side:
return self.mb.last_log_lvl
return logging.WARNING
def msg_command_rsp(self):
data = self.ifc.rx_peek()[self.header_len:
self.header_len+self.data_len]
ftype = data[0]
if ftype == self.AT_CMD:
if not self.forward_at_cmd_resp:
data_json = data[14:].decode("utf-8")
node_id = self.node_id
key = 'at_resp'
logger.info(f'{key}: {data_json}')
self.publish_mqtt(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501
return
elif ftype == self.MB_RTU_CMD:
self.__modbus_command_rsp(data)
return
self.__forward_msg()
def __parse_modbus_rsp(self, data):
inv_update = False
self.modbus_elms = 0
for key, update, _ in self.mb.recv_resp(self.db, data[14:]):
self.modbus_elms += 1
if update:
if key == 'inverter':
inv_update = True
self._set_mqtt_timestamp(key, self._timestamp())
self.new_data[key] = True
return inv_update
def __modbus_command_rsp(self, data):
'''process the MODBUS RTU response'''
valid = data[1]
modbus_msg_len = self.data_len - 14
# logger.debug(f'modbus_len:{modbus_msg_len} accepted:{valid}')
if valid == 1 and modbus_msg_len > 4:
# logger.info(f'first byte modbus:{data[14]}')
inv_update = self.__parse_modbus_rsp(data)
if inv_update:
self.__build_model_name()
if self.establish_inv_emu and not self.ifc.remote.stream:
self.establish_emu()
def msg_hbeat_ind(self):
data = self.ifc.rx_peek()[self.header_len:]
result = struct.unpack_from('<B', data, 0)
ftype = result[0]
self.__forward_msg()
self.__send_ack_rsp(0x1710, ftype)
self.new_state_up()
def msg_sync_end(self):
data = self.ifc.rx_peek()[self.header_len:]
result = struct.unpack_from(self.HDR_FMT, data, 0)
ftype = result[0]
total = result[1]
self.time_ofs = result[3]
dt = datetime.fromtimestamp(total + self.time_ofs)
logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
self.__forward_msg()
self.__send_ack_rsp(0x1810, ftype)

View File

@@ -1,871 +0,0 @@
import logging
import json
import struct
import os
from enum import Enum
from typing import Generator
class ProxyMode(Enum):
SERVER = 1
CLIENT = 2
class Register(Enum):
COLLECTOR_FW_VERSION = 1
CHIP_TYPE = 2
CHIP_MODEL = 3
TRACE_URL = 4
LOGGER_URL = 5
MAC_ADDR = 6
COLLECTOR_SNR = 7
PRODUCT_NAME = 20
MANUFACTURER = 21
VERSION = 22
SERIAL_NUMBER = 23
EQUIPMENT_MODEL = 24
NO_INPUTS = 25
MAX_DESIGNED_POWER = 26
RATED_LEVEL = 27
INPUT_COEFFICIENT = 28
GRID_VOLT_CAL_COEF = 29
OUTPUT_COEFFICIENT = 30
PROD_COMPL_TYPE = 31
INVERTER_CNT = 50
UNKNOWN_SNR = 51
UNKNOWN_MSG = 52
INVALID_DATA_TYPE = 53
INTERNAL_ERROR = 54
UNKNOWN_CTRL = 55
OTA_START_MSG = 56
SW_EXCEPTION = 57
INVALID_MSG_FMT = 58
AT_COMMAND = 59
MODBUS_COMMAND = 60
AT_COMMAND_BLOCKED = 61
CLOUD_CONN_CNT = 62
OUTPUT_POWER = 83
RATED_POWER = 84
INVERTER_TEMP = 85
INVERTER_STATUS = 86
DETECT_STATUS_1 = 87
DETECT_STATUS_2 = 88
PV1_VOLTAGE = 100
PV1_CURRENT = 101
PV1_POWER = 102
PV1_MANUFACTURER = 103
PV1_MODEL = 104
PV2_VOLTAGE = 110
PV2_CURRENT = 111
PV2_POWER = 112
PV2_MANUFACTURER = 113
PV2_MODEL = 114
PV3_VOLTAGE = 120
PV3_CURRENT = 121
PV3_POWER = 122
PV3_MANUFACTURER = 123
PV3_MODEL = 124
PV4_VOLTAGE = 130
PV4_CURRENT = 131
PV4_POWER = 132
PV4_MANUFACTURER = 133
PV4_MODEL = 134
PV5_VOLTAGE = 140
PV5_CURRENT = 141
PV5_POWER = 142
PV5_MANUFACTURER = 143
PV5_MODEL = 144
PV6_VOLTAGE = 150
PV6_CURRENT = 151
PV6_POWER = 152
PV6_MANUFACTURER = 153
PV6_MODEL = 154
PV1_DAILY_GENERATION = 200
PV1_TOTAL_GENERATION = 201
PV2_DAILY_GENERATION = 210
PV2_TOTAL_GENERATION = 211
PV3_DAILY_GENERATION = 220
PV3_TOTAL_GENERATION = 221
PV4_DAILY_GENERATION = 230
PV4_TOTAL_GENERATION = 231
PV5_DAILY_GENERATION = 240
PV5_TOTAL_GENERATION = 241
PV6_DAILY_GENERATION = 250
PV6_TOTAL_GENERATION = 251
INV_UNKNOWN_1 = 252
BOOT_STATUS = 253
DSP_STATUS = 254
WORK_MODE = 255
OUTPUT_SHUTDOWN = 256
GRID_VOLTAGE = 300
GRID_CURRENT = 301
GRID_FREQUENCY = 302
DAILY_GENERATION = 303
TOTAL_GENERATION = 304
COMMUNICATION_TYPE = 400
SIGNAL_STRENGTH = 401
POWER_ON_TIME = 402
COLLECT_INTERVAL = 403
DATA_UP_INTERVAL = 404
CONNECT_COUNT = 405
HEARTBEAT_INTERVAL = 406
IP_ADDRESS = 407
POLLING_INTERVAL = 408
SENSOR_LIST = 409
SSID = 410
EVENT_ALARM = 500
EVENT_FAULT = 501
EVENT_BF1 = 502
EVENT_BF2 = 503
TS_INPUT = 600
TS_GRID = 601
TS_TOTAL = 602
VALUE_1 = 9000
TEST_REG1 = 10000
TEST_REG2 = 10001
class Fmt:
@staticmethod
def get_value(buf: bytes, idx: int, row: dict):
'''Get a value from buf and interpret it as defined in row'''
fmt = row['fmt']
res = struct.unpack_from(fmt, buf, idx)
result = res[0]
if isinstance(result, (bytearray, bytes)):
result = result.decode().split('\x00')[0]
if 'func' in row:
result = row['func'](res)
if 'ratio' in row:
result = round(result * row['ratio'], 2)
if 'quotient' in row:
result = round(result/row['quotient'])
if 'offset' in row:
result = result + row['offset']
return result
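# Usage sketch: a 16-bit value scaled by ratio 0.1, e.g. a grid voltage
# (raw value hypothetical):
#   row = {'fmt': '!H', 'ratio': 0.1}
#   Fmt.get_value(struct.pack('!H', 2305), 0, row)   # -> 230.5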
@staticmethod
def hex4(val: tuple | str, reverse=False) -> str | int:
if not reverse:
return f'{val[0]:04x}'
else:
return int(val, 16)
@staticmethod
def mac(val: tuple | str, reverse=False) -> str | tuple:
if not reverse:
return "%02x:%02x:%02x:%02x:%02x:%02x" % val
else:
return (
int(val[0:2], 16), int(val[3:5], 16),
int(val[6:8], 16), int(val[9:11], 16),
int(val[12:14], 16), int(val[15:], 16))
@staticmethod
def version(val: tuple | str, reverse=False) -> str | int:
if not reverse:
x = val[0]
return f'V{(x >> 12)}.{(x >> 8) & 0xf}' \
f'.{(x >> 4) & 0xf}{x & 0xf:1X}'
else:
arr = val[1:].split('.')
return int(arr[0], 10) << 12 | \
int(arr[1], 10) << 8 | \
int(arr[2][:-1], 10) << 4 | \
int(arr[2][-1:], 16)
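# Round-trip sketch for the nibble-coded version word (value hypothetical):
#   Fmt.version((0x4013,))         # -> 'V4.0.13'
#   Fmt.version('V4.0.13', True)   # -> 0x4013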
@staticmethod
def set_value(buf: bytearray, idx: int, row: dict, val):
'''Pack a value into buf as defined in row'''
fmt = row['fmt']
if 'offset' in row:
val = val - row['offset']
if 'quotient' in row:
val = round(val * row['quotient'])
if 'ratio' in row:
val = round(val / row['ratio'])
if 'func' in row:
val = row['func'](val, reverse=True)
if isinstance(val, str):
val = bytes(val, 'UTF8')
if isinstance(val, tuple):
struct.pack_into(fmt, buf, idx, *val)
else:
struct.pack_into(fmt, buf, idx, val)
class ClrAtMidnight:
__clr_at_midnight = [Register.PV1_DAILY_GENERATION, Register.PV2_DAILY_GENERATION, Register.PV3_DAILY_GENERATION, Register.PV4_DAILY_GENERATION, Register.PV5_DAILY_GENERATION, Register.PV6_DAILY_GENERATION, Register.DAILY_GENERATION] # noqa: E501
db = {}
@classmethod
def add(cls, keys: list, prfx: str, reg: Register) -> None:
if reg not in cls.__clr_at_midnight:
return
prfx += f'{keys[0]}'
db_dict = cls.db
if prfx not in db_dict:
db_dict[prfx] = {}
db_dict = db_dict[prfx]
for key in keys[1:-1]:
if key not in db_dict:
db_dict[key] = {}
db_dict = db_dict[key]
db_dict[keys[-1]] = 0
@classmethod
def elm(cls) -> Generator[tuple[str, dict], None, None]:
for reg, name in cls.db.items():
yield reg, name
cls.db = {}
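# Illustrative sketch (assumption, not original code): only registers listed
# in __clr_at_midnight are collected; the prefix and keys below are made up.
#
#   ClrAtMidnight.add(['total', 'Daily_Generation'], 'tsun/inv_1/',
#                     Register.DAILY_GENERATION)
#   # ClrAtMidnight.db == {'tsun/inv_1/total': {'Daily_Generation': 0}}
#   for topic, payload in ClrAtMidnight.elm():   # db is drained afterwards
#       pass  # e.g. publish json.dumps(payload) to the MQTT topic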
class Infos:
__slots__ = ('db', 'tracer', )
LIGHTNING = 'mdi:lightning-bolt'
COUNTER = 'mdi:counter'
GAUGE = 'mdi:gauge'
SOLAR_POWER_VAR = 'mdi:solar-power-variant'
SOLAR_POWER = 'mdi:solar-power'
WIFI = 'mdi:wifi'
UPDATE = 'mdi:update'
DAILY_GEN = 'Daily Generation'
TOTAL_GEN = 'Total Generation'
FMT_INT = '| int'
FMT_FLOAT = '| float'
FMT_STRING_SEC = '| string + " s"'
stat = {}
app_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
new_stat_data = {}
@classmethod
def static_init(cls):
logging.debug('Initialize proxy statistics')
# init proxy counter in the class.stat dictionary
cls.stat['proxy'] = {}
for key in cls.__info_defs:
name = cls.__info_defs[key]['name']
if name[0] == 'proxy':
cls.stat['proxy'][name[1]] = 0
# add values from the environment to the device definition table
prxy = cls.__info_devs['proxy']
prxy['sw'] = cls.version
prxy['mdl'] = cls.app_name
def __init__(self):
self.db = {}
self.tracer = logging.getLogger('data')
__info_devs = {
'proxy': {'singleton': True, 'name': 'Proxy', 'mf': 'Stefan Allius'}, # noqa: E501
'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': Register.CHIP_MODEL, 'mf': Register.CHIP_TYPE, 'sw': Register.COLLECTOR_FW_VERSION, 'mac': Register.MAC_ADDR, 'sn': Register.COLLECTOR_SNR}, # noqa: E501
'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': Register.EQUIPMENT_MODEL, 'mf': Register.MANUFACTURER, 'sw': Register.VERSION, 'sn': Register.SERIAL_NUMBER}, # noqa: E501
'input_pv1': {'via': 'inverter', 'name': 'Module PV1', 'mdl': Register.PV1_MODEL, 'mf': Register.PV1_MANUFACTURER}, # noqa: E501
'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'mdl': Register.PV2_MODEL, 'mf': Register.PV2_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 2}}, # noqa: E501
'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'mdl': Register.PV3_MODEL, 'mf': Register.PV3_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 3}}, # noqa: E501
'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'mdl': Register.PV4_MODEL, 'mf': Register.PV4_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 4}}, # noqa: E501
'input_pv5': {'via': 'inverter', 'name': 'Module PV5', 'mdl': Register.PV5_MODEL, 'mf': Register.PV5_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 5}}, # noqa: E501
'input_pv6': {'via': 'inverter', 'name': 'Module PV6', 'mdl': Register.PV6_MODEL, 'mf': Register.PV6_MANUFACTURER, 'dep': {'reg': Register.NO_INPUTS, 'gte': 6}}, # noqa: E501
}
__comm_type_val_tpl = "{%set com_types = ['n/a','Wi-Fi', 'G4', 'G5', 'GPRS'] %}{{com_types[value_json['Communication_Type']|int(0)]|default(value_json['Communication_Type'])}}" # noqa: E501
__work_mode_val_tpl = "{%set mode = ['Normal-Mode', 'Aging-Mode', 'ATE-Mode', 'Shielding GFDI', 'DTU-Mode'] %}{{mode[value_json['Work_Mode']|int(0)]|default(value_json['Work_Mode'])}}" # noqa: E501
__status_type_val_tpl = "{%set inv_status = ['Off-line', 'On-grid', 'Off-grid'] %}{{inv_status[value_json['Inverter_Status']|int(0)]|default(value_json['Inverter_Status'])}}" # noqa: E501
__rated_power_val_tpl = "{% if 'Rated_Power' in value_json and value_json['Rated_Power'] != None %}{{value_json['Rated_Power']|string() +' W'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501
__designed_power_val_tpl = '''
{% if 'Max_Designed_Power' in value_json and
value_json['Max_Designed_Power'] != None %}
{% if value_json['Max_Designed_Power'] | int(0xffff) < 0x8000 %}
{{value_json['Max_Designed_Power']|string() +' W'}}
{% else %}
n/a
{% endif %}
{% else %}
{{ this.state }}
{% endif %}
'''
__inv_alarm_val_tpl = '''
{% if 'Inverter_Alarm' in value_json and
value_json['Inverter_Alarm'] != None %}
{% set val_int = value_json['Inverter_Alarm'] | int %}
{% if val_int == 0 %}
{% set result = 'noAlarm'%}
{%else%}
{% set result = '' %}
{% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%}
{% endif %}
{% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%}
{% endif %}
{% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%}
{% endif %}
{% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%}
{% endif %}
{% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%}
{% endif %}
{% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%}
{% endif %}
{% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%}
{% endif %}
{% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%}
{% endif %}
{% if val_int | bitwise_and(9)%}{% set result = result + 'noUtility, '%}
{% endif %}
{% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%}
{% endif %}
{% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%}
{% endif %}
{% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%}
{% endif %}
{% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%}
{% endif %}
{% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%}
{% endif %}
{% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%}
{% endif %}
{% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%}
{% endif %}
{% endif %}
{{ result }}
{% else %}
{{ this.state }}
{% endif %}
'''
__inv_fault_val_tpl = '''
{% if 'Inverter_Fault' in value_json and
value_json['Inverter_Fault'] != None %}
{% set val_int = value_json['Inverter_Fault'] | int %}
{% if val_int == 0 %}
{% set result = 'noFault'%}
{%else%}
{% set result = '' %}
{% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%}
{% endif %}
{% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%}
{% endif %}
{% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%}
{% endif %}
{% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%}
{% endif %}
{% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%}
{% endif %}
{% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%}
{% endif %}
{% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%}
{% endif %}
{% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%}
{% endif %}
{% if val_int | bitwise_and(9)%}{% set result = result + 'Bit9, '%}
{% endif %}
{% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%}
{% endif %}
{% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%}
{% endif %}
{% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%}
{% endif %}
{% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%}
{% endif %}
{% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%}
{% endif %}
{% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%}
{% endif %}
{% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%}
{% endif %}
{% endif %}
{{ result }}
{% else %}
{{ this.state }}
{% endif %}
'''
__input_coef_val_tpl = "{% if 'Input_Coefficient' in value_json and value_json['Input_Coefficient'] != None %}{{value_json['Input_Coefficient']|string() +' %'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501
__output_coef_val_tpl = "{% if 'Output_Coefficient' in value_json and value_json['Output_Coefficient'] != None %}{{value_json['Output_Coefficient']|string() +' %'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501
__info_defs = {
# collector values used for device registration:
Register.COLLECTOR_FW_VERSION: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.CHIP_TYPE: {'name': ['collector', 'Chip_Type'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.CHIP_MODEL: {'name': ['collector', 'Chip_Model'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.TRACE_URL: {'name': ['collector', 'Trace_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.LOGGER_URL: {'name': ['collector', 'Logger_URL'], 'singleton': False, 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.MAC_ADDR: {'name': ['collector', 'MAC-Addr'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.COLLECTOR_SNR: {'name': ['collector', 'Serial_Number'], 'singleton': False, 'level': logging.INFO, 'unit': ''}, # noqa: E501
# inverter values used for device registration:
Register.PRODUCT_NAME: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.MANUFACTURER: {'name': ['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.VERSION: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.SERIAL_NUMBER: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EQUIPMENT_MODEL: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.NO_INPUTS: {'name': ['inverter', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.MAX_DESIGNED_POWER: {'name': ['inverter', 'Max_Designed_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'designed_power_', 'val_tpl': __designed_power_val_tpl, 'name': 'Max Designed Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.RATED_POWER: {'name': ['inverter', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'val_tpl': __rated_power_val_tpl, 'name': 'Rated Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.WORK_MODE: {'name': ['inverter', 'Work_Mode'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'work_mode_', 'name': 'Work Mode', 'val_tpl': __work_mode_val_tpl, 'icon': 'mdi:power', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.INPUT_COEFFICIENT: {'name': ['inverter', 'Input_Coefficient'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'input_coef_', 'val_tpl': __input_coef_val_tpl, 'name': 'Input Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.OUTPUT_COEFFICIENT: {'name': ['inverter', 'Output_Coefficient'], 'level': logging.INFO, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'output_coef_', 'val_tpl': __output_coef_val_tpl, 'name': 'Output Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV1_MANUFACTURER: {'name': ['inverter', 'PV1_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV1_MODEL: {'name': ['inverter', 'PV1_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV2_MANUFACTURER: {'name': ['inverter', 'PV2_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV2_MODEL: {'name': ['inverter', 'PV2_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV3_MANUFACTURER: {'name': ['inverter', 'PV3_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV3_MODEL: {'name': ['inverter', 'PV3_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV4_MANUFACTURER: {'name': ['inverter', 'PV4_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV4_MODEL: {'name': ['inverter', 'PV4_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV5_MANUFACTURER: {'name': ['inverter', 'PV5_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV5_MODEL: {'name': ['inverter', 'PV5_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV6_MANUFACTURER: {'name': ['inverter', 'PV6_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PV6_MODEL: {'name': ['inverter', 'PV6_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.BOOT_STATUS: {'name': ['inverter', 'BOOT_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.DSP_STATUS: {'name': ['inverter', 'DSP_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# proxy:
Register.INVERTER_CNT: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': FMT_INT, 'name': 'Active Inverter Connections', 'icon': COUNTER}}, # noqa: E501
Register.CLOUD_CONN_CNT: {'name': ['proxy', 'Cloud_Conn_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'cloud_conn_count_', 'fmt': FMT_INT, 'name': 'Active Cloud Connections', 'icon': COUNTER}}, # noqa: E501
Register.UNKNOWN_SNR: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': FMT_INT, 'name': 'Unknown Serial No', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.UNKNOWN_MSG: {'name': ['proxy', 'Unknown_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_msg_', 'fmt': FMT_INT, 'name': 'Unknown Msg Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.INVALID_DATA_TYPE: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': FMT_INT, 'name': 'Invalid Data Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.INTERNAL_ERROR: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': FMT_INT, 'name': 'Internal Error', 'icon': COUNTER, 'ent_cat': 'diagnostic', 'en': False}}, # noqa: E501
Register.UNKNOWN_CTRL: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': FMT_INT, 'name': 'Unknown Control Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.OTA_START_MSG: {'name': ['proxy', 'OTA_Start_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'ota_start_cmd_', 'fmt': FMT_INT, 'name': 'OTA Start Cmd', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.SW_EXCEPTION: {'name': ['proxy', 'SW_Exception'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'sw_exception_', 'fmt': FMT_INT, 'name': 'Internal SW Exception', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.INVALID_MSG_FMT: {'name': ['proxy', 'Invalid_Msg_Format'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_msg_fmt_', 'fmt': FMT_INT, 'name': 'Invalid Message Format', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.AT_COMMAND: {'name': ['proxy', 'AT_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_', 'fmt': FMT_INT, 'name': 'AT Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.AT_COMMAND_BLOCKED: {'name': ['proxy', 'AT_Command_Blocked'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_blocked_', 'fmt': FMT_INT, 'name': 'AT Command Blocked', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.MODBUS_COMMAND: {'name': ['proxy', 'Modbus_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'modbus_cmd_', 'fmt': FMT_INT, 'name': 'Modbus Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
# 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':FMT_FLOAT,'name': 'Grid Voltage'}}, # noqa: E501
# events
Register.EVENT_ALARM: {'name': ['events', 'Inverter_Alarm'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_alarm_', 'name': 'Inverter Alarm', 'val_tpl': __inv_alarm_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501
Register.EVENT_FAULT: {'name': ['events', 'Inverter_Fault'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_fault_', 'name': 'Inverter Fault', 'val_tpl': __inv_fault_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501
Register.EVENT_BF1: {'name': ['events', 'Inverter_Bitfield_1'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.EVENT_BF2: {'name': ['events', 'Inverter_bitfield_2'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
# Register.EVENT_409: {'name': ['events', '409_No_Utility'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# grid measures:
Register.TS_GRID: {'name': ['grid', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.GRID_VOLTAGE: {'name': ['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'out_volt_', 'fmt': FMT_FLOAT, 'name': 'Grid Voltage', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.GRID_CURRENT: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': FMT_FLOAT, 'name': 'Grid Current', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.GRID_FREQUENCY: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': FMT_FLOAT, 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.OUTPUT_POWER: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': FMT_FLOAT, 'name': 'Power'}}, # noqa: E501
Register.INVERTER_TEMP: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': FMT_INT, 'name': 'Temperature'}}, # noqa: E501
Register.INVERTER_STATUS: {'name': ['env', 'Inverter_Status'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_status_', 'name': 'Inverter Status', 'val_tpl': __status_type_val_tpl, 'icon': 'mdi:power'}}, # noqa: E501
Register.DETECT_STATUS_1: {'name': ['env', 'Detect_Status_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.DETECT_STATUS_2: {'name': ['env', 'Detect_Status_2'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# input measures:
Register.TS_INPUT: {'name': ['input', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.PV1_VOLTAGE: {'name': ['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv1_', 'val_tpl': "{{ (value_json['pv1']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV1_CURRENT: {'name': ['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv1_', 'val_tpl': "{{ (value_json['pv1']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV1_POWER: {'name': ['input', 'pv1', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv1_', 'val_tpl': "{{ (value_json['pv1']['Power'] | float)}}"}}, # noqa: E501
Register.PV2_VOLTAGE: {'name': ['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv2_', 'val_tpl': "{{ (value_json['pv2']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV2_CURRENT: {'name': ['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv2_', 'val_tpl': "{{ (value_json['pv2']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV2_POWER: {'name': ['input', 'pv2', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv2_', 'val_tpl': "{{ (value_json['pv2']['Power'] | float)}}"}}, # noqa: E501
Register.PV3_VOLTAGE: {'name': ['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv3_', 'val_tpl': "{{ (value_json['pv3']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV3_CURRENT: {'name': ['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv3_', 'val_tpl': "{{ (value_json['pv3']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV3_POWER: {'name': ['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv3_', 'val_tpl': "{{ (value_json['pv3']['Power'] | float)}}"}}, # noqa: E501
Register.PV4_VOLTAGE: {'name': ['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv4_', 'val_tpl': "{{ (value_json['pv4']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV4_CURRENT: {'name': ['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv4_', 'val_tpl': "{{ (value_json['pv4']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV4_POWER: {'name': ['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv4_', 'val_tpl': "{{ (value_json['pv4']['Power'] | float)}}"}}, # noqa: E501
Register.PV5_VOLTAGE: {'name': ['input', 'pv5', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv5', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv5_', 'val_tpl': "{{ (value_json['pv5']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV5_CURRENT: {'name': ['input', 'pv5', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv5', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv5_', 'val_tpl': "{{ (value_json['pv5']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV5_POWER: {'name': ['input', 'pv5', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv5', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv5_', 'val_tpl': "{{ (value_json['pv5']['Power'] | float)}}"}}, # noqa: E501
Register.PV6_VOLTAGE: {'name': ['input', 'pv6', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv6', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv6_', 'val_tpl': "{{ (value_json['pv6']['Voltage'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV6_CURRENT: {'name': ['input', 'pv6', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv6', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv6_', 'val_tpl': "{{ (value_json['pv6']['Current'] | float)}}", 'icon': GAUGE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV6_POWER: {'name': ['input', 'pv6', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv6', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv6_', 'val_tpl': "{{ (value_json['pv6']['Power'] | float)}}"}}, # noqa: E501
Register.PV1_DAILY_GENERATION: {'name': ['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv1_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501
Register.PV1_TOTAL_GENERATION: {'name': ['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv1_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501
Register.PV2_DAILY_GENERATION: {'name': ['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv2_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501
Register.PV2_TOTAL_GENERATION: {'name': ['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv2_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501
Register.PV3_DAILY_GENERATION: {'name': ['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv3_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501
Register.PV3_TOTAL_GENERATION: {'name': ['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv3_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501
Register.PV4_DAILY_GENERATION: {'name': ['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv4_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501
Register.PV4_TOTAL_GENERATION: {'name': ['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv4_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv4']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501
Register.PV5_DAILY_GENERATION: {'name': ['input', 'pv5', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv5', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv5_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv5']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501
Register.PV5_TOTAL_GENERATION: {'name': ['input', 'pv5', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv5', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv5_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv5']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501
Register.PV6_DAILY_GENERATION: {'name': ['input', 'pv6', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv6', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv6_', 'name': DAILY_GEN, 'val_tpl': "{{ (value_json['pv6']['Daily_Generation'] | float)}}", 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501
Register.PV6_TOTAL_GENERATION: {'name': ['input', 'pv6', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv6', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv6_', 'name': TOTAL_GEN, 'val_tpl': "{{ (value_json['pv6']['Total_Generation'] | float)}}", 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501
# total:
Register.TS_TOTAL: {'name': ['total', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.DAILY_GENERATION: {'name': ['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_', 'fmt': FMT_FLOAT, 'name': DAILY_GEN, 'icon': SOLAR_POWER_VAR, 'must_incr': True}}, # noqa: E501
Register.TOTAL_GENERATION: {'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': FMT_FLOAT, 'name': TOTAL_GEN, 'icon': SOLAR_POWER, 'must_incr': True}}, # noqa: E501
# controller:
Register.SIGNAL_STRENGTH: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': FMT_INT, 'name': 'Signal Strength', 'icon': WIFI}}, # noqa: E501
Register.POWER_ON_TIME: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'fmt': FMT_INT, 'name': 'Power on Time', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.COLLECT_INTERVAL: {'name': ['controller', 'Collect_Interval'], 'level': logging.DEBUG, 'unit': 'min', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_collect_intval_', 'fmt': '| string + " min"', 'name': 'Data Collect Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.CONNECT_COUNT: {'name': ['controller', 'Connect_Count'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'connect_count_', 'fmt': FMT_INT, 'name': 'Connect Count', 'icon': COUNTER, 'comp': 'sensor', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.COMMUNICATION_TYPE: {'name': ['controller', 'Communication_Type'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'comm_type_', 'name': 'Communication Type', 'val_tpl': __comm_type_val_tpl, 'comp': 'sensor', 'icon': WIFI}}, # noqa: E501
Register.DATA_UP_INTERVAL: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_up_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Data Up Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.HEARTBEAT_INTERVAL: {'name': ['controller', 'Heartbeat_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'heartbeat_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Heartbeat Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.IP_ADDRESS: {'name': ['controller', 'IP_Address'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'ip_address_', 'fmt': '| string', 'name': 'IP Address', 'icon': WIFI, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.POLLING_INTERVAL: {'name': ['controller', 'Polling_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'polling_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Polling Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.SENSOR_LIST: {'name': ['controller', 'Sensor_List'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.SSID: {'name': ['controller', 'WiFi_SSID'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.OUTPUT_SHUTDOWN: {'name': ['other', 'Output_Shutdown'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.RATED_LEVEL: {'name': ['other', 'Rated_Level'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.GRID_VOLT_CAL_COEF: {'name': ['other', 'Grid_Volt_Cal_Coef'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.PROD_COMPL_TYPE: {'name': ['other', 'Prod_Compliance_Type'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.INV_UNKNOWN_1: {'name': ['inv_unknown', 'Unknown_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
}
@property
def info_devs(self) -> dict:
return self.__info_devs
@property
def info_defs(self) -> dict:
return self.__info_defs
def dev_value(self, idx: str | int) -> str | int | float | dict | None:
'''Return the stored device value from our database.
idx:int ==> look up the value in the database and return it as str,
int or float. If the value is not available, return 'None'
idx:str ==> return the string as a fixed value without a
database lookup
'''
if type(idx) is str:
return idx # return idx as a fixed value
elif idx in self.info_defs:
row = self.info_defs[idx]
if 'singleton' in row and row['singleton']:
db_dict = self.stat
else:
db_dict = self.db
keys = row['name']
for key in keys:
if key not in db_dict:
return None # value not found in the database
db_dict = db_dict[key]
return db_dict # value of the requested entry
return None # unknown idx, not in info_defs
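# e.g. dev_value('Stefan Allius') -> 'Stefan Allius' (fixed value), while
# dev_value(Register.COLLECTOR_SNR) looks up the stored serial number and
# returns None if it has not been received yet (sample values are assumptions).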
@classmethod
def inc_counter(cls, counter: str) -> None:
'''inc proxy statistic counter'''
db_dict = cls.stat['proxy']
db_dict[counter] += 1
cls.new_stat_data['proxy'] = True
@classmethod
def dec_counter(cls, counter: str) -> None:
'''dec proxy statistic counter'''
db_dict = cls.stat['proxy']
db_dict[counter] -= 1
cls.new_stat_data['proxy'] = True
def ha_proxy_confs(self, ha_prfx: str, node_id: str, snr: str) \
-> Generator[tuple[str, str, str, str], None, None]:
'''Generator function yields json register struct for home-assistant
auto configuration and the unique entity string, for all proxy
registers
arguments:
ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
node_id:str ==> node id of the inverter, used to build unique entity
strings
snr:str ==> serial number of the inverter, used to build unique
entity strings
'''
# iterate over info_defs and yield the config for entries
# with singleton=True, which marks them as proxy registers
for reg in self.info_defs.keys():
res = self.ha_conf(reg, ha_prfx, node_id, snr, True) # noqa: E501
if res:
yield res
def ha_conf(self, key, ha_prfx, node_id, snr, singleton: bool,
sug_area: str = '') -> tuple[str, str, str, str] | None:
'''Method to build json register struct for home-assistant
auto configuration and the unique entity string, for all proxy
registers
arguments:
key ==> index of info_defs dict which reference the topic
ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
node_id:str ==> node id of the inverter, used to build unique entity
strings
snr:str ==> serial number of the inverter, used to build unique
entity strings
singleton ==> bool to allow/disallow proxy topics which are common
for all inverters
sug_area ==> area name for home assistant
'''
if key not in self.info_defs:
return None
row = self.info_defs[key]
if 'singleton' in row:
if singleton != row['singleton']:
return None
elif singleton:
return None
# check if we have details for home assistant
if 'ha' in row:
return self.__ha_conf(row, key, ha_prfx, node_id, snr, sug_area)
return None
def __ha_conf(self, row, key, ha_prfx, node_id, snr,
sug_area: str) -> tuple[str, str, str, str] | None:
ha = row['ha']
if 'comp' in ha:
component = ha['comp']
else:
component = 'sensor'
attr = self.__build_attr(row, key, ha_prfx, node_id, snr)
if 'dev' in ha:
device = self.info_devs[ha['dev']]
if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501
return None
attr['dev'] = self.__build_dev(device, key, ha, snr,
sug_area)
attr['o'] = self.__build_origin()
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.info_defs: the row for {key} is"
" missing the 'dev' value for the ha register")
return json.dumps(attr), component, node_id, attr['uniq_id']
def __build_attr(self, row, key, ha_prfx, node_id, snr):
attr = {}
ha = row['ha']
if 'name' in ha:
attr['name'] = ha['name']
else:
attr['name'] = row['name'][-1]
prfx = ha_prfx + node_id
attr['stat_t'] = prfx + row['name'][0]
attr['dev_cla'] = ha['dev_cla']
attr['stat_cla'] = ha['stat_cla']
attr['uniq_id'] = ha['id']+snr
if 'val_tpl' in ha:
attr['val_tpl'] = ha['val_tpl']
elif 'fmt' in ha:
attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }} # noqa: E501
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.info_defs: the row for {key} has"
" neither a 'val_tpl' nor a 'fmt' value")
# add unit_of_meas only if stat_cla isn't None. If
# stat_cla is None we want a plain number and not a line
# graph in home assistant; a unit would switch the display
# from a number to a line graph
if 'unit' in row and attr['stat_cla'] is not None:
attr['unit_of_meas'] = row['unit'] # 'unit_of_meas'
if 'icon' in ha:
attr['ic'] = ha['icon'] # icon for the entity
if 'nat_prc' in ha: # pragma: no cover
attr['sug_dsp_prc'] = ha['nat_prc'] # precision of floats
if 'ent_cat' in ha:
attr['ent_cat'] = ha['ent_cat'] # diagnostic, config
# enabled_by_default is deactivated, since it prevents the via
# setup of the devices. There seems to be a bug in home
# assistant; tested with 'Home Assistant 2023.10.4'
# if 'en' in ha: # enabled_by_default
# attr['en'] = ha['en']
return attr
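# Illustrative result (assumption): for Register.OUTPUT_POWER with
# ha_prfx='tsun/', node_id='inv_1/' and snr='123456' this builds roughly:
#   {'name': 'Power', 'stat_t': 'tsun/inv_1/grid', 'dev_cla': 'power',
#    'stat_cla': 'measurement', 'uniq_id': 'out_power_123456',
#    'val_tpl': "{{value_json['Output_Power'] | float}}",
#    'unit_of_meas': 'W'}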
def __build_dev(self, device, key, ha, snr, sug_area):
dev = {}
singleton = 'singleton' in device and device['singleton']
# use the same string for 'name' and 'suggested area', so we get
# dedicated devices in home assistant with a short value
# name and headline
if (sug_area == '' or singleton):
dev['name'] = device['name']
dev['sa'] = device['name']
else:
dev['name'] = device['name']+' - '+sug_area
dev['sa'] = device['name']+' - '+sug_area
self.__add_via_dev(dev, device, key, snr)
for key in ('mdl', 'mf', 'sw', 'hw', 'sn'): # add optional
# values for 'model', 'manufacturer', 'sw version',
# 'hw version' and 'serial number'
if key in device:
data = self.dev_value(device[key])
if data is not None:
dev[key] = data
if singleton:
dev['ids'] = [f"{ha['dev']}"]
else:
dev['ids'] = [f"{ha['dev']}_{snr}"]
self.__add_connection(dev, device)
return dev
def __add_connection(self, dev, device):
if 'mac' in device:
mac_str = self.dev_value(device['mac'])
if mac_str is not None:
if 12 == len(mac_str):
mac_str = ':'.join(mac_str[i:i+2] for i in range(0, 12, 2))
dev['cns'] = [["mac", f"{mac_str}"]]
def __add_via_dev(self, dev, device, key, snr):
if 'via' in device: # add the link to the parent device
via = device['via']
if via in self.info_devs:
via_dev = self.info_devs[via]
if 'singleton' in via_dev and via_dev['singleton']:
dev['via_device'] = via
else:
dev['via_device'] = f"{via}_{snr}"
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.info_defs: the row for "
f"{key} has an invalid via value: "
f"{via}")
def __build_origin(self):
origin = {}
origin['name'] = self.app_name
origin['sw'] = self.version
return origin
def ha_remove(self, key, node_id, snr) -> tuple[str, str, str, str] | None:
'''Method to build json unregister struct for home-assistant
to remove topics via auto configuration. Only for inverter topics.
arguments:
key ==> index of info_defs dict which reference the topic
node_id:str ==> node id of the inverter, used to build unique entity
snr:str ==> serial number of the inverter, used to build unique
entity strings
hint:
the returned tuple must have the same format as self.ha_conf()
'''
if key not in self.info_defs:
return None
row = self.info_defs[key]
if 'singleton' in row and row['singleton']:
return None
# check if we have details for home assistant
if 'ha' in row:
ha = row['ha']
if 'comp' in ha:
component = ha['comp']
else:
component = 'sensor'
attr = {}
uniq_id = ha['id']+snr
return json.dumps(attr), component, node_id, uniq_id
return None
def _key_obj(self, id: Register) -> tuple:
d = self.info_defs.get(id, {'name': None, 'level': logging.DEBUG,
'unit': ''})
if 'ha' in d and 'must_incr' in d['ha']:
must_incr = d['ha']['must_incr']
else:
must_incr = False
return d['name'], d['level'], d['unit'], must_incr
def update_db(self, keys: list, must_incr: bool, result):
name = ''
db_dict = self.db
for key in keys[:-1]:
if key not in db_dict:
db_dict[key] = {}
db_dict = db_dict[key]
name += key + '.'
if keys[-1] not in db_dict:
update = (not must_incr or result > 0)
else:
if must_incr:
update = db_dict[keys[-1]] < result
else:
update = db_dict[keys[-1]] != result
if update:
db_dict[keys[-1]] = result
name += keys[-1]
return name, update
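# Sketch (assumption): with must_incr=True a non-increasing value is
# rejected, keeping energy counters monotonic between retransmissions.
#   self.update_db(['total', 'Total_Generation'], True, 12.5)  # stored, update=True
#   self.update_db(['total', 'Total_Generation'], True, 12.4)  # kept at 12.5, update=False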
def set_db_def_value(self, id: Register, value) -> None:
'''set default value'''
row = self.info_defs[id]
if isinstance(row, dict):
keys = row['name']
self.update_db(keys, False, value)
def reg_clr_at_midnight(self, prfx: str,
check_dependencies: bool = True) -> None:
'''register all registers at the 'ClrAtMidnight' class and
check that the device of every register is available; otherwise
ignore the register.
prfx:str ==> prefix for the home assistant 'stat_t' string
'''
for id, row in self.info_defs.items():
if check_dependencies and 'ha' in row:
ha = row['ha']
if 'dev' in ha:
device = self.info_devs[ha['dev']]
if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501
continue
keys = row['name']
ClrAtMidnight.add(keys, prfx, id)
def get_db_value(self, id: Register, not_found_result: any = None):
'''get database value'''
if id not in self.info_defs:
return not_found_result
row = self.info_defs[id]
if isinstance(row, dict):
keys = row['name']
elm = self.db
for key in keys:
if key not in elm:
return not_found_result
elm = elm[key]
return elm
return not_found_result
def ignore_this_device(self, dep: dict) -> bool:
'''Check the equation in the dep(endency) dict.
Return 'False' only if the equation is valid;
'True' in any other case'''
if 'reg' in dep:
value = self.dev_value(dep['reg'])
if not value:
return True
if 'gte' in dep:
return value < dep['gte']
elif 'less_eq' in dep:
return value > dep['less_eq']
return True
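# e.g. input_pv3 declares {'dep': {'reg': Register.NO_INPUTS, 'gte': 3}}:
# the device is ignored while NO_INPUTS is unset or reports fewer than 3 inputs.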
def set_pv_module_details(self, inv: dict) -> None:
pvs = {'pv1': {'manufacturer': Register.PV1_MANUFACTURER, 'model': Register.PV1_MODEL}, # noqa: E501
'pv2': {'manufacturer': Register.PV2_MANUFACTURER, 'model': Register.PV2_MODEL}, # noqa: E501
'pv3': {'manufacturer': Register.PV3_MANUFACTURER, 'model': Register.PV3_MODEL}, # noqa: E501
'pv4': {'manufacturer': Register.PV4_MANUFACTURER, 'model': Register.PV4_MODEL}, # noqa: E501
'pv5': {'manufacturer': Register.PV5_MANUFACTURER, 'model': Register.PV5_MODEL}, # noqa: E501
'pv6': {'manufacturer': Register.PV6_MANUFACTURER, 'model': Register.PV6_MODEL} # noqa: E501
}
for key, reg in pvs.items():
if key in inv:
if 'manufacturer' in inv[key]:
self.set_db_def_value(reg['manufacturer'],
inv[key]['manufacturer'])
if 'type' in inv[key]:
self.set_db_def_value(reg['model'], inv[key]['type'])
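# e.g. (assumption): inv = {'pv1': {'manufacturer': 'Risen', 'type': 'RSM40'}}
# stores 'Risen' as PV1_MANUFACTURER and 'RSM40' as PV1_MODEL default values.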
View File
@@ -1,178 +0,0 @@
import weakref
import asyncio
import logging
import traceback
import json
import gc
from aiomqtt import MqttCodeError
from asyncio import StreamReader, StreamWriter
from inverter_ifc import InverterIfc
from proxy import Proxy
from async_stream import StreamPtr
from async_stream import AsyncStreamClient
from async_stream import AsyncStreamServer
from config import Config
from infos import Infos
logger_mqtt = logging.getLogger('mqtt')
class InverterBase(InverterIfc, Proxy):
def __init__(self, reader: StreamReader, writer: StreamWriter,
config_id: str, prot_class,
client_mode: bool = False,
remote_prot_class=None):
Proxy.__init__(self)
self._registry.append(weakref.ref(self))
self.addr = writer.get_extra_info('peername')
self.config_id = config_id
if remote_prot_class:
self.prot_class = remote_prot_class
else:
self.prot_class = prot_class
self.__ha_restarts = -1
self.remote = StreamPtr(None)
ifc = AsyncStreamServer(reader, writer,
self.async_publ_mqtt,
self.create_remote,
self.remote)
self.local = StreamPtr(
prot_class(self.addr, ifc, True, client_mode), ifc
)
def __enter__(self):
return self
def __exit__(self, exc_type, exc, tb) -> None:
logging.debug(f'InverterBase.__exit__() {self.addr}')
self.__del_remote()
self.local.stream.close()
self.local.stream = None
self.local.ifc.close()
self.local.ifc = None
# now explicitly call garbage collector to release unreachable objects
unreachable_obj = gc.collect()
logging.debug(
f'InverterBase.__exit: freed unreachable obj: {unreachable_obj}')
def __del_remote(self):
if self.remote.stream:
self.remote.stream.close()
self.remote.stream = None
if self.remote.ifc:
self.remote.ifc.close()
self.remote.ifc = None
async def disc(self, shutdown_started=False) -> None:
if self.remote.stream:
self.remote.stream.shutdown_started = shutdown_started
if self.remote.ifc:
await self.remote.ifc.disc()
if self.local.stream:
self.local.stream.shutdown_started = shutdown_started
if self.local.ifc:
await self.local.ifc.disc()
def healthy(self) -> bool:
logging.debug('InverterBase healthy()')
if self.local.ifc and not self.local.ifc.healthy():
return False
if self.remote.ifc and not self.remote.ifc.healthy():
return False
return True
async def create_remote(self) -> None:
'''Establish a client connection to the TSUN cloud'''
tsun = Config.get(self.config_id)
host = tsun['host']
port = tsun['port']
addr = (host, port)
stream = self.local.stream
try:
logging.info(f'[{stream.node_id}] Connect to {addr}')
connect = asyncio.open_connection(host, port)
reader, writer = await connect
ifc = AsyncStreamClient(
reader, writer, self.local, self.__del_remote)
self.remote.ifc = ifc
if hasattr(stream, 'id_str'):
self.remote.stream = self.prot_class(
addr, ifc, server_side=False,
client_mode=False, id_str=stream.id_str)
else:
self.remote.stream = self.prot_class(
addr, ifc, server_side=False,
client_mode=False)
logging.info(f'[{self.remote.stream.node_id}:'
f'{self.remote.stream.conn_no}] '
f'Connected to {addr}')
asyncio.create_task(self.remote.ifc.client_loop(addr))
except (ConnectionRefusedError, TimeoutError) as error:
logging.info(f'{error}')
except Exception:
Infos.inc_counter('SW_Exception')
logging.error(
f"Inverter: Exception for {addr}:\n"
f"{traceback.format_exc()}")
async def async_publ_mqtt(self) -> None:
'''publish data to MQTT broker'''
stream = self.local.stream
if not stream or not stream.unique_id:
return
# check if new inverter or collector infos are available, or whether
# home assistant has changed its status back to online
try:
if (('inverter' in stream.new_data and stream.new_data['inverter'])
or ('collector' in stream.new_data and
stream.new_data['collector'])
or self.mqtt.ha_restarts != self.__ha_restarts):
await self._register_proxy_stat_home_assistant()
await self.__register_home_assistant(stream)
self.__ha_restarts = self.mqtt.ha_restarts
for key in stream.new_data:
await self.__async_publ_mqtt_packet(stream, key)
for key in Infos.new_stat_data:
await Proxy._async_publ_mqtt_proxy_stat(key)
except MqttCodeError as error:
logging.error(f'Mqtt except: {error}')
except Exception:
Infos.inc_counter('SW_Exception')
logging.error(
f"Inverter: Exception:\n"
f"{traceback.format_exc()}")
async def __async_publ_mqtt_packet(self, stream, key):
db = stream.db.db
if key in db and stream.new_data[key]:
data_json = json.dumps(db[key])
node_id = stream.node_id
logger_mqtt.debug(f'{key}: {data_json}')
await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501
stream.new_data[key] = False
async def __register_home_assistant(self, stream) -> None:
'''register all our topics at home assistant'''
for data_json, component, node_id, id in stream.db.ha_confs(
self.entity_prfx, stream.node_id, stream.unique_id,
stream.sug_area):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
f" node_id:'{node_id}' {data_json}")
await self.mqtt.publish(f"{self.discovery_prfx}{component}"
f"/{node_id}{id}/config", data_json)
stream.db.reg_clr_at_midnight(f'{self.entity_prfx}{stream.node_id}')
View File
@@ -1,37 +0,0 @@
from abc import abstractmethod
import logging
from asyncio import StreamReader, StreamWriter
from iter_registry import AbstractIterMeta
logger_mqtt = logging.getLogger('mqtt')
class InverterIfc(metaclass=AbstractIterMeta):
_registry = []
@abstractmethod
def __init__(self, reader: StreamReader, writer: StreamWriter,
config_id: str, prot_class,
client_mode: bool):
pass # pragma: no cover
@abstractmethod
def __enter__(self):
pass # pragma: no cover
@abstractmethod
def __exit__(self, exc_type, exc, tb):
pass # pragma: no cover
@abstractmethod
def healthy(self) -> bool:
pass # pragma: no cover
@abstractmethod
async def disc(self, shutdown_started=False) -> None:
pass # pragma: no cover
@abstractmethod
async def create_remote(self) -> None:
pass # pragma: no cover
View File
@@ -1,9 +0,0 @@
from abc import ABCMeta
class AbstractIterMeta(ABCMeta):
def __iter__(cls):
for ref in cls._registry:
obj = ref()
if obj is not None:
yield obj
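# Sketch (assumption): the metaclass makes the class itself iterable over
# its weakly referenced instances, skipping already collected ones:
#   for inv in InverterBase:
#       inv.healthy()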
View File
@@ -1,76 +0,0 @@
[loggers]
keys=root,tracer,mesg,conn,data,mqtt,asyncio
[handlers]
keys=console_handler,file_handler_name1,file_handler_name2
[formatters]
keys=console_formatter,file_formatter
[logger_root]
level=DEBUG
handlers=console_handler,file_handler_name1
[logger_conn]
level=DEBUG
handlers=console_handler,file_handler_name1
propagate=0
qualname=conn
[logger_mqtt]
level=INFO
handlers=console_handler,file_handler_name1
propagate=0
qualname=mqtt
[logger_asyncio]
level=INFO
handlers=console_handler,file_handler_name1
propagate=0
qualname=asyncio
[logger_data]
level=DEBUG
handlers=file_handler_name1
propagate=0
qualname=data
[logger_mesg]
level=DEBUG
handlers=file_handler_name2
propagate=0
qualname=msg
[logger_tracer]
level=INFO
handlers=file_handler_name2
propagate=0
qualname=tracer
[handler_console_handler]
class=StreamHandler
level=DEBUG
formatter=console_formatter
[handler_file_handler_name1]
class=handlers.TimedRotatingFileHandler
level=INFO
formatter=file_formatter
args=('log/proxy.log', when:='midnight')
[handler_file_handler_name2]
class=handlers.TimedRotatingFileHandler
level=NOTSET
formatter=file_formatter
args=('log/trace.log', when:='midnight')
[formatter_console_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s
datefmt=%Y-%m-%d %H:%M:%S
[formatter_file_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s
datefmt=%Y-%m-%d %H:%M:%S
View File
@@ -1,203 +0,0 @@
import logging
import weakref
from typing import Callable
from enum import Enum
from async_ifc import AsyncIfc
from protocol_ifc import ProtocolIfc
from infos import Infos, Register
from modbus import Modbus
from my_timer import Timer
logger = logging.getLogger('msg')
def __hex_val(n, data, data_len):
line = ''
for j in range(n-16, n):
if j >= data_len:
break
line += '%02x ' % abs(data[j])
return line
def __asc_val(n, data, data_len):
line = ''
for j in range(n-16, n):
if j >= data_len:
break
c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.'
line += '%c' % c
return line
def hex_dump(data, data_len) -> list:
n = 0
lines = []
for i in range(0, data_len, 16):
line = ' '
line += '%04x | ' % (i)
n += 16
line += __hex_val(n, data, data_len)
line += ' ' * (3 * 16 + 9 - len(line)) + ' | '
line += __asc_val(n, data, data_len)
lines.append(line)
return lines
def hex_dump_str(data, data_len):
lines = hex_dump(data, data_len)
return '\n'.join(lines)
def hex_dump_memory(level, info, data, data_len):
lines = []
lines.append(info)
tracer = logging.getLogger('tracer')
if not tracer.isEnabledFor(level):
return
lines += hex_dump(data, data_len)
tracer.log(level, '\n'.join(lines))
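# Sketch (assumption): hex_dump_memory(logging.INFO, 'RX:', data, len(data))
# logs lines of the form 'offset | up to 16 hex bytes | printable ASCII',
# with non-printable bytes rendered as '.'.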
class State(Enum):
'''state of the logical connection'''
init = 0
'''just created'''
received = 1
'''at least one packet received'''
up = 2
'''at least one cmd-rsp transaction'''
pend = 3
'''inverter transaction pending, don't send MODBUS cmds'''
closed = 4
'''connection closed'''
class Message(ProtocolIfc):
MAX_START_TIME = 400
'''maximum time without a received msg in sec'''
MAX_INV_IDLE_TIME = 120
'''maximum time without a received msg from the inverter in sec'''
MAX_DEF_IDLE_TIME = 360
'''maximum default time without a received msg in sec'''
MB_START_TIMEOUT = 40
'''start delay for Modbus polling in server mode'''
MB_REGULAR_TIMEOUT = 60
'''regular Modbus polling time in server mode'''
def __init__(self, node_id, ifc: "AsyncIfc", server_side: bool,
send_modbus_cb: Callable[[bytes, int, str], None],
mb_timeout: int):
self._registry.append(weakref.ref(self))
self.server_side = server_side
self.ifc = ifc
self.node_id = node_id
if server_side:
self.mb = Modbus(send_modbus_cb, mb_timeout)
self.mb_timer = Timer(self.mb_timout_cb, self.node_id)
else:
self.mb = None
self.mb_timer = None
self.header_valid = False
self.header_len = 0
self.data_len = 0
self.unique_id = 0
self.sug_area = ''
self.new_data = {}
self.state = State.init
self.shutdown_started = False
self.modbus_elms = 0 # for unit tests
self.mb_timeout = self.MB_REGULAR_TIMEOUT
self.mb_first_timeout = self.MB_START_TIMEOUT
'''timer value for next Modbus polling request'''
self.modbus_polling = False
@property
def node_id(self):
return self._node_id
@node_id.setter
def node_id(self, value):
self._node_id = value
self.ifc.set_node_id(value)
'''
Empty methods that have to be implemented by any child class
which doesn't use asyncio
'''
def _read(self) -> None: # read data bytes from socket and copy them
# to our _recv_buffer
return # pragma: no cover
def _set_mqtt_timestamp(self, key, ts: float | None):
if key not in self.new_data or \
not self.new_data[key]:
if key == 'grid':
info_id = Register.TS_GRID
elif key == 'input':
info_id = Register.TS_INPUT
elif key == 'total':
info_id = Register.TS_TOTAL
else:
return
# tstr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(ts))
# logger.info(f'update: key: {key} ts:{tstr}'
self.db.set_db_def_value(info_id, round(ts))
def _timeout(self) -> int:
if self.state == State.init or self.state == State.received:
to = self.MAX_START_TIME
elif self.state == State.up and \
self.server_side and self.modbus_polling:
to = self.MAX_INV_IDLE_TIME
else:
to = self.MAX_DEF_IDLE_TIME
return to
def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
if self.state != State.up:
logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,'
' as the state is not UP')
return
self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl)
async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
self._send_modbus_cmd(func, addr, val, log_lvl)
'''
Our public methods
'''
def close(self) -> None:
if self.server_side:
# set inverter state to offline, if output power is very low
logging.debug('close power: '
f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}')
if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2:
self.db.set_db_def_value(Register.INVERTER_STATUS, 0)
self.new_data['env'] = True
self.mb_timer.close()
self.state = State.closed
self.ifc.rx_set_cb(None)
self.ifc.prot_set_timeout_cb(None)
self.ifc.prot_set_init_new_client_conn_cb(None)
self.ifc.prot_set_update_header_cb(None)
self.ifc = None
if self.mb:
self.mb.close()
self.mb = None
# pragma: no cover
def inc_counter(self, counter: str) -> None:
self.db.inc_counter(counter)
Infos.new_stat_data['proxy'] = True
def dec_counter(self, counter: str) -> None:
self.db.dec_counter(counter)
Infos.new_stat_data['proxy'] = True
View File
@@ -1,345 +0,0 @@
'''MODBUS module for TSUN inverter support
TSUN uses MODBUS in the RTU transmission mode over a serial line.
see: https://modbus.org/docs/Modbus_Application_Protocol_V1_1b3.pdf
see: https://modbus.org/docs/Modbus_over_serial_line_V1_02.pdf
A Modbus PDU consists of: 'Function-Code' + 'Data'
A Modbus RTU message consists of: 'Addr' + 'Modbus-PDU' + 'CRC-16'
The inverter is a MODBUS server and the proxy the MODBUS client.
The 16-bit CRC is known as CRC-16-ANSI (reversed)
see: https://en.wikipedia.org/wiki/Computation_of_cyclic_redundancy_checks
'''
import struct
import logging
import asyncio
from typing import Generator, Callable
from infos import Register, Fmt
logger = logging.getLogger('data')
CRC_POLY = 0xA001 # (LSBF/reverse)
CRC_INIT = 0xFFFF
class Modbus():
'''Simple MODBUS implementation with TX queue and retransmit timer'''
INV_ADDR = 1
'''MODBUS server address of the TSUN inverter'''
READ_REGS = 3
'''MODBUS function code: Read Holding Register'''
READ_INPUTS = 4
'''MODBUS function code: Read Input Register'''
WRITE_SINGLE_REG = 6
'''Modbus function code: Write Single Register'''
__crc_tab = []
mb_reg_mapping = {
0x2000: {'reg': Register.BOOT_STATUS, 'fmt': '!H'}, # noqa: E501
0x2001: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, # noqa: E501
0x2003: {'reg': Register.WORK_MODE, 'fmt': '!H'},
0x2006: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'},
0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501
0x2008: {'reg': Register.RATED_LEVEL, 'fmt': '!H'},
0x2009: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501
0x200a: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'},
0x2010: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'},
0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501
0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501
0x3001: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501
0x3002: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501
0x3003: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501
0x3004: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501
0x3005: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501
0x3006: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501
0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501
0x3009: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x300a: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x300b: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501
# 0x300d
0x300e: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501
0x300f: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x3010: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x3011: {'reg': Register.PV1_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x3012: {'reg': Register.PV1_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x3013: {'reg': Register.PV2_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x3014: {'reg': Register.PV2_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x3015: {'reg': Register.PV2_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x3016: {'reg': Register.PV3_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x3017: {'reg': Register.PV3_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x3018: {'reg': Register.PV3_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x3019: {'reg': Register.PV4_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x301a: {'reg': Register.PV4_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x301b: {'reg': Register.PV4_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x301c: {'reg': Register.DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x301d: {'reg': Register.TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x301f: {'reg': Register.PV1_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x3020: {'reg': Register.PV1_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x3022: {'reg': Register.PV2_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x3023: {'reg': Register.PV2_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x3025: {'reg': Register.PV3_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x3026: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x3028: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x3029: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
# 0x302a
}
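# Worked example for the mapping above: register 0x3009 delivers the
# grid voltage as an unsigned 16-bit raw value with ratio 0.1, so a raw
# reading of 231 (0x00E7) is reported as 23.1 V.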
def __init__(self, snd_handler: Callable[[bytes, int, str], None],
timeout: int = 1):
if not len(self.__crc_tab):
self.__build_crc_tab(CRC_POLY)
self.que = asyncio.Queue(100)
self.snd_handler = snd_handler
'''Send handler to transmit a MODBUS RTU request'''
self.rsp_handler = None
'''Response handler to forward the response'''
self.timeout = timeout
'''MODBUS response timeout in seconds'''
self.max_retries = 1
'''Max retransmit for MODBUS requests'''
self.retry_cnt = 0
self.last_req = b''
self.counter = {}
'''Dictionary with statistic counters'''
self.counter['timeouts'] = 0
self.counter['retries'] = {}
for i in range(0, self.max_retries+1):
self.counter['retries'][f'{i}'] = 0
self.last_log_lvl = logging.DEBUG
self.last_addr = 0
self.last_fcode = 0
self.last_len = 0
self.last_reg = 0
self.err = 0
self.loop = asyncio.get_event_loop()
self.req_pend = False
self.tim = None
self.node_id = ''
def close(self):
"""free the queue and erase the callback handlers"""
logging.debug('Modbus close:')
self.__stop_timer()
self.rsp_handler = None
self.snd_handler = None
while not self.que.empty():
self.que.get_nowait()
def set_node_id(self, node_id: str):
self.node_id = node_id
def build_msg(self, addr: int, func: int, reg: int, val: int,
log_lvl=logging.DEBUG) -> None:
"""Build MODBUS RTU request frame and add it to the tx queue
Keyword arguments:
addr: RTU server address (inverter)
func: MODBUS function code
reg: 16-bit register number
val: 16-bit value
"""
msg = struct.pack('>BBHH', addr, func, reg, val)
msg += struct.pack('<H', self.__calc_crc(msg))
self.que.put_nowait({'req': msg,
'rsp_hdl': None,
'log_lvl': log_lvl})
if self.que.qsize() == 1:
self.__send_next_from_que()
def recv_req(self, buf: bytes,
rsp_handler: Callable[[None], None] = None) -> bool:
"""Add the received Modbus RTU request to the tx queue
Keyword arguments:
buf: Modbus RTU pdu incl ADDR byte and trailing CRC
rsp_handler: Callback, if the received pdu is valid
Returns:
True: PDU was added to the queue
False: PDU was ignored, due to an error
"""
# logging.info(f'recv_req: first byte modbus:{buf[0]} len:{len(buf)}')
if not self.__check_crc(buf):
self.err = 1
logger.error('Modbus recv: CRC error')
return False
self.que.put_nowait({'req': buf,
'rsp_hdl': rsp_handler,
'log_lvl': logging.INFO})
if self.que.qsize() == 1:
self.__send_next_from_que()
return True
def recv_resp(self, info_db, buf: bytes) -> \
Generator[tuple[str, bool, int | float | str], None, None]:
"""Generator which check and parse a received MODBUS response.
Keyword arguments:
info_db: database for info lockups
buf: received Modbus RTU response frame
Returns on error and set Self.err to:
1: CRC error
2: Wrong server address
3: Unexpected function code
4: Unexpected data length
5: No MODBUS request pending
"""
# logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}')
fcode = buf[1]
data_available = self.last_addr == self.INV_ADDR and \
(fcode == 3 or fcode == 4)
if self.__resp_error_check(buf, data_available):
return
if data_available:
elmlen = buf[2] >> 1
first_reg = self.last_reg # save last_reg before sending next pdu
self.__stop_timer() # stop timer and send next pdu
yield from self.__process_data(info_db, buf, first_reg, elmlen)
else:
self.__stop_timer()
self.counter['retries'][f'{self.retry_cnt}'] += 1
if self.rsp_handler:
self.rsp_handler()
self.__send_next_from_que()
def __resp_error_check(self, buf: bytes, data_available: bool) -> bool:
'''Check the MODBUS response for errors; returns True if one occurs'''
if not self.req_pend:
self.err = 5
return True
if not self.__check_crc(buf):
logger.error(f'[{self.node_id}] Modbus resp: CRC error')
self.err = 1
return True
if buf[0] != self.last_addr:
logger.info(f'[{self.node_id}] Modbus resp: Wrong addr {buf[0]}')
self.err = 2
return True
fcode = buf[1]
if fcode != self.last_fcode:
logger.info(f'[{self.node_id}] Modbus: Wrong fcode {fcode}'
f' != {self.last_fcode}')
self.err = 3
return True
if data_available:
elmlen = buf[2] >> 1
if elmlen != self.last_len:
logger.info(f'[{self.node_id}] Modbus: len error {elmlen}'
f' != {self.last_len}')
self.err = 4
return True
return False
def __process_data(self, info_db, buf: bytes, first_reg, elmlen):
'''Generator over received registers, updates the db'''
for i in range(0, elmlen):
addr = first_reg+i
if addr in self.mb_reg_mapping:
row = self.mb_reg_mapping[addr]
info_id = row['reg']
keys, level, unit, must_incr = info_db._key_obj(info_id)
if keys:
result = Fmt.get_value(buf, 3+2*i, row)
name, update = info_db.update_db(keys, must_incr,
result)
yield keys[0], update, result
if update:
info_db.tracer.log(level,
f'[{self.node_id}] MODBUS: {name}'
f' : {result}{unit}')
'''
MODBUS response timer
'''
def __start_timer(self) -> None:
'''Start response timer and set `req_pend` to True'''
self.req_pend = True
self.tim = self.loop.call_later(self.timeout, self.__timeout_cb)
# logging.debug(f'Modbus start timer {self}')
def __stop_timer(self) -> None:
'''Stop response timer and set `req_pend` to False'''
self.req_pend = False
# logging.debug(f'Modbus stop timer {self}')
if self.tim:
self.tim.cancel()
self.tim = None
def __timeout_cb(self) -> None:
'''Response timeout handler: retransmit the PDU or send the next one'''
self.req_pend = False
if self.retry_cnt < self.max_retries:
logger.debug(f'Modbus retrans {self}')
self.retry_cnt += 1
self.__start_timer()
self.snd_handler(self.last_req, self.last_log_lvl, state='Retrans')
else:
logger.info(f'[{self.node_id}] Modbus timeout '
f'(FCode: {self.last_fcode} '
f'Reg: 0x{self.last_reg:04x}, '
f'{self.last_len})')
self.counter['timeouts'] += 1
self.__send_next_from_que()
def __send_next_from_que(self) -> None:
'''Get next MODBUS pdu from queue and transmit it'''
if self.req_pend:
return
try:
item = self.que.get_nowait()
req = item['req']
self.last_req = req
self.rsp_handler = item['rsp_hdl']
self.last_log_lvl = item['log_lvl']
self.last_addr = req[0]
self.last_fcode = req[1]
res = struct.unpack_from('>HH', req, 2)
self.last_reg = res[0]
self.last_len = res[1]
self.retry_cnt = 0
self.__start_timer()
self.snd_handler(self.last_req, self.last_log_lvl, state='Command')
except asyncio.QueueEmpty:
pass
'''
Helper function for CRC-16 handling
'''
def __check_crc(self, msg: bytes) -> bool:
'''Check the CRC-16 and return True if it is valid'''
return 0 == self.__calc_crc(msg)
def __calc_crc(self, buffer: bytes) -> int:
'''Compute the CRC-16 of buffer and return it'''
crc = CRC_INIT
for cur in buffer:
crc = (crc >> 8) ^ self.__crc_tab[(crc ^ cur) & 0xFF]
return crc
def __build_crc_tab(self, poly: int) -> None:
'''Build the CRC-16 helper table; must be called exactly once'''
for index in range(256):
data = index << 1
crc = 0
for _ in range(8, 0, -1):
data >>= 1
if (data ^ crc) & 1:
crc = (crc >> 1) ^ poly
else:
crc >>= 1
self.__crc_tab.append(crc)
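For reference, a minimal standalone sketch of the CRC-16 handling above (an
editorial example using the same CRC_POLY/CRC_INIT constants; the helper
names are hypothetical). A frame with a valid trailing CRC leaves a residual
CRC of 0, which is exactly what __check_crc tests:

import struct

CRC_POLY = 0xA001  # (LSBF/reverse)
CRC_INIT = 0xFFFF

def build_crc_tab(poly: int) -> list[int]:
    '''Mirror of Modbus.__build_crc_tab, for illustration'''
    tab = []
    for index in range(256):
        data = index << 1
        crc = 0
        for _ in range(8, 0, -1):
            data >>= 1
            if (data ^ crc) & 1:
                crc = (crc >> 1) ^ poly
            else:
                crc >>= 1
        tab.append(crc)
    return tab

def calc_crc(buf: bytes, tab: list[int]) -> int:
    '''Mirror of Modbus.__calc_crc, for illustration'''
    crc = CRC_INIT
    for cur in buf:
        crc = (crc >> 8) ^ tab[(crc ^ cur) & 0xFF]
    return crc

tab = build_crc_tab(CRC_POLY)
# 'Read Holding Registers': addr=1, func=3, start=0x3000, count=0x30
msg = struct.pack('>BBHH', 1, 3, 0x3000, 0x30)
msg += struct.pack('<H', calc_crc(msg, tab))
assert calc_crc(msg, tab) == 0  # a valid frame yields a zero residual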

View File

@@ -1,88 +0,0 @@
import logging
import traceback
import asyncio
from config import Config
from gen3plus.inverter_g3p import InverterG3P
from infos import Infos
logger = logging.getLogger('conn')
class ModbusConn():
def __init__(self, host, port):
self.host = host
self.port = port
self.addr = (host, port)
self.inverter = None
async def __aenter__(self) -> 'InverterG3P':
'''Establish a client connection to the TSUN cloud'''
connection = asyncio.open_connection(self.host, self.port)
reader, writer = await connection
self.inverter = InverterG3P(reader, writer,
client_mode=True)
self.inverter.__enter__()
stream = self.inverter.local.stream
logging.info(f'[{stream.node_id}:{stream.conn_no}] '
f'Connected to {self.addr}')
Infos.inc_counter('Inverter_Cnt')
await self.inverter.local.ifc.publish_outstanding_mqtt()
return self.inverter
async def __aexit__(self, exc_type, exc, tb):
Infos.dec_counter('Inverter_Cnt')
await self.inverter.local.ifc.publish_outstanding_mqtt()
self.inverter.__exit__(exc_type, exc, tb)
class ModbusTcp():
def __init__(self, loop, tim_restart=10) -> None:
self.tim_restart = tim_restart
inverters = Config.get('inverters')
# logging.info(f'Inverters: {inverters}')
for inv in inverters.values():
if (type(inv) is dict
and 'monitor_sn' in inv
and 'client_mode' in inv):
client = inv['client_mode']
# logging.info(f"SerialNo:{inv['monitor_sn']} host:{client['host']} port:{client['port']}") # noqa: E501
loop.create_task(self.modbus_loop(client['host'],
client['port'],
inv['monitor_sn'],
client['forward']))
async def modbus_loop(self, host, port,
snr: int, forward: bool) -> None:
'''Loop for receiving messages from the TSUN cloud (client-side)'''
while True:
try:
async with ModbusConn(host, port) as inverter:
stream = inverter.local.stream
await stream.send_start_cmd(snr, host, forward)
await stream.ifc.loop()
logger.info(f'[{stream.node_id}:{stream.conn_no}] '
f'Connection closed - Shutdown: '
f'{stream.shutdown_started}')
if stream.shutdown_started:
return
del inverter # decrease ref counter after the with block
except (ConnectionRefusedError, TimeoutError) as error:
logging.debug(f'Inv-conn:{error}')
except OSError as error:
if error.errno == 113: # pragma: no cover
logging.debug(f'os-error:{error}')
else:
logging.info(f'os-error: {error}')
except Exception:
logging.error(
f"ModbusTcpCreate: Exception for {(host, port)}:\n"
f"{traceback.format_exc()}")
await asyncio.sleep(self.tim_restart)

View File

@@ -1,182 +0,0 @@
import asyncio
import logging
import aiomqtt
import traceback
from modbus import Modbus
from messages import Message
from config import Config
from singleton import Singleton
logger_mqtt = logging.getLogger('mqtt')
class Mqtt(metaclass=Singleton):
__client = None
__cb_mqtt_is_up = None
def __init__(self, cb_mqtt_is_up):
logger_mqtt.debug('MQTT: __init__')
if cb_mqtt_is_up:
self.__cb_mqtt_is_up = cb_mqtt_is_up
loop = asyncio.get_event_loop()
self.task = loop.create_task(self.__loop())
self.ha_restarts = 0
ha = Config.get('ha')
self.ha_status_topic = f"{ha['auto_conf_prefix']}/status"
self.mb_rated_topic = f"{ha['entity_prefix']}/+/rated_load"
self.mb_out_coeff_topic = f"{ha['entity_prefix']}/+/out_coeff"
self.mb_reads_topic = f"{ha['entity_prefix']}/+/modbus_read_regs"
self.mb_inputs_topic = f"{ha['entity_prefix']}/+/modbus_read_inputs"
self.mb_at_cmd_topic = f"{ha['entity_prefix']}/+/at_cmd"
@property
def ha_restarts(self):
return self._ha_restarts
@ha_restarts.setter
def ha_restarts(self, value):
self._ha_restarts = value
async def close(self) -> None:
logger_mqtt.debug('MQTT: close')
self.task.cancel()
try:
await self.task
except (asyncio.CancelledError, Exception) as e:
logging.debug(f"Mqtt.close: exception: {e} ...")
async def publish(self, topic: str, payload: str | bytes | bytearray
| int | float | None = None) -> None:
if self.__client:
await self.__client.publish(topic, payload)
async def __loop(self) -> None:
mqtt = Config.get('mqtt')
logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:'
f'{mqtt["port"]} '
f'user:{mqtt["user"]}')
self.__client = aiomqtt.Client(hostname=mqtt['host'],
port=mqtt['port'],
username=mqtt['user'],
password=mqtt['passwd'])
interval = 5 # Seconds
while True:
try:
async with self.__client:
logger_mqtt.info('MQTT broker connection established')
if self.__cb_mqtt_is_up:
await self.__cb_mqtt_is_up()
await self.__client.subscribe(self.ha_status_topic)
await self.__client.subscribe(self.mb_rated_topic)
await self.__client.subscribe(self.mb_out_coeff_topic)
await self.__client.subscribe(self.mb_reads_topic)
await self.__client.subscribe(self.mb_inputs_topic)
await self.__client.subscribe(self.mb_at_cmd_topic)
async for message in self.__client.messages:
await self.dispatch_msg(message)
except aiomqtt.MqttError:
if Config.is_default('mqtt'):
logger_mqtt.info(
"MQTT is unconfigured; Check your config.toml!")
interval = 30
else:
interval = 5 # Seconds
logger_mqtt.info(
f"Connection lost; Reconnecting in {interval}"
" seconds ...")
await asyncio.sleep(interval)
except asyncio.CancelledError:
logger_mqtt.debug("MQTT task cancelled")
self.__client = None
return
except Exception:
# self.inc_counter('SW_Exception') # fixme
logger_mqtt.error(
f"Exception:\n"
f"{traceback.format_exc()}")
async def dispatch_msg(self, message):
if message.topic.matches(self.ha_status_topic):
status = message.payload.decode("UTF-8")
logger_mqtt.info('Home-Assistant Status:'
f' {status}')
if status == 'online':
self.ha_restarts += 1
await self.__cb_mqtt_is_up()
if message.topic.matches(self.mb_rated_topic):
await self.modbus_cmd(message,
Modbus.WRITE_SINGLE_REG,
1, 0x2008)
if message.topic.matches(self.mb_out_coeff_topic):
payload = message.payload.decode("UTF-8")
try:
val = round(float(payload) * 1024/100)
if val < 0 or val > 1024:
logger_mqtt.error('out_coeff: value must be in'
' the range 0..100,'
f' got: {payload}')
else:
await self.modbus_cmd(message,
Modbus.WRITE_SINGLE_REG,
0, 0x202c, val)
except Exception:
pass
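# e.g. a payload of '75' maps to round(75 * 1024 / 100) = 768, since
# the register expects the range 0..1024 for 0..100 %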
if message.topic.matches(self.mb_reads_topic):
await self.modbus_cmd(message,
Modbus.READ_REGS, 2)
if message.topic.matches(self.mb_inputs_topic):
await self.modbus_cmd(message,
Modbus.READ_INPUTS, 2)
if message.topic.matches(self.mb_at_cmd_topic):
await self.at_cmd(message)
def each_inverter(self, message, func_name: str):
topic = str(message.topic)
node_id = topic.split('/')[1] + '/'
for m in Message:
if m.server_side and (m.node_id == node_id):
logger_mqtt.debug(f'Found: {node_id}')
fnc = getattr(m, func_name, None)
if callable(fnc):
yield fnc
else:
logger_mqtt.warning(f'Cmd not supported by: {node_id}')
break
else:
logger_mqtt.warning(f'Node_id: {node_id} not found')
async def modbus_cmd(self, message, func, params=0, addr=0, val=0):
payload = message.payload.decode("UTF-8")
for fnc in self.each_inverter(message, "send_modbus_cmd"):
res = payload.split(',')
if params > 0 and params != len(res):
logger_mqtt.error(f'Parameter expected: {params}, '
f'got: {len(res)}')
return
if params == 1:
val = int(payload)
elif params == 2:
addr = int(res[0], base=16)
val = int(res[1])  # length
await fnc(func, addr, val, logging.INFO)
async def at_cmd(self, message):
payload = message.payload.decode("UTF-8")
for fnc in self.each_inverter(message, "send_at_cmd"):
await fnc(payload)
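As a sketch of the command topics subscribed above (an editorial example;
the broker host 'mqtt' matches the config default and the node_id
'PV-Garage' is a placeholder):

import asyncio
import aiomqtt

async def set_rated_load():
    async with aiomqtt.Client(hostname='mqtt') as client:
        # matches mb_rated_topic 'tsun/+/rated_load' and is dispatched
        # to modbus_cmd(..., Modbus.WRITE_SINGLE_REG, 1, 0x2008)
        await client.publish('tsun/PV-Garage/rated_load', '600')

asyncio.run(set_rated_load())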

View File

@@ -1,35 +0,0 @@
import asyncio
import logging
from itertools import count
class Timer:
def __init__(self, cb, id_str: str = ''):
self.__timeout_cb = cb
self.loop = asyncio.get_event_loop()
self.tim = None
self.id_str = id_str
self.exp_count = count(0)
def start(self, timeout: float) -> None:
'''Start timer with timeout seconds'''
if self.tim:
self.tim.cancel()
self.tim = self.loop.call_later(timeout, self.__timeout)
logging.debug(f'[{self.id_str}]Start timer')
def stop(self) -> None:
'''Stop timer'''
logging.debug(f'[{self.id_str}]Stop timer')
if self.tim:
self.tim.cancel()
self.tim = None
def __timeout(self) -> None:
'''Timer expiry handler'''
logging.debug(f'[{self.id_str}]Timer expired')
self.__timeout_cb(next(self.exp_count))
def close(self) -> None:
self.stop()
self.__timeout_cb = None
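A minimal usage sketch (editorial example, not part of the module):

import asyncio

async def demo():
    tim = Timer(lambda cnt: print(f'expired {cnt} times'), id_str='demo')
    tim.start(1.0)             # single shot, fires after one second
    await asyncio.sleep(1.5)   # let the timer expire once
    tim.close()

asyncio.run(demo())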

View File

@@ -1,17 +0,0 @@
from abc import abstractmethod
from async_ifc import AsyncIfc
from iter_registry import AbstractIterMeta
class ProtocolIfc(metaclass=AbstractIterMeta):
_registry = []
@abstractmethod
def __init__(self, addr, ifc: "AsyncIfc", server_side: bool,
client_mode: bool = False, id_str=b''):
pass # pragma: no cover
@abstractmethod
def close(self):
pass # pragma: no cover
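A minimal conforming child class could look like this (hypothetical names):

class DemoProtocol(ProtocolIfc):
    def __init__(self, addr, ifc: AsyncIfc, server_side: bool,
                 client_mode: bool = False, id_str=b''):
        self.addr = addr
        self.ifc = ifc

    def close(self):
        self.ifc = None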

View File

@@ -1,101 +0,0 @@
import asyncio
import logging
import json
from config import Config
from mqtt import Mqtt
from infos import Infos
logger_mqtt = logging.getLogger('mqtt')
class Proxy():
'''class Proxy is a base class
The class has some class methods for managing common resources, like
the connection to the MQTT broker or the proxy error counters, which
are shared by all inverter connections
Instances of the class are connections to an inverter and can have an
optional link to a remote connection to the TSUN cloud. A remote
connection dies with the inverter connection.
class methods:
class_init(): initialize the common resources of the proxy (MQTT
broker, Proxy DB, etc). Must be called before the
first inverter instance can be created
class_close(): release the common resources of the proxy. Must not
be called until all instances of the class are
destroyed
methods:
create_remote(): Establish a client connection to the TSUN cloud
async_publ_mqtt(): Publish data to MQTT broker
'''
@classmethod
def class_init(cls) -> None:
logging.debug('Proxy.class_init')
# initialize the proxy statistics
Infos.static_init()
cls.db_stat = Infos()
ha = Config.get('ha')
cls.entity_prfx = ha['entity_prefix'] + '/'
cls.discovery_prfx = ha['discovery_prefix'] + '/'
cls.proxy_node_id = ha['proxy_node_id'] + '/'
cls.proxy_unique_id = ha['proxy_unique_id']
# call Mqtt singleton to establish the connection to the mqtt broker
cls.mqtt = Mqtt(cls._cb_mqtt_is_up)
# register all counters which should be reset at midnight.
# This is needed if the proxy is restarted before midnight
# and the inverters are offline, because the normal registering
# needs an update on the counters.
# Without this registration here the counters would not be
# reset at midnight when you restart the proxy just before
# midnight!
inverters = Config.get('inverters')
# logger.debug(f'Proxys: {inverters}')
for inv in inverters.values():
if (type(inv) is dict):
node_id = inv['node_id']
cls.db_stat.reg_clr_at_midnight(f'{cls.entity_prfx}{node_id}',
check_dependencies=False)
@classmethod
async def _cb_mqtt_is_up(cls) -> None:
logging.info('Initialize proxy device on home assistant')
# register proxy status counters at home assistant
await cls._register_proxy_stat_home_assistant()
# send values of the proxy status counters
await asyncio.sleep(0.5) # wait a bit, before sending data
Infos.new_stat_data['proxy'] = True # force sending data to sync ha
await cls._async_publ_mqtt_proxy_stat('proxy')
@classmethod
async def _register_proxy_stat_home_assistant(cls) -> None:
'''register all our topics at home assistant'''
for data_json, component, node_id, id in cls.db_stat.ha_proxy_confs(
cls.entity_prfx, cls.proxy_node_id, cls.proxy_unique_id):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") # noqa: E501
await cls.mqtt.publish(f'{cls.discovery_prfx}{component}/{node_id}{id}/config', data_json) # noqa: E501
@classmethod
async def _async_publ_mqtt_proxy_stat(cls, key) -> None:
stat = Infos.stat
if key in stat and Infos.new_stat_data[key]:
data_json = json.dumps(stat[key])
node_id = cls.proxy_node_id
logger_mqtt.debug(f'{key}: {data_json}')
await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}",
data_json)
Infos.new_stat_data[key] = False
@classmethod
def class_close(cls, loop) -> None: # pragma: no cover
logging.debug('Proxy.class_close')
logging.info('Close MQTT Task')
loop.run_until_complete(cls.mqtt.close())
cls.mqtt = None
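The intended lifecycle of these class methods (a sketch mirroring how
server.py drives them; it assumes Config.class_init() has already run):

import asyncio

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
Proxy.class_init()            # once, before the first inverter connection
try:
    loop.run_forever()        # inverter instances come and go in between
finally:
    Proxy.class_close(loop)   # once, after all connections are closed
    loop.close()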

View File

@@ -1,30 +0,0 @@
import logging
import json
from mqtt import Mqtt
from aiocron import crontab
from infos import ClrAtMidnight
logger_mqtt = logging.getLogger('mqtt')
class Schedule:
mqtt = None
count = 0
@classmethod
def start(cls) -> None:
'''Start the scheduler and schedule the tasks (cron jobs)'''
logging.debug("Scheduler init")
cls.mqtt = Mqtt(None)
crontab('0 0 * * *', func=cls.atmidnight, start=True)
@classmethod
async def atmidnight(cls) -> None:
'''Clear daily counters at midnight'''
logging.info("Clear daily counters at midnight")
for key, data in ClrAtMidnight.elm():
logger_mqtt.debug(f'{key}: {data}')
data_json = json.dumps(data)
await cls.mqtt.publish(f"{key}", data_json)

View File

@@ -1,191 +0,0 @@
import logging
import asyncio
import signal
import os
from asyncio import StreamReader, StreamWriter
from aiohttp import web
from logging import config # noqa F401
from proxy import Proxy
from inverter_ifc import InverterIfc
from gen3.inverter_g3 import InverterG3
from gen3plus.inverter_g3p import InverterG3P
from scheduler import Schedule
from config import Config
from modbus_tcp import ModbusTcp
routes = web.RouteTableDef()
proxy_is_up = False
@routes.get('/')
async def hello(request):
return web.Response(text="Hello, world")
@routes.get('/-/ready')
async def ready(request):
if proxy_is_up:
status = 200
text = 'Is ready'
else:
status = 503
text = 'Not ready'
return web.Response(status=status, text=text)
@routes.get('/-/healthy')
async def healthy(request):
if proxy_is_up:
# logging.info('web request healthy()')
for inverter in InverterIfc:
try:
res = inverter.healthy()
if not res:
return web.Response(status=503, text="I have a problem")
except Exception as err:
logging.info(f'Exception:{err}')
return web.Response(status=200, text="I'm fine")
async def webserver(addr, port):
'''coro running our webserver'''
app = web.Application()
app.add_routes(routes)
runner = web.AppRunner(app)
await runner.setup()
site = web.TCPSite(runner, addr, port)
await site.start()
logging.info(f'HTTP server listen on port: {port}')
try:
# Normal interaction with aiohttp
while True:
await asyncio.sleep(3600) # sleep forever
except asyncio.CancelledError:
logging.info('HTTP server cancelled')
await runner.cleanup()
logging.debug('HTTP cleanup done')
async def handle_client(reader: StreamReader, writer: StreamWriter, inv_class):
'''Handles a new incoming connection and starts an async loop'''
with inv_class(reader, writer) as inv:
await inv.local.ifc.server_loop()
async def handle_shutdown(web_task):
'''Close all TCP connections and stop the event loop'''
logging.info('Shutdown due to SIGTERM')
global proxy_is_up
proxy_is_up = False
#
# first, disconnect all open TCP connections gracefully
#
for inverter in InverterIfc:
await inverter.disc(True)
logging.info('Proxy disconnecting done')
#
# second, cancel the web server
#
web_task.cancel()
await web_task
#
# now cancel all remaining (pending) tasks
#
pending = asyncio.all_tasks()
for task in pending:
task.cancel()
#
# at last, start a coro for stopping the loop
#
logging.debug("Stop event loop")
loop.stop()
def get_log_level() -> int:
'''checks if LOG_LVL is set in the environment and returns the
corresponding logging level'''
log_level = os.getenv('LOG_LVL', 'INFO')
if log_level == 'DEBUG':
log_level = logging.DEBUG
elif log_level == 'WARN':
log_level = logging.WARNING
else:
log_level = logging.INFO
return log_level
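# Hypothetical example: raise the verbosity of a container by setting
#   docker run -e LOG_LVL=DEBUG ...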
if __name__ == "__main__":
#
# Setup our daily, rotating logger
#
serv_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
logging.config.fileConfig('logging.ini')
logging.info(f'Server "{serv_name} - {version}" will be started')
# set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
log_level = get_log_level()
logging.getLogger().setLevel(log_level)
logging.getLogger('msg').setLevel(log_level)
logging.getLogger('conn').setLevel(log_level)
logging.getLogger('data').setLevel(log_level)
logging.getLogger('tracer').setLevel(log_level)
logging.getLogger('asyncio').setLevel(log_level)
# logging.getLogger('mqtt').setLevel(log_level)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# read config file
ConfigErr = Config.class_init()
if ConfigErr is not None:
logging.info(f'ConfigErr: {ConfigErr}')
Proxy.class_init()
Schedule.start()
ModbusTcp(loop)
#
# Create tasks for our listening servers. These must be tasks! If we call
# start_server directly out of our main task, the event loop will be blocked
# and we can't receive and handle the UNIX signals!
#
for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]:
loop.create_task(asyncio.start_server(lambda r, w, i=inv_class:
handle_client(r, w, i),
'0.0.0.0', port))
web_task = loop.create_task(webserver('0.0.0.0', 8127))
#
# Register some UNIX signal handlers for a graceful server shutdown
# on Docker restart and stop
#
for signame in ('SIGINT', 'SIGTERM'):
loop.add_signal_handler(getattr(signal, signame),
lambda loop=loop: asyncio.create_task(
handle_shutdown(web_task)))
loop.set_debug(log_level == logging.DEBUG)
try:
if ConfigErr is None:
proxy_is_up = True
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
logging.info("Event loop is stopped")
Proxy.class_close(loop)
logging.debug('Close event loop')
loop.close()
logging.info(f'Finally, exit Server "{serv_name}"')

View File

@@ -1,14 +0,0 @@
from weakref import WeakValueDictionary
class Singleton(type):
_instances = WeakValueDictionary()
def __call__(cls, *args, **kwargs):
# logger_mqtt.debug('singleton: __call__')
if cls not in cls._instances:
instance = super(Singleton,
cls).__call__(*args, **kwargs)
cls._instances[cls] = instance
return cls._instances[cls]
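Behavioural sketch (with a hypothetical class): as long as a strong
reference exists, every call returns the same instance; once all
references are gone, the WeakValueDictionary entry vanishes and the next
call creates a fresh one:

class Cfg(metaclass=Singleton):
    pass

a = Cfg()
b = Cfg()
assert a is b   # same instance while 'a' keeps the entry alive
del a, b        # entry dropped; the next Cfg() starts over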

View File

@@ -1,4 +0,0 @@
aiomqtt==2.3.0
schema==0.7.7
aiocron==1.8
aiohttp==3.10.11

View File

@@ -23,12 +23,16 @@ fi
cd /home || exit
# Create folders for log and config
mkdir -p proxy/log
mkdir -p proxy/config
echo "Erstelle config.toml"
echo "Create config.toml..."
python3 create_config_toml.py
cd /home/proxy || exit
echo "Starte Webserver"
export VERSION=$(cat /proxy-version.txt)
echo "Start Proxyserver..."
python3 server.py

View File

@@ -0,0 +1,190 @@
# test_with_pytest.py
import pytest
import tomllib
from mock import patch
from cnf.config import Config
from home.create_config_toml import create_config
from test_config import ConfigComplete, ConfigMinimum
class FakeBuffer:
rd = bytearray()
wr = str()
test_buffer = FakeBuffer
class FakeFile():
def __init__(self):
self.buf = test_buffer
def __enter__(self):
return self
def __exit__(self, exc_type, exc, tb):
pass
class FakeOptionsFile(FakeFile):
def read(self):
return self.buf.rd
class FakeConfigFile(FakeFile):
def write(self, data: str):
self.buf.wr += data
@pytest.fixture
def patch_open():
def new_open(file: str, OpenTextMode="r"):
if file == '/data/options.json':
return FakeOptionsFile()
elif file == '/home/proxy/config/config.toml':
# write_buffer += 'bla1'.encode('utf-8')
return FakeConfigFile()
raise TimeoutError
with patch('builtins.open', new_open) as conn:
yield conn
@pytest.fixture
def ConfigTomlEmpty():
return {
'gen3plus': {'at_acl': {'mqtt': {'allow': [], 'block': []},
'tsun': {'allow': [], 'block': []}}},
'ha': {'auto_conf_prefix': 'homeassistant',
'discovery_prefix': 'homeassistant',
'entity_prefix': 'tsun',
'proxy_node_id': 'proxy',
'proxy_unique_id': 'P170000000000001'},
'inverters': {
'allow_all': False
},
'mqtt': {'host': 'mqtt', 'passwd': '', 'port': 1883, 'user': ''},
'solarman': {
'enabled': True,
'host': 'iot.talent-monitoring.com',
'port': 10000,
},
'tsun': {
'enabled': True,
'host': 'logger.talent-monitoring.com',
'port': 5005,
},
}
def test_no_config(patch_open, ConfigTomlEmpty):
_ = patch_open
test_buffer.wr = ""
test_buffer.rd = "" # empty buffer, no json
create_config()
cnf = tomllib.loads(test_buffer.wr)
assert cnf == ConfigTomlEmpty
def test_empty_config(patch_open, ConfigTomlEmpty):
_ = patch_open
test_buffer.wr = ""
test_buffer.rd = "{}" # empty json
create_config()
cnf = tomllib.loads(test_buffer.wr)
assert cnf == ConfigTomlEmpty
def test_full_config(patch_open, ConfigComplete):
_ = patch_open
test_buffer.wr = ""
test_buffer.rd = """
{
"inverters": [
{
"serial": "R170000000000001",
"node_id": "PV-Garage",
"suggested_area": "Garage",
"modbus_polling": false,
"pv1_manufacturer": "man1",
"pv1_type": "type1",
"pv2_manufacturer": "man2",
"pv2_type": "type2"
},
{
"serial": "Y170000000000001",
"monitor_sn": 2000000000,
"node_id": "PV-Garage2",
"suggested_area": "Garage2",
"modbus_polling": true,
"client_mode_host": "InverterIP",
"client_mode_port": 1234,
"pv1_manufacturer": "man1",
"pv1_type": "type1",
"pv2_manufacturer": "man2",
"pv2_type": "type2",
"pv3_manufacturer": "man3",
"pv3_type": "type3",
"pv4_manufacturer": "man4",
"pv4_type": "type4"
}
],
"tsun.enabled": true,
"solarman.enabled": true,
"inverters.allow_all": false,
"gen3plus.at_acl.tsun.allow": [
"AT+Z",
"AT+UPURL",
"AT+SUPDATE"
],
"gen3plus.at_acl.tsun.block": [
"AT+SUPDATE"
],
"gen3plus.at_acl.mqtt.allow": [
"AT+"
],
"gen3plus.at_acl.mqtt.block": [
"AT+SUPDATE"
]
}
"""
create_config()
cnf = tomllib.loads(test_buffer.wr)
validated = Config.conf_schema.validate(cnf)
assert validated == ConfigComplete
def test_minimum_config(patch_open, ConfigMinimum):
_ = patch_open
test_buffer.wr = ""
test_buffer.rd = """
{
"inverters": [
{
"serial": "R170000000000001",
"monitor_sn": 0,
"node_id": "",
"suggested_area": "",
"modbus_polling": true,
"client_mode_host": "InverterIP",
"client_mode_port": 1234
}
],
"tsun.enabled": true,
"solarman.enabled": true,
"inverters.allow_all": true,
"gen3plus.at_acl.tsun.allow": [
"AT+Z",
"AT+UPURL",
"AT+SUPDATE"
],
"gen3plus.at_acl.mqtt.allow": [
"AT+"
]
}
"""
create_config()
cnf = tomllib.loads(test_buffer.wr)
validated = Config.conf_schema.validate(cnf)
assert validated == ConfigMinimum

View File

@@ -2,7 +2,7 @@
configuration:
inverters:
name: Inverters
description: >-
description: >+
For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT
definition. To do this, the corresponding configuration block is started with
<16-digit serial number> so that all subsequent parameters are assigned
@@ -11,6 +11,7 @@ configuration:
The serial numbers of all GEN3 inverters start with `R17`!
monitor_sn # The GEN3PLUS "Monitoring SN:"
node_id # MQTT replacement for inverters serial number
suggested_area # suggested installation area for home-assistant
modbus_polling # Disable optional MODBUS polling
@@ -18,21 +19,28 @@ configuration:
pv2 # Optional, PV module descr
tsun.enabled:
name: Connection to TSUN Cloud
name: Connection to TSUN Cloud - for GEN3 inverter only
description: >-
disable connecting to the tsun cloud avoids updates.
The Inverter become isolated from Internet if switched on.
switch on/off connection to the TSUN cloud
This connection is only required if you want to send data to the TSUN cloud
e.g. to use the TSUN APPs or to receive firmware updates.
on - normal proxy operation
off - the inverter becomes isolated from the Internet
solarman.enabled:
name: Connection to Solarman Cloud
name: Connection to Solarman Cloud - for GEN3PLUS inverter only
description: >-
disables connecting to the Solarman cloud avoids updates.
The Inverter become isolated from Internet if switched on.
switch on/off connection to the Solarman cloud
This connection is only required if you want to send data to the Solarman cloud
e.g. to use the Solarman APPs or to receive firmware updates.
on - normal proxy operation
off - the inverter becomes isolated from the Internet
inverters.allow_all:
name: Allow all connections from all inverters
description: >-
The proxy usually only accepts connections from known inverters.
This can be switched off for test purposes and unknown serial
numbers are also accepted.
Switch on for test purposes to also accept unknown serial numbers.
mqtt.host:
name: MQTT Broker Host
description: >-
@@ -59,6 +67,17 @@ configuration:
name: MQTT node id, for the proxy_node_id
ha.proxy_unique_id:
name: MQTT unique id, to identify a proxy instance
tsun.host:
name: TSUN Cloud Host
description: >-
Hostname or IP address of the TSUN cloud. If not set, the addon will try to connect to the default cloud
at logger.talent-monitoring.com
solarman.host:
name: Solarman Cloud Host
description: >-
Hostname or IP address of the Solarman cloud. If not set, the addon will try to connect to the default cloud
at iot.talent-monitoring.com
network:
8127/tcp: x...

View File

@@ -0,0 +1,3 @@
name: TSUN-Proxy
url: https://github.com/s-allius/tsun-gen3-proxy/ha_addons
maintainer: Stefan Allius

View File

@@ -2,7 +2,7 @@
[pytest]
minversion = 8.0
addopts = -ra -q --durations=5
pythonpath = app/src
testpaths = app/tests
pythonpath = app/src app/tests ha_addons/ha_addon/rootfs
testpaths = app/tests ha_addons/ha_addon/tests
asyncio_default_fixture_loop_scope = function
asyncio_mode = strict

View File

@@ -7,13 +7,13 @@ sonar.projectName=tsun-gen3-proxy
# Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows.
sonar.sources=app/src/
sonar.sources=app/src/,ha_addons/ha_addon/rootfs/home/
# Encoding of the source code. Default is default system encoding
#sonar.sourceEncoding=UTF-8
sonar.python.version=3.12
sonar.tests=system_tests/,app/tests/
sonar.tests=system_tests/,app/tests/,ha_addons/ha_addon/tests/
sonar.exclusions=**/.vscode/**/*
# Name your criteria
sonar.issue.ignore.multicriteria=e1,e2