lint code with flake8

Stefan Allius
2023-11-02 22:31:30 +01:00
parent c772eeeb28
commit 2c4af0b7d8
7 changed files with 585 additions and 490 deletions


@@ -1,13 +1,16 @@
-import logging, traceback
-from config import Config
-#import gc
+import logging
+import traceback
+# from config import Config
+# import gc
 from messages import Message, hex_dump_memory
 logger = logging.getLogger('conn')
 class AsyncStream(Message):
-    def __init__(self, reader, writer, addr, remote_stream, server_side: bool) -> None:
+    def __init__(self, reader, writer, addr, remote_stream, server_side: bool
+                 ) -> None:
         super().__init__()
         self.reader = reader
         self.writer = writer
@@ -29,7 +32,6 @@ class AsyncStream(Message):
                 await self.__async_forward()
                 await self.async_publ_mqtt()
             except (ConnectionResetError,
                     ConnectionAbortedError,
                     BrokenPipeError,
@@ -48,7 +50,6 @@ class AsyncStream(Message):
         logger.debug(f'in AsyncStream.disc() {self.addr}')
         self.writer.close()
     def close(self):
         logger.debug(f'in AsyncStream.close() {self.addr}')
         self.writer.close()
@@ -56,7 +57,6 @@ class AsyncStream(Message):
         # logger.info (f'AsyncStream refs: {gc.get_referrers(self)}')
     '''
     Our private methods
     '''
@@ -70,19 +70,23 @@ class AsyncStream(Message):
     async def __async_write(self) -> None:
         if self._send_buffer:
-            hex_dump_memory(logging.INFO, f'Transmit to {self.addr}:', self._send_buffer, len(self._send_buffer))
+            hex_dump_memory(logging.INFO, f'Transmit to {self.addr}:',
+                            self._send_buffer, len(self._send_buffer))
             self.writer.write(self._send_buffer)
             await self.writer.drain()
-            self._send_buffer = bytearray(0) #self._send_buffer[sent:]
+            self._send_buffer = bytearray(0)  # self._send_buffer[sent:]
     async def __async_forward(self) -> None:
         if self._forward_buffer:
             if not self.remoteStream:
-                await self.async_create_remote()  # only implmeneted for server side => syncServerStream
+                await self.async_create_remote()
             if self.remoteStream:
-                hex_dump_memory(logging.INFO, f'Forward to {self.remoteStream.addr}:', self._forward_buffer, len(self._forward_buffer))
-                self.remoteStream.writer.write (self._forward_buffer)
+                hex_dump_memory(logging.INFO,
+                                f'Forward to {self.remoteStream.addr}:',
+                                self._forward_buffer,
+                                len(self._forward_buffer))
+                self.remoteStream.writer.write(self._forward_buffer)
                 await self.remoteStream.writer.drain()
                 self._forward_buffer = bytearray(0)
@@ -92,8 +96,5 @@ class AsyncStream(Message):
     async def async_publ_mqtt(self) -> None:
         pass
-    def __del__ (self):
-        logging.debug (f"AsyncStream.__del__ {self.addr}")
+    def __del__(self):
+        logging.debug(f"AsyncStream.__del__ {self.addr}")


@@ -1,8 +1,11 @@
 '''Config module handles the proxy configuration in the config.toml file'''
-import shutil, tomllib, logging
+import shutil
+import tomllib
+import logging
 from schema import Schema, And, Use, Optional
 class Config():
     '''Static class Config is reads and sanitize the config.
@@ -10,67 +13,76 @@ class Config():
     Get named parts of the config with get()'''
     config = {}
-    conf_schema = Schema({ 'tsun': {
-            'enabled': Use(bool),
-            'host': Use(str),
-            'port': And(Use(int), lambda n: 1024 <= n <= 65535)},
-        'mqtt': {
-            'host': Use(str),
-            'port': And(Use(int), lambda n: 1024 <= n <= 65535),
-            'user': And(Use(str), Use(lambda s: s if len(s) >0 else None)),
-            'passwd': And(Use(str), Use(lambda s: s if len(s) >0 else None))},
-        'ha': {
-            'auto_conf_prefix': Use(str),
-            'discovery_prefix': Use(str),
-            'entity_prefix': Use(str),
-            'proxy_node_id': Use(str),
-            'proxy_unique_id': Use(str)},
-        'inverters': {
-            'allow_all' : Use(bool),
-            And(Use(str), lambda s: len(s) == 16 ): {
-                Optional('node_id', default=""): And(Use(str),Use(lambda s: s +'/' if len(s)> 0 and s[-1] != '/' else s)),
-                Optional('suggested_area', default=""): Use(str)
-            }}
-        }, ignore_extra_keys=True)
+    conf_schema = Schema({
+        'tsun': {
+            'enabled': Use(bool),
+            'host': Use(str),
+            'port': And(Use(int), lambda n: 1024 <= n <= 65535)
+        },
+        'mqtt': {
+            'host': Use(str),
+            'port': And(Use(int), lambda n: 1024 <= n <= 65535),
+            'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)),
+            'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None))
+        },
+        'ha': {
+            'auto_conf_prefix': Use(str),
+            'discovery_prefix': Use(str),
+            'entity_prefix': Use(str),
+            'proxy_node_id': Use(str),
+            'proxy_unique_id': Use(str)
+        },
+        'inverters': {
+            'allow_all': Use(bool),
+            And(Use(str), lambda s: len(s) == 16): {
+                Optional('node_id', default=""): And(Use(str),
+                                                     Use(lambda s: s + '/'
+                                                         if len(s) > 0 and
+                                                         s[-1] != '/' else s)),
+                Optional('suggested_area', default=""): Use(str)
+            }}
+    }, ignore_extra_keys=True
+    )
     @classmethod
     def read(cls) -> None:
-        '''Read config file, merge it with the default config and sanitize the result'''
+        '''Read config file, merge it with the default config
+        and sanitize the result'''
         config = {}
         logger = logging.getLogger('data')
         try:
-            # make the default config transparaent by copying it in the config.example file
+            # make the default config transparaent by copying it
+            # in the config.example file
             shutil.copy2("default_config.toml", "config/config.example.toml")
             # read example config file as default configuration
             with open("default_config.toml", "rb") as f:
                 def_config = tomllib.load(f)
-            # overwrite the default values, with values from the config.toml file
+            # overwrite the default values, with values from
+            # the config.toml file
             with open("config/config.toml", "rb") as f:
                 usr_config = tomllib.load(f)
             config['tsun'] = def_config['tsun'] | usr_config['tsun']
             config['mqtt'] = def_config['mqtt'] | usr_config['mqtt']
             config['ha'] = def_config['ha'] | usr_config['ha']
-            config['inverters'] = def_config['inverters'] | usr_config['inverters']
+            config['inverters'] = def_config['inverters'] | \
+                usr_config['inverters']
             cls.config = cls.conf_schema.validate(config)
-            #logging.debug(f'Readed config: "{cls.config}" ')
+            # logging.debug(f'Readed config: "{cls.config}" ')
         except Exception as error:
             logger.error(f'Config.read: {error}')
             cls.config = {}
     @classmethod
-    def get(cls, member:str = None):
-        '''Get a named attribute from the proxy config. If member == None it returns the complete config dict'''
+    def get(cls, member: str = None):
+        '''Get a named attribute from the proxy config. If member ==
+        None it returns the complete config dict'''
         if member:
             return cls.config.get(member, {})
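To see what the reformatted schema does with a user entry, here is a small sketch using the same schema package (not part of the commit; the 16-character serial number and the node_id value are invented examples). It exercises the Use/Optional coercions of the 'inverters' sub-schema, including the lambda that appends a trailing '/' to node_id:

from schema import And, Optional, Schema, Use

# Reduced copy of the 'inverters' sub-schema from this commit.
inverters = Schema({
    'allow_all': Use(bool),
    And(Use(str), lambda s: len(s) == 16): {
        Optional('node_id', default=""): And(Use(str),
                                             Use(lambda s: s + '/'
                                                 if len(s) > 0 and
                                                 s[-1] != '/' else s)),
        Optional('suggested_area', default=""): Use(str)
    }}, ignore_extra_keys=True)

cfg = inverters.validate({
    'allow_all': False,
    'R17E000000000001': {'node_id': 'garage'}   # invented serial number
})
print(cfg['R17E000000000001'])  # {'node_id': 'garage/', 'suggested_area': ''}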


@@ -1,11 +1,13 @@
-import struct, json, logging, os
+import struct
+import json
+import logging
+import os
 class Infos:
     stat = {}
     app_name = os.getenv('SERVICE_NAME', 'proxy')
     version = os.getenv('VERSION', 'unknown')
     @classmethod
     def static_init(cls):
@@ -14,119 +16,120 @@ class Infos:
         cls.stat['proxy'] = {}
         for key in cls.__info_defs:
             name = cls.__info_defs[key]['name']
-            if name[0]=='proxy':
+            if name[0] == 'proxy':
                 cls.stat['proxy'][name[1]] = 0
         # add values from the environment to the device definition table
         prxy = cls.__info_devs['proxy']
         prxy['sw'] = cls.version
         prxy['mdl'] = cls.app_name
     def __init__(self):
         self.db = {}
         self.tracer = logging.getLogger('data')
-    __info_devs={
+    __info_devs = {
-        'proxy': {'singleton': True, 'name':'Proxy', 'mf':'Stefan Allius'},
+        'proxy': {'singleton': True, 'name': 'Proxy', 'mf': 'Stefan Allius'},  # noqa: E501
-        'controller':{'via':'proxy', 'name':'Controller', 'mdl':0x00092f90, 'mf':0x000927c0, 'sw':0x00092ba8},
+        'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': 0x00092f90, 'mf': 0x000927c0, 'sw': 0x00092ba8},  # noqa: E501
-        'inverter': {'via':'controller', 'name':'Micro Inverter', 'mdl':0x00000032, 'mf':0x00000014, 'sw':0x0000001e},
+        'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': 0x00000032, 'mf': 0x00000014, 'sw': 0x0000001e},  # noqa: E501
-        'input_pv1': {'via':'inverter', 'name':'Module PV1'},
+        'input_pv1': {'via': 'inverter', 'name': 'Module PV1'},
-        'input_pv2': {'via':'inverter', 'name':'Module PV2', 'dep':{'reg':0x00095b50, 'gte': 2}},
+        'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'dep': {'reg': 0x00095b50, 'gte': 2}},  # noqa: E501
-        'input_pv3': {'via':'inverter', 'name':'Module PV3', 'dep':{'reg':0x00095b50, 'gte': 3}},
+        'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'dep': {'reg': 0x00095b50, 'gte': 3}},  # noqa: E501
-        'input_pv4': {'via':'inverter', 'name':'Module PV4', 'dep':{'reg':0x00095b50, 'gte': 4}},
+        'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'dep': {'reg': 0x00095b50, 'gte': 4}},  # noqa: E501
     }
-    __info_defs={
+    __info_defs = {
         # collector values used for device registration:
-        0x00092ba8: {'name':['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''},
+        0x00092ba8: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''},  # noqa: E501
-        0x000927c0: {'name':['collector', 'Chip_Type'], 'level': logging.DEBUG, 'unit': ''},
+        0x000927c0: {'name': ['collector', 'Chip_Type'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00092f90: {'name':['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''},
+        0x00092f90: {'name': ['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00095a88: {'name':['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''},
+        0x00095a88: {'name': ['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00095aec: {'name':['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''},
+        0x00095aec: {'name': ['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00095b50: {'name':['collector', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''},
+        0x00095b50: {'name': ['collector', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
         # inverter values used for device registration:
-        0x0000000a: {'name':['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''},
+        0x0000000a: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000014: {'name':['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000014: {'name': ['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x0000001e: {'name':['inverter', 'Version'], 'level': logging.INFO, 'unit': ''},
+        0x0000001e: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''},  # noqa: E501
-        0x00000028: {'name':['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000028: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000032: {'name':['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000032: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
         # proxy:
-        0xffffff00: {'name':['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'inv_count_', 'fmt':'| int', 'name': 'Active Inverter Connections', 'icon':'mdi:counter'}},
+        0xffffff00: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': '| int', 'name': 'Active Inverter Connections', 'icon': 'mdi:counter'}},  # noqa: E501
-        0xffffff01: {'name':['proxy', 'Unknown_SNR'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'unknown_snr_', 'fmt':'| int', 'name': 'Unknown Serial No', 'icon':'mdi:counter', 'ent_cat':'diagnostic'}},
+        0xffffff01: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': '| int', 'name': 'Unknown Serial No', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0xffffff02: {'name':['proxy', 'Unknown_Msg'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'unknown_msg_', 'fmt':'| int', 'name': 'Unknown Msg Type', 'icon':'mdi:counter', 'ent_cat':'diagnostic'}},
+        0xffffff02: {'name': ['proxy', 'Unknown_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_msg_', 'fmt': '| int', 'name': 'Unknown Msg Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0xffffff03: {'name':['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'inv_data_type_', 'fmt':'| int', 'name': 'Invalid Data Type', 'icon':'mdi:counter', 'ent_cat':'diagnostic'}},
+        0xffffff03: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': '| int', 'name': 'Invalid Data Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0xffffff04: {'name':['proxy', 'Internal_Error'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'intern_err_', 'fmt':'| int', 'name': 'Internal Error', 'icon':'mdi:counter', 'ent_cat':'diagnostic', 'en':False}},
+        0xffffff04: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': '| int', 'name': 'Internal Error', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic', 'en': False}},  # noqa: E501
-        0xffffff05: {'name':['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'unknown_ctrl_', 'fmt':'| int', 'name': 'Unknown Control Type', 'icon':'mdi:counter', 'ent_cat':'diagnostic'}},
+        0xffffff05: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': '| int', 'name': 'Unknown Control Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        # 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':'| float','name': 'Grid Voltage'}},
+        # 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':'| float','name': 'Grid Voltage'}},  # noqa: E501
         # events
-        0x00000191: {'name':['events', '401_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000191: {'name': ['events', '401_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000192: {'name':['events', '402_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000192: {'name': ['events', '402_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000193: {'name':['events', '403_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000193: {'name': ['events', '403_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000194: {'name':['events', '404_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000194: {'name': ['events', '404_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000195: {'name':['events', '405_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000195: {'name': ['events', '405_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000196: {'name':['events', '406_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000196: {'name': ['events', '406_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000197: {'name':['events', '407_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000197: {'name': ['events', '407_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000198: {'name':['events', '408_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000198: {'name': ['events', '408_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x00000199: {'name':['events', '409_'], 'level': logging.DEBUG, 'unit': ''},
+        0x00000199: {'name': ['events', '409_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x0000019a: {'name':['events', '410_'], 'level': logging.DEBUG, 'unit': ''},
+        0x0000019a: {'name': ['events', '410_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x0000019b: {'name':['events', '411_'], 'level': logging.DEBUG, 'unit': ''},
+        0x0000019b: {'name': ['events', '411_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x0000019c: {'name':['events', '412_'], 'level': logging.DEBUG, 'unit': ''},
+        0x0000019c: {'name': ['events', '412_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x0000019d: {'name':['events', '413_'], 'level': logging.DEBUG, 'unit': ''},
+        0x0000019d: {'name': ['events', '413_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x0000019e: {'name':['events', '414_'], 'level': logging.DEBUG, 'unit': ''},
+        0x0000019e: {'name': ['events', '414_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x0000019f: {'name':['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''},
+        0x0000019f: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
-        0x000001a0: {'name':['events', '416_'], 'level': logging.DEBUG, 'unit': ''},
+        0x000001a0: {'name': ['events', '416_'], 'level': logging.DEBUG, 'unit': ''},  # noqa: E501
         # grid measures:
-        0x000003e8: {'name':['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'out_volt_', 'fmt':'| float','name': 'Grid Voltage','ent_cat':'diagnostic'}},
+        0x000003e8: {'name': ['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'out_volt_', 'fmt': '| float', 'name': 'Grid Voltage', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x0000044c: {'name':['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'out_cur_', 'fmt':'| float','name': 'Grid Current','ent_cat':'diagnostic'}},
+        0x0000044c: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': '| float', 'name': 'Grid Current', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x000004b0: {'name':['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha':{'dev':'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id':'out_freq_', 'fmt':'| float','name': 'Grid Frequency','ent_cat':'diagnostic'}},
+        0x000004b0: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': '| float', 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000640: {'name':['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'out_power_', 'fmt':'| float','name': 'Power'}},
+        0x00000640: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': '| float', 'name': 'Power'}},  # noqa: E501
-        0x000005dc: {'name':['env', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'inverter', 'dev_cla': None, 'stat_cla': None, 'id':'rated_power_', 'fmt':'| int', 'name': 'Rated Power','ent_cat':'diagnostic'}},
+        0x000005dc: {'name': ['env', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'fmt': '| int', 'name': 'Rated Power', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000514: {'name':['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha':{'dev':'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id':'temp_', 'fmt':'| int','name': 'Temperature'}},
+        0x00000514: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': '| int', 'name': 'Temperature'}},  # noqa: E501
         # input measures:
-        0x000006a4: {'name':['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv1_', 'val_tpl' :"{{ (value_json['pv1']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x000006a4: {'name': ['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv1_', 'val_tpl': "{{ (value_json['pv1']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000708: {'name':['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv1_', 'val_tpl' :"{{ (value_json['pv1']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x00000708: {'name': ['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv1_', 'val_tpl': "{{ (value_json['pv1']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x0000076c: {'name':['input', 'pv1', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv1_','val_tpl' :"{{ (value_json['pv1']['Power'] | float)}}"}},
+        0x0000076c: {'name': ['input', 'pv1', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv1_', 'val_tpl': "{{ (value_json['pv1']['Power'] | float)}}"}},  # noqa: E501
-        0x000007d0: {'name':['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv2_', 'val_tpl' :"{{ (value_json['pv2']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x000007d0: {'name': ['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv2_', 'val_tpl': "{{ (value_json['pv2']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000834: {'name':['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv2_', 'val_tpl' :"{{ (value_json['pv2']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x00000834: {'name': ['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv2_', 'val_tpl': "{{ (value_json['pv2']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000898: {'name':['input', 'pv2', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv2_','val_tpl' :"{{ (value_json['pv2']['Power'] | float)}}"}},
+        0x00000898: {'name': ['input', 'pv2', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv2_', 'val_tpl': "{{ (value_json['pv2']['Power'] | float)}}"}},  # noqa: E501
-        0x000008fc: {'name':['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv3_', 'val_tpl' :"{{ (value_json['pv3']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x000008fc: {'name': ['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv3_', 'val_tpl': "{{ (value_json['pv3']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000960: {'name':['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv3_', 'val_tpl' :"{{ (value_json['pv3']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x00000960: {'name': ['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv3_', 'val_tpl': "{{ (value_json['pv3']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x000009c4: {'name':['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv3_','val_tpl' :"{{ (value_json['pv3']['Power'] | float)}}"}},
+        0x000009c4: {'name': ['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv3_', 'val_tpl': "{{ (value_json['pv3']['Power'] | float)}}"}},  # noqa: E501
-        0x00000a28: {'name':['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv4_', 'val_tpl' :"{{ (value_json['pv4']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x00000a28: {'name': ['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv4_', 'val_tpl': "{{ (value_json['pv4']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000a8c: {'name':['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv4_', 'val_tpl' :"{{ (value_json['pv4']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
+        0x00000a8c: {'name': ['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv4_', 'val_tpl': "{{ (value_json['pv4']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x00000af0: {'name':['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv4_','val_tpl' :"{{ (value_json['pv4']['Power'] | float)}}"}},
+        0x00000af0: {'name': ['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv4_', 'val_tpl': "{{ (value_json['pv4']['Power'] | float)}}"}},  # noqa: E501
-        0x00000c1c: {'name':['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv1_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
+        0x00000c1c: {'name': ['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv1_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}},  # noqa: E501
-        0x00000c80: {'name':['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv1_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
+        0x00000c80: {'name': ['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv1_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}},  # noqa: E501
-        0x00000ce4: {'name':['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv2_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
+        0x00000ce4: {'name': ['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv2_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}},  # noqa: E501
-        0x00000d48: {'name':['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv2_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
+        0x00000d48: {'name': ['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv2_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}},  # noqa: E501
-        0x00000dac: {'name':['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv3_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
+        0x00000dac: {'name': ['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv3_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}},  # noqa: E501
-        0x00000e10: {'name':['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv3_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
+        0x00000e10: {'name': ['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv3_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}},  # noqa: E501
-        0x00000e74: {'name':['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv4_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
+        0x00000e74: {'name': ['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv4_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}},  # noqa: E501
-        0x00000ed8: {'name':['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv4_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv4']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
+        0x00000ed8: {'name': ['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv4_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv4']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}},  # noqa: E501
         # total:
-        0x00000b54: {'name':['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev':'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_', 'fmt':'| float','name': 'Daily Generation', 'icon':'mdi:solar-power-variant', 'must_incr':True}},
+        0x00000b54: {'name': ['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_', 'fmt': '| float', 'name': 'Daily Generation', 'icon': 'mdi:solar-power-variant', 'must_incr': True}},  # noqa: E501
-        0x00000bb8: {'name':['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev':'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_', 'fmt':'| float','name': 'Total Generation', 'icon':'mdi:solar-power', 'must_incr':True}},
+        0x00000bb8: {'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': '| float', 'name': 'Total Generation', 'icon': 'mdi:solar-power', 'must_incr': True}},  # noqa: E501
         # controller:
-        0x000c3500: {'name':['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%' , 'ha':{'dev':'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id':'signal_', 'fmt':'| int', 'name': 'Signal Strength', 'icon':'mdi:wifi'}},
+        0x000c3500: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': '| int', 'name': 'Signal Strength', 'icon': 'mdi:wifi'}},  # noqa: E501
-        0x000c96a8: {'name':['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha':{'dev':'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id':'power_on_time_', 'name': 'Power on Time', 'val_tpl':"{{ (value_json['Power_On_Time'] | float)}}", 'nat_prc':'3','ent_cat':'diagnostic'}},
+        0x000c96a8: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'name': 'Power on Time', 'val_tpl': "{{ (value_json['Power_On_Time'] | float)}}", 'nat_prc': '3', 'ent_cat': 'diagnostic'}},  # noqa: E501
-        0x000cf850: {'name':['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha':{'dev':'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id':'data_up_intval_', 'fmt':'| int', 'name': 'Data Up Interval', 'icon':'mdi:update','ent_cat':'diagnostic'}},
+        0x000cf850: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'data_up_intval_', 'fmt': '| int', 'name': 'Data Up Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}},  # noqa: E501
     }
-    def dev_value(self, idx:str|int) -> str|int|float|None:
+    def dev_value(self, idx: str | int) -> str | int | float | None:
         '''returns the stored device value from our database
-        idx:int ==> lookup the value in the database and return it as str, int or flout. If the value is not available return 'None'
-        idx:str ==> returns the string as a fixed value without a database loopup
+        idx:int ==> lookup the value in the database and return it as str,
+                    int or flout. If the value is not available return 'None'
+        idx:str ==> returns the string as a fixed value without a
+                    database loopup
         '''
-        if type (idx) is str:
-            return idx # return idx as a fixed value
+        if type(idx) is str:
+            return idx  # return idx as a fixed value
         elif idx in self.__info_defs:
             row = self.__info_defs[idx]
@@ -145,13 +148,15 @@ class Infos:
         return None  # unknwon idx, not in __info_defs
-    def ignore_this_device(self, dep:dict) -> bool:
+    def ignore_this_device(self, dep: dict) -> bool:
         '''Checks the equation in the dep dict
-        returns 'False' only if the equation is valid; 'True' in any other case'''
+        returns 'False' only if the equation is valid;
+        'True' in any other case'''
         if 'reg' in dep:
             value = self.dev_value(dep['reg'])
-            if not value: return True
+            if not value:
+                return True
             if 'gte' in dep:
                 return not value >= dep['gte']
@@ -159,12 +164,14 @@ class Infos:
                 return not value <= dep['less_eq']
         return True
-    def ha_confs(self, ha_prfx, node_id, snr, singleton:bool, sug_area =''):
-        '''Generator function yields a json register struct for home-assistant auto configuration and a unique entity string
+    def ha_confs(self, ha_prfx, node_id, snr, singleton: bool, sug_area=''):
+        '''Generator function yields a json register struct for home-assistant
+        auto configuration and a unique entity string
        arguments:
        prfx:str ==> MQTT prefix for the home assistant 'stat_t string
-       snr:str ==> serial number of the inverter, used to build unique entity strings
+       snr:str ==> serial number of the inverter, used to build unique
+                   entity strings
        sug_area:str ==> suggested area string from the config file'''
         tab = self.__info_defs
         for key in tab:
@@ -176,66 +183,72 @@ class Infos:
                    continue
            prfx = ha_prfx + node_id
-           #check if we have details for home assistant
+           # check if we have details for home assistant
            if 'ha' in row:
                ha = row['ha']
                if 'comp' in ha:
                    component = ha['comp']
                else:
                    component = 'sensor'
-               attr = {}  # dict to collect all the sensor entity details
+               attr = {}
                if 'name' in ha:
-                   attr['name'] = ha['name']  # take the entity name from the ha dict
+                   attr['name'] = ha['name']
                else:
-                   attr['name'] = row['name'][-1]  # otherwise take a name from the name array
-               attr['stat_t'] = prfx +row['name'][0]  # eg. 'stat_t': "tsun/garagendach/grid"
-               attr['dev_cla'] = ha['dev_cla']  # eg. 'dev_cla': 'power'
-               attr['stat_cla'] = ha['stat_cla']  # eg. 'stat_cla': "measurement"
-               attr['uniq_id'] = ha['id']+snr  # build the 'uniq_id' from the id str + the serial no of the inverter
+                   attr['name'] = row['name'][-1]
+               attr['stat_t'] = prfx + row['name'][0]
+               attr['dev_cla'] = ha['dev_cla']
+               attr['stat_cla'] = ha['stat_cla']
+               attr['uniq_id'] = ha['id']+snr
                if 'val_tpl' in ha:
-                   attr['val_tpl'] = ha['val_tpl']  # get value template for complexe data structures
+                   attr['val_tpl'] = ha['val_tpl']
                elif 'fmt' in ha:
-                   attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}'  # eg. 'val_tpl': "{{ value_json['Output_Power']|float }}"
+                   attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}']"
+                   " {ha['fmt']}" + '}}'  # eg. 'val_tpl': "{{ value_json['Output_Power']|float }} # noqa: E501
                else:
                    self.inc_counter('Internal_Error')
-                   logging.error(f"Infos.__info_defs: the row for {key} do not have a 'val_tpl' nor a 'fmt' value")
+                   logging.error(f"Infos.__info_defs: the row for {key} do"
+                                 " not have a 'val_tpl' nor a 'fmt' value")
-               # add unit_of_meas only, if status_class isn't none. If status_cla is None we want a number format and not line graph in home assistant.
-               # A unit will change the number format to a line graph
-               if 'unit' in row and attr['stat_cla'] != None:
-                   attr['unit_of_meas'] = row['unit']  # optional add a 'unit_of_meas' e.g. 'W'
+               # add unit_of_meas only, if status_class isn't none. If
+               # status_cla is None we want a number format and not line
+               # graph in home assistant. A unit will change the number
+               # format to a line graph
+               if 'unit' in row and attr['stat_cla'] is not None:
+                   attr['unit_of_meas'] = row['unit']  # 'unit_of_meas'
                if 'icon' in ha:
-                   attr['ic'] = ha['icon']  # optional add an icon for the entity
+                   attr['ic'] = ha['icon']  # icon for the entity
                if 'nat_prc' in ha:
-                   attr['sug_dsp_prc'] = ha['nat_prc']  # optional add the precison of floats
+                   attr['sug_dsp_prc'] = ha['nat_prc']  # precison of floats
                if 'ent_cat' in ha:
                    attr['ent_cat'] = ha['ent_cat']  # diagnostic, config
-               # enabled_by_default is deactivated, since it avoid the via setup of the devices
-               # it seems, that there is a bug in home assistant. tested with 'Home Assistant 2023.10.4'
-               #if 'en' in ha: # enabled_by_default
+               # enabled_by_default is deactivated, since it avoid the via
+               # setup of the devices. It seems, that there is a bug in home
+               # assistant. tested with 'Home Assistant 2023.10.4'
+               # if 'en' in ha:  # enabled_by_default
                #    attr['en'] = ha['en']
-               # eg. 'dev':{'name':'Microinverter','mdl':'MS-600','ids':["inverter_123"],'mf':'TSUN','sa': 'auf Garagendach'}
-               # attr['dev'] = {'name':'Microinverter','mdl':'MS-600','ids':[f'inverter_{snr}'],'mf':'TSUN','sa': 'auf Garagendach'}
                if 'dev' in ha:
                    device = self.__info_devs[ha['dev']]
-                   if 'dep' in device and self.ignore_this_device(device['dep']):
+                   if 'dep' in device and self.ignore_this_device(device['dep']):  # noqa: E501
                        continue
                    dev = {}
-                   # the same name for 'name' and 'suggested area', so we get dedicated devices in home assistant with short value name and headline
-                   if sug_area == '' or ('singleton' in device and device['singleton']):
+                   # the same name for 'name' and 'suggested area', so we get
+                   # dedicated devices in home assistant with short value
+                   # name and headline
+                   if (sug_area == '' or
+                           ('singleton' in device and device['singleton'])):
                        dev['name'] = device['name']
                        dev['sa'] = device['name']
                    else:
                        dev['name'] = device['name']+' - '+sug_area
                        dev['sa'] = device['name']+' - '+sug_area
                    if 'via' in device:  # add the link to the parent device
                        via = device['via']
                        if via in self.__info_devs:
                            via_dev = self.__info_devs[via]
@@ -245,19 +258,22 @@ class Infos:
                            dev['via_device'] = f"{via}_{snr}"
                        else:
                            self.inc_counter('Internal_Error')
-                           logging.error(f"Infos.__info_defs: the row for {key} has an invalid via value: {via}")
+                           logging.error(f"Infos.__info_defs: the row for "
+                                         f"{key} has an invalid via value: "
+                                         f"{via}")
-                   for key in ('mdl','mf', 'sw', 'hw'):  # add optional values fpr 'modell', 'manufaturer', 'sw version' and 'hw version'
+                   for key in ('mdl', 'mf', 'sw', 'hw'):  # add optional
+                       # values fpr 'modell', 'manufaturer', 'sw version' and
+                       # 'hw version'
                        if key in device:
                            data = self.dev_value(device[key])
-                           if data is not None: dev[key] = data
+                           if data is not None:
+                               dev[key] = data
                    if 'singleton' in device and device['singleton']:
                        dev['ids'] = [f"{ha['dev']}"]
                    else:
                        dev['ids'] = [f"{ha['dev']}_{snr}"]
                    attr['dev'] = dev
@@ -267,24 +283,24 @@ class Infos:
                    attr['o'] = origin
                else:
                    self.inc_counter('Internal_Error')
-                   logging.error(f"Infos.__info_defs: the row for {key} missing 'dev' value for ha register")
+                   logging.error(f"Infos.__info_defs: the row for {key} "
+                                 "missing 'dev' value for ha register")
-               yield json.dumps (attr), component, node_id, attr['uniq_id']
+               yield json.dumps(attr), component, node_id, attr['uniq_id']
-    def inc_counter (self, counter:str) -> None:
+    def inc_counter(self, counter: str) -> None:
         '''inc proxy statistic counter'''
         dict = self.stat['proxy']
         dict[counter] += 1
-    def dec_counter (self, counter:str) -> None:
+    def dec_counter(self, counter: str) -> None:
         '''dec proxy statistic counter'''
         dict = self.stat['proxy']
         dict[counter] -= 1
     def __key_obj(self, id) -> list:
-        d = self.__info_defs.get(id, {'name': None, 'level': logging.DEBUG, 'unit': ''})
+        d = self.__info_defs.get(id, {'name': None, 'level': logging.DEBUG,
+                                      'unit': ''})
         if 'ha' in d and 'must_incr' in d['ha']:
             must_incr = d['ha']['must_incr']
         else:
@@ -292,9 +308,9 @@ class Infos:
         return d['name'], d['level'], d['unit'], must_incr
     def parse(self, buf) -> None:
-        '''parse a data sequence received from the inverter and stores the values in Infos.db
+        '''parse a data sequence received from the inverter and
+        stores the values in Infos.db
         buf: buffer of the sequence to parse'''
         result = struct.unpack_from('!l', buf, 0)
@@ -303,34 +319,35 @@ class Infos:
         ind = 4
         while i < elms:
             result = struct.unpack_from('!lB', buf, ind)
             info_id = result[0]
             data_type = result[1]
             ind += 5
             keys, level, unit, must_incr = self.__key_obj(info_id)
-            if data_type==0x54: # 'T' -> Pascal-String
+            if data_type == 0x54:  # 'T' -> Pascal-String
                 str_len = buf[ind]
-                result = struct.unpack_from(f'!{str_len+1}p', buf, ind)[0].decode(encoding='ascii', errors='replace')
+                result = struct.unpack_from(f'!{str_len+1}p', buf,
+                                            ind)[0].decode(encoding='ascii',
+                                                           errors='replace')
                 ind += str_len+1
-            elif data_type==0x49: # 'I' -> int32
-                result = struct.unpack_from(f'!l', buf, ind)[0]
+            elif data_type == 0x49:  # 'I' -> int32
+                result = struct.unpack_from('!l', buf, ind)[0]
                 ind += 4
-            elif data_type==0x53: # 'S' -> short
-                result = struct.unpack_from(f'!h', buf, ind)[0]
+            elif data_type == 0x53:  # 'S' -> short
+                result = struct.unpack_from('!h', buf, ind)[0]
                 ind += 2
-            elif data_type==0x46: # 'F' -> float32
-                result = round(struct.unpack_from(f'!f', buf, ind)[0],2)
+            elif data_type == 0x46:  # 'F' -> float32
+                result = round(struct.unpack_from('!f', buf, ind)[0], 2)
                 ind += 4
             else:
                 self.inc_counter('Invalid_Data_Type')
-                logging.error(f"Infos.parse: data_type: {data_type} not supported")
+                logging.error(f"Infos.parse: data_type: {data_type}"
+                              " not supported")
                 return
             if keys:
                 dict = self.db
                 name = ''
@@ -342,22 +359,22 @@ class Infos:
                     name += key + '.'
                 if keys[-1] not in dict:
-                    update = (not must_incr or result>0)
+                    update = (not must_incr or result > 0)
                 else:
                     if must_incr:
                         update = dict[keys[-1]] < result
                     else:
                         update = dict[keys[-1]] != result
-                if update: dict[keys[-1]] = result
+                if update:
+                    dict[keys[-1]] = result
                 name += keys[-1]
                 yield keys[0], update
             else:
                 update = False
                 name = str(f'info-id.0x{info_id:x}')
-            self.tracer.log(level, f'{name} : {result}{unit} update: {update}')
+            self.tracer.log(level, f'{name} : {result}{unit}'
+                            ' update: {update}')
-            i +=1
+            i += 1
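For orientation, the record layout that parse() walks over can be reproduced with the same struct format strings used above. A standalone sketch (not from the repository; it uses the Output_Power id 0x00000640 from __info_defs and an invented value):

import struct

# One element: a 4-byte element count, then per element a 4-byte big-endian
# info id, a 1-byte data type and the payload; 0x46 ('F') marks a float32.
buf = struct.pack('!l', 1)                    # number of elements
buf += struct.pack('!lB', 0x00000640, 0x46)   # info id + data type 'F'
buf += struct.pack('!f', 123.5)               # float32 payload

elms = struct.unpack_from('!l', buf, 0)[0]
ind = 4
info_id, data_type = struct.unpack_from('!lB', buf, ind)
ind += 5
value = round(struct.unpack_from('!f', buf, ind)[0], 2)
print(elms, hex(info_id), chr(data_type), value)   # 1 0x640 F 123.5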


@@ -1,20 +1,50 @@
-import asyncio, logging, traceback, json
+import asyncio
+import logging
+import traceback
+import json
 from config import Config
 from async_stream import AsyncStream
 from mqtt import Mqtt
 from aiomqtt import MqttCodeError
 from infos import Infos
-#import gc
+# import gc
-#logger = logging.getLogger('conn')
+# logger = logging.getLogger('conn')
 logger_mqtt = logging.getLogger('mqtt')
 class Inverter(AsyncStream):
+    '''class Inverter is a derivation of an Async_Stream
+    The class has some class method for managing common resources like a
+    connection to the MQTT broker or proxy error counter which are common
+    for all inverter connection
+    Instances of the class are connections to an inverter and can have an
+    optional link to an remote connection to the TSUN cloud. A remote
+    connection dies with the inverter connection.
+    class methods:
+        class_init():  initialize the common resources of the proxy (MQTT
+                       broker, Proxy DB, etc). Must be called before the
+                       first Ib´verter instance can be created
+        class_close(): release the common resources of the proxy. Should not
+                       be called before any instances of the class are
+                       destroyed
+    methods:
+        server_loop(addr): Async loop method for receiving messages from the
+                           inverter (server-side)
+        client_loop(addr): Async loop method for receiving messages from the
+                           TSUN cloud (client-side)
+        async_create_remote(): Establish a client connection to the TSUN cloud
+        async_publ_mqtt(): Publish data to MQTT broker
+        close(): Release method which must be called before a instance can be
+                 destroyed
+    '''
     @classmethod
-    def class_init(cls):
+    def class_init(cls) -> None:
         logging.debug('Inverter.class_init')
         # initialize the proxy statistics
         Infos.static_init()
@@ -23,78 +53,84 @@ class Inverter(AsyncStream):
ha = Config.get('ha') ha = Config.get('ha')
cls.entity_prfx = ha['entity_prefix'] + '/' cls.entity_prfx = ha['entity_prefix'] + '/'
cls.discovery_prfx = ha['discovery_prefix'] + '/' cls.discovery_prfx = ha['discovery_prefix'] + '/'
cls.proxy_node_id = ha['proxy_node_id'] + '/' cls.proxy_node_id = ha['proxy_node_id'] + '/'
cls.proxy_unique_id = ha['proxy_unique_id'] cls.proxy_unique_id = ha['proxy_unique_id']
# call Mqtt singleton to establisch the connection to the mqtt broker # call Mqtt singleton to establisch the connection to the mqtt broker
cls.mqtt = Mqtt(cls.__cb_MqttIsUp) cls.mqtt = Mqtt(cls.__cb_mqtt_is_up)
@classmethod @classmethod
async def __cb_MqttIsUp(cls): async def __cb_mqtt_is_up(cls) -> None:
logging.info('Initialize proxy device on home assistant') logging.info('Initialize proxy device on home assistant')
# register proxy status counters at home assistant # register proxy status counters at home assistant
await cls.__register_proxy_stat_home_assistant() await cls.__register_proxy_stat_home_assistant()
# send values of the proxy status counters # send values of the proxy status counters
await asyncio.sleep(0.5) # wait a bit, before sending first data await asyncio.sleep(0.5) # wait a bit, before sending data
cls.new_stat_data['proxy'] = True # force sending data to sync home assistant cls.new_stat_data['proxy'] = True # force sending data to sync ha
await cls.__async_publ_mqtt_proxy_stat('proxy') await cls.__async_publ_mqtt_proxy_stat('proxy')
@classmethod @classmethod
async def __register_proxy_stat_home_assistant(cls) -> None: async def __register_proxy_stat_home_assistant(cls) -> None:
'''register all our topics at home assistant''' '''register all our topics at home assistant'''
for data_json, component, node_id, id in cls.db_stat.ha_confs(cls.entity_prfx, cls.proxy_node_id, cls.proxy_unique_id, True): for data_json, component, node_id, id in cls.db_stat.ha_confs(
logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") cls.entity_prfx, cls.proxy_node_id,
await cls.mqtt.publish(f"{cls.discovery_prfx}{component}/{node_id}{id}/config", data_json) cls.proxy_unique_id, True):
logger_mqtt.debug(f'''MQTT Register: cmp:'{component}' node_id:'
{node_id}' {data_json}''')
await cls.mqtt.publish(f'''{cls.discovery_prfx}{component}
/{node_id}{id}/config''', data_json)
@classmethod @classmethod
async def __async_publ_mqtt_proxy_stat(cls, key): async def __async_publ_mqtt_proxy_stat(cls, key) -> None:
stat = Infos.stat stat = Infos.stat
if key in stat and cls.new_stat_data[key]: if key in stat and cls.new_stat_data[key]:
data_json = json.dumps(stat[key]) data_json = json.dumps(stat[key])
node_id = cls.proxy_node_id node_id = cls.proxy_node_id
logger_mqtt.debug(f'{key}: {data_json}') logger_mqtt.debug(f'{key}: {data_json}')
await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}", data_json) await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}",
data_json)
cls.new_stat_data[key] = False cls.new_stat_data[key] = False
@classmethod @classmethod
def class_close(cls, loop): def class_close(cls, loop) -> None:
logging.debug('Inverter.class_close') logging.debug('Inverter.class_close')
logging.info ('Close MQTT Task') logging.info('Close MQTT Task')
loop.run_until_complete(cls.mqtt.close()) loop.run_until_complete(cls.mqtt.close())
cls.mqtt = None cls.mqtt = None
def __init__ (self, reader, writer, addr): def __init__(self, reader, writer, addr):
super().__init__(reader, writer, addr, None, True) super().__init__(reader, writer, addr, None, True)
self.ha_restarts = -1 self.ha_restarts = -1
async def server_loop(self, addr): async def server_loop(self, addr):
'''Loop for receiving messages from the inverter (server-side)''' '''Loop for receiving messages from the inverter (server-side)'''
logging.info(f'Accept connection from {addr}') logging.info(f'Accept connection from {addr}')
self.inc_counter ('Inverter_Cnt') self.inc_counter('Inverter_Cnt')
await self.loop() await self.loop()
self.dec_counter ('Inverter_Cnt') self.dec_counter('Inverter_Cnt')
logging.info(f'Server loop stopped for {addr}') logging.info(f'Server loop stopped for {addr}')
# if the server connection closes, we also have to disconnect the connection to te TSUN cloud # if the server connection closes, we also have to disconnect
# the connection to te TSUN cloud
if self.remoteStream: if self.remoteStream:
logging.debug ("disconnect client connection") logging.debug("disconnect client connection")
self.remoteStream.disc() self.remoteStream.disc()
try: try:
await self.__async_publ_mqtt_proxy_stat('proxy') await self.__async_publ_mqtt_proxy_stat('proxy')
except: pass except Exception:
pass
async def client_loop(self, addr): async def client_loop(self, addr):
'''Loop for receiving messages from the TSUN cloud (client-side)''' '''Loop for receiving messages from the TSUN cloud (client-side)'''
await self.remoteStream.loop() await self.remoteStream.loop()
logging.info(f'Client loop stopped for {addr}') logging.info(f'Client loop stopped for {addr}')
# if the client connection closes, we don't touch the server connection. Instead we erase the client # if the client connection closes, we don't touch the server
# connection stream, thus on the next received packet from the inverter, we can establish a new connection # connection. Instead we erase the client connection stream,
# to the TSUN cloud # thus on the next received packet from the inverter, we can
self.remoteStream.remoteStream = None # erase backlink to inverter instance # establish a new connection to the TSUN cloud
self.remoteStream.remoteStream = None # erase backlink to inverter
        self.remoteStream = None              # then erase client connection         self.remoteStream = None   # then erase client connection
async def async_create_remote(self) -> None: async def async_create_remote(self) -> None:
@@ -118,15 +154,15 @@ class Inverter(AsyncStream):
f"Inverter: Exception for {addr}:\n" f"Inverter: Exception for {addr}:\n"
f"{traceback.format_exc()}") f"{traceback.format_exc()}")
async def async_publ_mqtt(self) -> None: async def async_publ_mqtt(self) -> None:
'''puplish data to MQTT broker''' '''publish data to MQTT broker'''
# check if new inverter or collector infos are available or when the home assistant has changed the status back to online # check if new inverter or collector infos are available or when the
# home assistant has changed the status back to online
try: try:
if (('inverter' in self.new_data and self.new_data['inverter']) or if (('inverter' in self.new_data and self.new_data['inverter'])
('collector' in self.new_data and self.new_data['collector']) or or ('collector' in self.new_data and
self.mqtt.ha_restarts != self.ha_restarts): self.new_data['collector'])
or self.mqtt.ha_restarts != self.ha_restarts):
await self.__register_proxy_stat_home_assistant() await self.__register_proxy_stat_home_assistant()
await self.__register_home_assistant() await self.__register_home_assistant()
self.ha_restarts = self.mqtt.ha_restarts self.ha_restarts = self.mqtt.ha_restarts
@@ -143,28 +179,31 @@ class Inverter(AsyncStream):
f"Inverter: Exception:\n" f"Inverter: Exception:\n"
f"{traceback.format_exc()}") f"{traceback.format_exc()}")
async def __async_publ_mqtt_packet(self, key): async def __async_publ_mqtt_packet(self, key):
db = self.db.db db = self.db.db
if key in db and self.new_data[key]: if key in db and self.new_data[key]:
data_json = json.dumps(db[key]) data_json = json.dumps(db[key])
node_id = self.node_id node_id = self.node_id
logger_mqtt.debug(f'{key}: {data_json}') logger_mqtt.debug(f'{key}: {data_json}')
            await self.mqtt.publish(f"{self.entity_prfx}{node_id}{key}", data_json)             await self.mqtt.publish(f"{self.entity_prfx}{node_id}"
                                    f"{key}", data_json)
self.new_data[key] = False self.new_data[key] = False
async def __register_home_assistant(self) -> None: async def __register_home_assistant(self) -> None:
'''register all our topics at home assistant''' '''register all our topics at home assistant'''
for data_json, component, node_id, id in self.db.ha_confs(self.entity_prfx, self.node_id, self.unique_id, False, self.sug_area): for data_json, component, node_id, id in self.db.ha_confs(
logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") self.entity_prfx, self.node_id, self.unique_id,
await self.mqtt.publish(f"{self.discovery_prfx}{component}/{node_id}{id}/config", data_json) False, self.sug_area):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
" node_id:'{node_id}' {data_json}")
await self.mqtt.publish(f"{self.discovery_prfx}{component}"
"/{node_id}{id}/config", data_json)
def close(self) -> None: def close(self) -> None:
logging.debug(f'Inverter.close() {self.addr}') logging.debug(f'Inverter.close() {self.addr}')
super().close() # call close handler in the parent class super().close() # call close handler in the parent class
# logger.debug (f'Inverter refs: {gc.get_referrers(self)}') # logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
def __del__(self):
def __del__ (self): logging.debug("Inverter.__del__")
logging.debug ("Inverter.__del__")
super().__del__() super().__del__()
View File
@@ -1,6 +1,8 @@
import struct, logging, time, datetime import struct
import logging
import time
import datetime
import weakref import weakref
from datetime import datetime
if __name__ == "app.src.messages": if __name__ == "app.src.messages":
from app.src.infos import Infos from app.src.infos import Infos
@@ -17,7 +19,8 @@ def hex_dump_memory(level, info, data, num):
lines = [] lines = []
lines.append(info) lines.append(info)
tracer = logging.getLogger('tracer') tracer = logging.getLogger('tracer')
if not tracer.isEnabledFor(level): return if not tracer.isEnabledFor(level):
return
for i in range(0, num, 16): for i in range(0, num, 16):
line = ' ' line = ' '
@@ -25,13 +28,15 @@ def hex_dump_memory(level, info, data, num):
n += 16 n += 16
for j in range(n-16, n): for j in range(n-16, n):
if j >= len(data): break if j >= len(data):
break
line += '%02x ' % abs(data[j]) line += '%02x ' % abs(data[j])
line += ' ' * (3 * 16 + 9 - len(line)) + ' | ' line += ' ' * (3 * 16 + 9 - len(line)) + ' | '
for j in range(n-16, n): for j in range(n-16, n):
if j >= len(data): break if j >= len(data):
break
c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.' c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.'
line += '%c' % c line += '%c' % c
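As a self-contained illustration of the dump format hex_dump_memory() produces (16 bytes per line, a hex column, then a printable-ASCII column with '.' placeholders), here is a compact sketch; it omits the logger plumbing and the extra offset padding of the original:

def hex_dump(data: bytes) -> str:
    lines = []
    for i in range(0, len(data), 16):
        chunk = data[i:i + 16]
        hexpart = ' '.join(f'{b:02x}' for b in chunk)
        text = ''.join(chr(b) if 0x20 <= b <= 0x7e else '.' for b in chunk)
        lines.append(f'{hexpart:<47} | {text}')
    return '\n'.join(lines)

print(hex_dump(b'TSUN proxy\x00\x01'))   # hex column, padding, then '| TSUN proxy..'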
@@ -41,7 +46,7 @@ def hex_dump_memory(level, info, data, num):
class Control: class Control:
def __init__(self, ctrl:int): def __init__(self, ctrl: int):
self.ctrl = ctrl self.ctrl = ctrl
def __int__(self) -> int: def __int__(self) -> int:
@@ -50,17 +55,19 @@ class Control:
def is_ind(self) -> bool: def is_ind(self) -> bool:
return (self.ctrl == 0x91) return (self.ctrl == 0x91)
#def is_req(self) -> bool: # def is_req(self) -> bool:
# return not (self.ctrl & 0x08) # return not (self.ctrl & 0x08)
def is_resp(self) -> bool: def is_resp(self) -> bool:
return (self.ctrl == 0x99) return (self.ctrl == 0x99)
class IterRegistry(type): class IterRegistry(type):
def __iter__(cls): def __iter__(cls):
for ref in cls._registry: for ref in cls._registry:
obj = ref() obj = ref()
if obj is not None: yield obj if obj is not None:
yield obj
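The metaclass above is what lets other code iterate over all live Message instances. A self-contained sketch of the same pattern; the Tracked class and its plain-list _registry are illustrative, since the real registry lives on Message and is only partly visible in this diff:

import weakref

class IterRegistry(type):
    def __iter__(cls):
        for ref in cls._registry:
            obj = ref()
            if obj is not None:        # skip instances the GC already collected
                yield obj

class Tracked(metaclass=IterRegistry):
    _registry = []                     # weak references to every instance

    def __init__(self, name):
        self._registry.append(weakref.ref(self))
        self.name = name

a, b = Tracked('a'), Tracked('b')
print([t.name for t in Tracked])       # -> ['a', 'b']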
class Message(metaclass=IterRegistry): class Message(metaclass=IterRegistry):
@@ -80,7 +87,7 @@ class Message(metaclass=IterRegistry):
self._forward_buffer = bytearray(0) self._forward_buffer = bytearray(0)
self.db = Infos() self.db = Infos()
self.new_data = {} self.new_data = {}
self.switch={ self.switch = {
0x00: self.msg_contact_info, 0x00: self.msg_contact_info,
0x22: self.msg_get_time, 0x22: self.msg_get_time,
0x71: self.msg_collector_data, 0x71: self.msg_collector_data,
@@ -88,9 +95,11 @@ class Message(metaclass=IterRegistry):
} }
''' '''
    Empty methods that have to be implemented in any child class which doesn't use asyncio     Empty methods that have to be implemented in any child class which
    doesn't use asyncio
''' '''
def _read(self) -> None: # read data bytes from socket and copy them to our _recv_buffer def _read(self) -> None: # read data bytes from socket and copy them
# to our _recv_buffer
return # pragma: no cover return # pragma: no cover
''' '''
@@ -102,48 +111,51 @@ class Message(metaclass=IterRegistry):
# deallocated by the garbage collector ==> we get a memory leak # deallocated by the garbage collector ==> we get a memory leak
del self.switch del self.switch
def inc_counter(self, counter:str) -> None: def inc_counter(self, counter: str) -> None:
self.db.inc_counter(counter) self.db.inc_counter(counter)
self.new_stat_data['proxy'] = True self.new_stat_data['proxy'] = True
def dec_counter(self, counter:str) -> None: def dec_counter(self, counter: str) -> None:
self.db.dec_counter(counter) self.db.dec_counter(counter)
self.new_stat_data['proxy'] = True self.new_stat_data['proxy'] = True
def set_serial_no(self, serial_no : str): def set_serial_no(self, serial_no: str):
if self.unique_id == serial_no: if self.unique_id == serial_no:
logger.debug(f'SerialNo: {serial_no}') logger.debug(f'SerialNo: {serial_no}')
else: else:
inverters = Config.get('inverters') inverters = Config.get('inverters')
#logger.debug(f'Inverters: {inverters}') # logger.debug(f'Inverters: {inverters}')
if serial_no in inverters: if serial_no in inverters:
inv = inverters[serial_no] inv = inverters[serial_no]
self.node_id = inv['node_id'] self.node_id = inv['node_id']
self.sug_area = inv['suggested_area'] self.sug_area = inv['suggested_area']
                logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}')                 logger.debug(f'SerialNo {serial_no} allowed! '
                             f'area:{self.sug_area}')
else: else:
self.node_id = '' self.node_id = ''
self.sug_area = '' self.sug_area = ''
if 'allow_all' not in inverters or not inverters['allow_all']: if 'allow_all' not in inverters or not inverters['allow_all']:
self.inc_counter('Unknown_SNR') self.inc_counter('Unknown_SNR')
self.unique_id = None self.unique_id = None
                    logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})')                     logger.warning(f'ignore message from unknown inverter! '
                                   f'(SerialNo: {serial_no})')
return return
logger.debug(f'SerialNo {serial_no} not known but accepted!') logger.debug(f'SerialNo {serial_no} not known but accepted!')
self.unique_id = serial_no self.unique_id = serial_no
def read(self) -> None: def read(self) -> None:
self._read() self._read()
if not self.header_valid: if not self.header_valid:
self.__parse_header(self._recv_buffer, len(self._recv_buffer)) self.__parse_header(self._recv_buffer, len(self._recv_buffer))
if self.header_valid and len(self._recv_buffer) >= (self.header_len+self.data_len): if self.header_valid and len(self._recv_buffer) >= (self.header_len +
hex_dump_memory(logging.INFO, f'Received from {self.addr}:', self._recv_buffer, self.header_len+self.data_len) self.data_len):
hex_dump_memory(logging.INFO, f'Received from {self.addr}:',
self._recv_buffer, self.header_len+self.data_len)
self.set_serial_no(self.id_str.decode("utf-8")) self.set_serial_no(self.id_str.decode("utf-8"))
self.__dispatch_msg() self.__dispatch_msg()
@@ -154,28 +166,33 @@ class Message(metaclass=IterRegistry):
tsun = Config.get('tsun') tsun = Config.get('tsun')
if tsun['enabled']: if tsun['enabled']:
self._forward_buffer = buffer[:buflen] self._forward_buffer = buffer[:buflen]
hex_dump_memory(logging.DEBUG, 'Store for forwarding:', buffer, buflen) hex_dump_memory(logging.DEBUG, 'Store for forwarding:',
buffer, buflen)
self.__parse_header(self._forward_buffer, len(self._forward_buffer)) self.__parse_header(self._forward_buffer,
len(self._forward_buffer))
fnc = self.switch.get(self.msg_id, self.msg_unknown) fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'forwrd') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' ) logger.info(self.__flow_str(self.server_side, 'forwrd') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
return return
''' '''
Our private methods Our private methods
''' '''
def __flow_str(self, server_side:bool, type:('rx','tx','forwrd', 'drop')): def __flow_str(self, server_side: bool, type:
switch={ ('rx', 'tx', 'forwrd', 'drop')): # noqa: F821
'rx': ' <', switch = {
'tx': ' >', 'rx': ' <',
'forwrd': '<< ', 'tx': ' >',
'drop': ' xx', 'forwrd': '<< ',
'rxS': '> ', 'drop': ' xx',
'txS': '< ', 'rxS': '> ',
'forwrdS':' >>', 'txS': '< ',
'dropS': 'xx ', 'forwrdS': ' >>',
'dropS': 'xx ',
} }
if server_side: type +='S' if server_side:
type += 'S'
return switch.get(type, '???') return switch.get(type, '???')
def __timestamp(self): def __timestamp(self):
@@ -184,7 +201,7 @@ class Message(metaclass=IterRegistry):
ts = time.time() ts = time.time()
else: else:
            # convert localtime to epoch             # convert localtime to epoch
ts = (datetime.now() - datetime(1970,1,1)).total_seconds() ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
return round(ts*1000) return round(ts*1000)
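Both branches of __timestamp() boil down to "now, in milliseconds"; note that the datetime.now() fallback measures local wall-clock time against a naive 1970 epoch, so it differs from time.time() by the local UTC offset. A quick worked comparison:

import time
from datetime import datetime

ts_utc_ms = round(time.time() * 1000)
ts_local_ms = round((datetime.now() - datetime(1970, 1, 1)).total_seconds() * 1000)
print(ts_local_ms - ts_utc_ms)   # roughly the local UTC offset, in milliseconds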
# check if there is a complete header in the buffer, parse it # check if there is a complete header in the buffer, parse it
@@ -197,12 +214,12 @@ class Message(metaclass=IterRegistry):
# #
    # if the header is incomplete, then self.header_len is still 0     # if the header is incomplete, then self.header_len is still 0
# #
def __parse_header(self, buf:bytes, buf_len:int) -> None: def __parse_header(self, buf: bytes, buf_len: int) -> None:
if (buf_len <5): # enough bytes to read len and id_len? if (buf_len < 5): # enough bytes to read len and id_len?
return return
result = struct.unpack_from('!lB', buf, 0) result = struct.unpack_from('!lB', buf, 0)
len = result[0] # len of complete message len = result[0] # len of complete message
id_len = result[1] # len of variable id string id_len = result[1] # len of variable id string
hdr_len = 5+id_len+2 hdr_len = 5+id_len+2
@@ -214,7 +231,7 @@ class Message(metaclass=IterRegistry):
# store parsed header values in the class # store parsed header values in the class
self.id_str = result[0] self.id_str = result[0]
self.ctrl = Control(result[1]) self.ctrl = Control(result[1])
self.msg_id = result[2] self.msg_id = result[2]
self.data_len = len-id_len-3 self.data_len = len-id_len-3
self.header_len = hdr_len self.header_len = hdr_len
@@ -222,25 +239,26 @@ class Message(metaclass=IterRegistry):
return return
def __build_header(self, ctrl) -> None: def __build_header(self, ctrl) -> None:
self.send_msg_ofs = len (self._send_buffer) self.send_msg_ofs = len(self._send_buffer)
self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB', 0, self.id_str, ctrl, self.msg_id) self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB',
0, self.id_str, ctrl, self.msg_id)
fnc = self.switch.get(self.msg_id, self.msg_unknown) fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'tx') + f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}' ) logger.info(self.__flow_str(self.server_side, 'tx') +
f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}')
def __finish_send_msg(self) -> None: def __finish_send_msg(self) -> None:
_len = len(self._send_buffer) - self.send_msg_ofs _len = len(self._send_buffer) - self.send_msg_ofs
struct.pack_into('!l',self._send_buffer, self.send_msg_ofs, _len-4) struct.pack_into('!l', self._send_buffer, self.send_msg_ofs, _len-4)
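Putting __parse_header(), __build_header() and __finish_send_msg() together, the wire format appears to be: a 4-byte big-endian length counting everything after itself, a Pascal-style id string, one ctrl byte, one msg-id byte, then the payload. A hedged round-trip sketch under that reading; the function names and the dummy serial number are illustrative:

import struct

def build_frame(id_str: bytes, ctrl: int, msg_id: int, payload: bytes) -> bytes:
    body = struct.pack(f'!{len(id_str) + 1}pBB', id_str, ctrl, msg_id) + payload
    return struct.pack('!l', len(body)) + body      # length written last, like
                                                    # __finish_send_msg()

def parse_frame(buf: bytes):
    length, id_len = struct.unpack_from('!lB', buf, 0)
    id_str, ctrl, msg_id = struct.unpack_from(f'!{id_len + 1}pBB', buf, 4)
    header_len = 5 + id_len + 2
    data_len = length - id_len - 3
    return id_str, ctrl, msg_id, buf[header_len:header_len + data_len]

frame = build_frame(b'EXAMPLE_SN_01', 0x91, 0x22, b'')
assert parse_frame(frame) == (b'EXAMPLE_SN_01', 0x91, 0x22, b'')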
def __dispatch_msg(self) -> None: def __dispatch_msg(self) -> None:
fnc = self.switch.get(self.msg_id, self.msg_unknown) fnc = self.switch.get(self.msg_id, self.msg_unknown)
if self.unique_id: if self.unique_id:
logger.info(self.__flow_str(self.server_side, 'rx') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' ) logger.info(self.__flow_str(self.server_side, 'rx') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
fnc() fnc()
else: else:
logger.info(self.__flow_str(self.server_side, 'drop') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' ) logger.info(self.__flow_str(self.server_side, 'drop') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
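The dispatch above is a plain dict lookup with a default: self.switch maps msg ids (0x00, 0x22, 0x71, ... as set up in __init__) to handler methods, and anything unmapped falls through to msg_unknown. A stripped-down sketch of the same idea with illustrative handlers:

handlers = {
    0x00: lambda: 'contact info',
    0x22: lambda: 'get time',
    0x71: lambda: 'collector data',
}

def dispatch(msg_id: int) -> str:
    fnc = handlers.get(msg_id, lambda: 'unknown message')   # default instead of KeyError
    return fnc()

assert dispatch(0x22) == 'get time'
assert dispatch(0xff) == 'unknown message'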
def __flush_recv_msg(self) -> None: def __flush_recv_msg(self) -> None:
self._recv_buffer = self._recv_buffer[(self.header_len+self.data_len):] self._recv_buffer = self._recv_buffer[(self.header_len+self.data_len):]
@@ -255,7 +273,7 @@ class Message(metaclass=IterRegistry):
self._send_buffer += b'\x01' self._send_buffer += b'\x01'
self.__finish_send_msg() self.__finish_send_msg()
elif self.ctrl.is_resp(): elif self.ctrl.is_resp():
return # ignore received response from tsun return # ignore received response from tsun
else: else:
self.inc_counter('Unknown_Ctrl') self.inc_counter('Unknown_Ctrl')
@@ -271,33 +289,32 @@ class Message(metaclass=IterRegistry):
self.__finish_send_msg() self.__finish_send_msg()
elif self.ctrl.is_resp(): elif self.ctrl.is_resp():
result = struct.unpack_from(f'!q', self._recv_buffer, self.header_len) result = struct.unpack_from('!q', self._recv_buffer,
self.header_len)
logger.debug(f'tsun-time: {result[0]:08x}') logger.debug(f'tsun-time: {result[0]:08x}')
return # ignore received response from tsun return # ignore received response from tsun
else: else:
self.inc_counter('Unknown_Ctrl') self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len) self.forward(self._recv_buffer, self.header_len+self.data_len)
def parse_msg_header(self): def parse_msg_header(self):
result = struct.unpack_from('!lB', self._recv_buffer, self.header_len) result = struct.unpack_from('!lB', self._recv_buffer, self.header_len)
        data_id = result[0]      # data ID of this record         data_id = result[0]      # data ID of this record
id_len = result[1] # len of variable id string id_len = result[1] # len of variable id string
logger.debug(f'Data_ID: {data_id} id_len: {id_len}') logger.debug(f'Data_ID: {data_id} id_len: {id_len}')
msg_hdr_len= 5+id_len+9 msg_hdr_len = 5+id_len+9
result = struct.unpack_from(f'!{id_len+1}pBq', self._recv_buffer, self.header_len+4) result = struct.unpack_from(f'!{id_len+1}pBq', self._recv_buffer,
self.header_len + 4)
logger.debug(f'ID: {result[0]} B: {result[1]}') logger.debug(f'ID: {result[0]} B: {result[1]}')
logger.debug(f'time: {result[2]:08x}') logger.debug(f'time: {result[2]:08x}')
#logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime("%Y-%m-%d %H:%M:%S")}') # logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime(
# "%Y-%m-%d %H:%M:%S")}')
return msg_hdr_len return msg_hdr_len
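parse_msg_header() walks a second, inner header at the start of the payload: a 4-byte data id, a Pascal-style device id (whose length byte is the id_len just read), one further byte, and an 8-byte big-endian timestamp. A sketch under that reading; the 'flag' name for the single byte is an assumption, the original only logs it as 'B':

import struct

def parse_data_subheader(buf: bytes, offset: int):
    data_id, id_len = struct.unpack_from('!lB', buf, offset)
    dev_id, flag, ts = struct.unpack_from(f'!{id_len + 1}pBq', buf, offset + 4)
    sub_hdr_len = 5 + id_len + 9          # same arithmetic as msg_hdr_len above
    return data_id, dev_id, flag, ts, sub_hdr_len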
def msg_collector_data(self): def msg_collector_data(self):
if self.ctrl.is_ind(): if self.ctrl.is_ind():
self.__build_header(0x99) self.__build_header(0x99)
@@ -306,13 +323,12 @@ class Message(metaclass=IterRegistry):
self.__process_data() self.__process_data()
elif self.ctrl.is_resp(): elif self.ctrl.is_resp():
return # ignore received response return # ignore received response
else: else:
self.inc_counter('Unknown_Ctrl') self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len) self.forward(self._recv_buffer, self.header_len+self.data_len)
def msg_inverter_data(self): def msg_inverter_data(self):
if self.ctrl.is_ind(): if self.ctrl.is_ind():
self.__build_header(0x99) self.__build_header(0x99)
@@ -321,7 +337,7 @@ class Message(metaclass=IterRegistry):
self.__process_data() self.__process_data()
elif self.ctrl.is_resp(): elif self.ctrl.is_resp():
return # ignore received response return # ignore received response
else: else:
self.inc_counter('Unknown_Ctrl') self.inc_counter('Unknown_Ctrl')
@@ -330,16 +346,12 @@ class Message(metaclass=IterRegistry):
def __process_data(self): def __process_data(self):
msg_hdr_len = self.parse_msg_header() msg_hdr_len = self.parse_msg_header()
for key, update in self.db.parse(self._recv_buffer[self.header_len + msg_hdr_len:]): for key, update in self.db.parse(self._recv_buffer[self.header_len
if update: self.new_data[key] = True + msg_hdr_len:]):
if update:
self.new_data[key] = True
def msg_unknown(self): def msg_unknown(self):
logger.warning (f"Unknow Msg: ID:{self.msg_id}") logger.warning(f"Unknow Msg: ID:{self.msg_id}")
self.inc_counter('Unknown_Msg') self.inc_counter('Unknown_Msg')
self.forward(self._recv_buffer, self.header_len+self.data_len) self.forward(self._recv_buffer, self.header_len+self.data_len)
View File
@@ -1,4 +1,5 @@
import asyncio, logging import asyncio
import logging
import aiomqtt import aiomqtt
from config import Config from config import Config
@@ -7,10 +8,12 @@ logger_mqtt = logging.getLogger('mqtt')
class Singleton(type): class Singleton(type):
_instances = {} _instances = {}
def __call__(cls, *args, **kwargs): def __call__(cls, *args, **kwargs):
logger_mqtt.debug(f'singleton: __call__') logger_mqtt.debug('singleton: __call__')
if cls not in cls._instances: if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) cls._instances[cls] = super(Singleton,
cls).__call__(*args, **kwargs)
return cls._instances[cls] return cls._instances[cls]
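The Singleton metaclass above caches the first instance per class, so every later Mqtt(...) call returns the same object. A self-contained sketch; Broker is an illustrative stand-in for Mqtt:

class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]

class Broker(metaclass=Singleton):
    def __init__(self, cb=None):
        self.cb = cb

first = Broker(cb=print)
second = Broker()                 # later args are ignored; the same object comes back
assert first is second and second.cb is print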
@@ -19,13 +22,13 @@ class Mqtt(metaclass=Singleton):
cb_MqttIsUp = None cb_MqttIsUp = None
def __init__(self, cb_MqttIsUp): def __init__(self, cb_MqttIsUp):
logger_mqtt.debug(f'MQTT: __init__') logger_mqtt.debug('MQTT: __init__')
if cb_MqttIsUp: self.cb_MqttIsUp = cb_MqttIsUp if cb_MqttIsUp:
self.cb_MqttIsUp = cb_MqttIsUp
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
self.task = loop.create_task(self.__loop()) self.task = loop.create_task(self.__loop())
self.ha_restarts = 0 self.ha_restarts = 0
@property @property
def ha_restarts(self): def ha_restarts(self):
return self._ha_restarts return self._ha_restarts
@@ -35,29 +38,30 @@ class Mqtt(metaclass=Singleton):
self._ha_restarts = value self._ha_restarts = value
def __del__(self): def __del__(self):
logger_mqtt.debug(f'MQTT: __del__') logger_mqtt.debug('MQTT: __del__')
async def close(self) -> None: async def close(self) -> None:
logger_mqtt.debug(f'MQTT: close') logger_mqtt.debug('MQTT: close')
self.task.cancel() self.task.cancel()
try: try:
await self.task await self.task
except Exception as e: except Exception as e:
logging.debug(f"Mqtt.close: exception: {e} ...") logging.debug(f"Mqtt.close: exception: {e} ...")
async def publish(self, topic: str, payload: str | bytes | bytearray
| int | float | None = None) -> None:
async def publish(self, topic: str, payload: str | bytes | bytearray | int | float | None = None) -> None:
if self.client: if self.client:
await self.client.publish(topic, payload) await self.client.publish(topic, payload)
async def __loop(self) -> None: async def __loop(self) -> None:
mqtt = Config.get('mqtt') mqtt = Config.get('mqtt')
ha = Config.get('ha') ha = Config.get('ha')
        logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:{mqtt["port"]} user:{mqtt["user"]}')         logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:'
        self.client = aiomqtt.Client(hostname=mqtt['host'], port=mqtt['port'], username=mqtt['user'], password=mqtt['passwd'])                          f'{mqtt["port"]} '
                         f'user:{mqtt["user"]}')
self.client = aiomqtt.Client(hostname=mqtt['host'], port=mqtt['port'],
username=mqtt['user'],
password=mqtt['passwd'])
interval = 5 # Seconds interval = 5 # Seconds
while True: while True:
@@ -69,18 +73,21 @@ class Mqtt(metaclass=Singleton):
await self.cb_MqttIsUp() await self.cb_MqttIsUp()
async with self.client.messages() as messages: async with self.client.messages() as messages:
await self.client.subscribe(f"{ha['auto_conf_prefix']}/status") await self.client.subscribe(f"{ha['auto_conf_prefix']}"
"/status")
async for message in messages: async for message in messages:
status = message.payload.decode("UTF-8") status = message.payload.decode("UTF-8")
logger_mqtt.info(f'Home-Assistant Status: {status}') logger_mqtt.info('Home-Assistant Status:'
f' {status}')
if status == 'online': if status == 'online':
self.ha_restarts += 1 self.ha_restarts += 1
await self.cb_MqttIsUp() await self.cb_MqttIsUp()
except aiomqtt.MqttError: except aiomqtt.MqttError:
logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...") logger_mqtt.info(f"Connection lost; Reconnecting in {interval}"
" seconds ...")
await asyncio.sleep(interval) await asyncio.sleep(interval)
except asyncio.CancelledError: except asyncio.CancelledError:
logger_mqtt.debug(f"MQTT task cancelled") logger_mqtt.debug("MQTT task cancelled")
self.client = None self.client = None
return return
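The shape of __loop() is the reconnect pattern from the aiomqtt documentation: create the client once, re-enter its context manager in an endless loop, back off on MqttError, and exit on CancelledError. A minimal skeleton under those assumptions, with the subscribe/consume body left as a placeholder:

import asyncio
import aiomqtt

async def mqtt_loop(host: str, port: int, interval: int = 5) -> None:
    client = aiomqtt.Client(hostname=host, port=port)
    while True:
        try:
            async with client:               # connect (and cleanly disconnect)
                ...                          # subscribe and consume messages here
        except aiomqtt.MqttError:
            await asyncio.sleep(interval)    # connection lost -> retry after a pause
        except asyncio.CancelledError:
            return                           # task cancelled on shutdown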
View File
@@ -1,5 +1,9 @@
import logging, asyncio, signal, functools, os import logging
from logging import config import asyncio
import signal
import functools
import os
# from logging import config
from async_stream import AsyncStream from async_stream import AsyncStream
from inverter import Inverter from inverter import Inverter
from config import Config from config import Config
@@ -32,11 +36,12 @@ def handle_SIGTERM(loop):
def get_log_level() -> int: def get_log_level() -> int:
'''checks if LOG_LVL is set in the environment and returns the corresponding logging.LOG_LEVEL''' '''checks if LOG_LVL is set in the environment and returns the
corresponding logging.LOG_LEVEL'''
log_level = os.getenv('LOG_LVL', 'INFO') log_level = os.getenv('LOG_LVL', 'INFO')
if log_level== 'DEBUG': if log_level == 'DEBUG':
log_level = logging.DEBUG log_level = logging.DEBUG
elif log_level== 'WARN': elif log_level == 'WARN':
log_level = logging.WARNING log_level = logging.WARNING
else: else:
log_level = logging.INFO log_level = logging.INFO
@@ -48,7 +53,7 @@ if __name__ == "__main__":
# Setup our daily, rotating logger # Setup our daily, rotating logger
# #
serv_name = os.getenv('SERVICE_NAME', 'proxy') serv_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown') version = os.getenv('VERSION', 'unknown')
logging.config.fileConfig('logging.ini') logging.config.fileConfig('logging.ini')
logging.info(f'Server "{serv_name} - {version}" will be started') logging.info(f'Server "{serv_name} - {version}" will be started')
@@ -68,14 +73,17 @@ if __name__ == "__main__":
Inverter.class_init() Inverter.class_init()
# #
    # Register some UNIX signal handlers for a graceful server shutdown on Docker restart and stop     # Register some UNIX signal handlers for a graceful server shutdown
    # on Docker restart and stop
# #
for signame in ('SIGINT','SIGTERM'): for signame in ('SIGINT', 'SIGTERM'):
loop.add_signal_handler(getattr(signal, signame), functools.partial(handle_SIGTERM, loop)) loop.add_signal_handler(getattr(signal, signame),
functools.partial(handle_SIGTERM, loop))
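For reference, a self-contained variant of the same add_signal_handler wiring; it waits on an asyncio.Event instead of calling the project's handle_SIGTERM, whose body is not part of this diff:

import asyncio
import signal

async def main() -> None:
    stop = asyncio.Event()
    loop = asyncio.get_running_loop()
    for signame in ('SIGINT', 'SIGTERM'):
        loop.add_signal_handler(getattr(signal, signame), stop.set)
    await stop.wait()                 # run until SIGINT or SIGTERM arrives

if __name__ == '__main__':
    asyncio.run(main())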
# #
    # Create a task for our listening server. This must be a task! If we call start_server directly out     # Create a task for our listening server. This must be a task! If we call
    # of our main task, the event loop will be blocked and we can't receive and handle the UNIX signals!     # start_server directly out of our main task, the event loop will be blocked
    # and we can't receive and handle the UNIX signals!
# #
loop.create_task(asyncio.start_server(handle_client, '0.0.0.0', 5005)) loop.create_task(asyncio.start_server(handle_client, '0.0.0.0', 5005))
@@ -85,7 +93,6 @@ if __name__ == "__main__":
pass pass
finally: finally:
Inverter.class_close(loop) Inverter.class_close(loop)
logging.info ('Close event loop') logging.info('Close event loop')
loop.close() loop.close()
logging.info (f'Finally, exit Server "{serv_name}"') logging.info(f'Finally, exit Server "{serv_name}"')