Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into s-allius/issue397

Stefan Allius
2025-05-20 23:29:47 +02:00
30 changed files with 1156 additions and 397 deletions

app/src/gen3plus/solarman_v5.py Normal file → Executable file
View File

@@ -247,6 +247,7 @@ class SolarmanBase(Message):
class SolarmanV5(SolarmanBase):
AT_CMD = 1
MB_RTU_CMD = 2
DCU_CMD = 5
AT_CMD_RSP = 8
MB_CLIENT_DATA_UP = 30
'''Data up time in client mode'''
@@ -532,6 +533,26 @@ class SolarmanV5(SolarmanBase):
except Exception:
self.ifc.tx_clear()
def send_dcu_cmd(self, pdu: bytearray):
if self.sensor_list != 0x3026:
logger.debug(f'[{self.node_id}] DCU CMD not allowed,'
f' for sensor: {self.sensor_list:#04x}')
return
if self.state != State.up:
logger.warning(f'[{self.node_id}] ignore DCU CMD,'
' because the state is not UP anymore')
return
self.inverter.forward_dcu_cmd_resp = False
self._build_header(0x4510)
self.ifc.tx_add(struct.pack('<BHLLL', self.DCU_CMD,
self.sensor_list, 0, 0, 0))
self.ifc.tx_add(pdu)
self._finish_send_msg()
self.ifc.tx_log(logging.INFO, f'Send DCU CMD :{self.addr}:')
self.ifc.tx_flush()
def __forward_msg(self):
self.forward(self.ifc.rx_peek(), self.header_len+self.data_len+2)
@@ -647,6 +668,10 @@ class SolarmanV5(SolarmanBase):
self.inc_counter('AT_Command')
self.inverter.forward_at_cmd_resp = True
if ftype == self.DCU_CMD:
self.inc_counter('DCU_Command')
self.inverter.forward_dcu_cmd_resp = True
elif ftype == self.MB_RTU_CMD:
rstream = self.ifc.remote.stream
if rstream.mb.recv_req(data[15:],
@@ -670,6 +695,10 @@ class SolarmanV5(SolarmanBase):
if self.inverter.forward_at_cmd_resp:
return logging.INFO
return logging.DEBUG
elif ftype == self.DCU_CMD:
if self.inverter.forward_dcu_cmd_resp:
return logging.INFO
return logging.DEBUG
elif ftype == self.MB_RTU_CMD \
and self.server_side:
return self.mb.last_log_lvl
@@ -689,6 +718,16 @@ class SolarmanV5(SolarmanBase):
logger.info(f'{key}: {data_json}')
self.publish_mqtt(f'{Proxy.entity_prfx}{node_id}{key}', data_json) # noqa: E501
return
elif ftype == self.DCU_CMD:
if not self.inverter.forward_dcu_cmd_resp:
data_json = '+ok'
node_id = self.node_id
key = 'dcu_resp'
logger.info(f'{key}: {data_json}')
self.publish_mqtt(f'{Proxy.entity_prfx}{node_id}{key}', data_json) # noqa: E501
return
elif ftype == self.MB_RTU_CMD:
self.__modbus_command_rsp(data)
return
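For orientation, a minimal sketch (not part of the commit) of the payload that send_dcu_cmd() places between the Solarman V5 header and trailer; _build_header(0x4510) and _finish_send_msg() add the framing, which is not shown in this diff, and the pdu below is the illustrative frame built by the new MQTT handler _dcu_cmd() further down:

import struct

DCU_CMD = 5            # new frame type introduced by this commit
sensor_list = 0x3026   # only this sensor list accepts DCU commands (see check above)
pdu = struct.pack('>BBBBBBH', 1, 1, 6, 1, 0, 1, 6000)   # e.g. 600.0 W * 10

# control block: frame type, sensor list, three zeroed 32-bit fields, then the pdu
payload = struct.pack('<BHLLL', DCU_CMD, sensor_list, 0, 0, 0) + pdu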

View File

@@ -44,6 +44,7 @@ class Register(Enum):
MODBUS_COMMAND = 60
AT_COMMAND_BLOCKED = 61
CLOUD_CONN_CNT = 62
DCU_COMMAND = 63
OUTPUT_POWER = 83
RATED_POWER = 84
INVERTER_TEMP = 85
@@ -625,6 +626,7 @@ class Infos:
Register.INVALID_MSG_FMT: {'name': ['proxy', 'Invalid_Msg_Format'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_msg_fmt_', 'fmt': FMT_INT, 'name': 'Invalid Message Format', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.AT_COMMAND: {'name': ['proxy', 'AT_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_', 'fmt': FMT_INT, 'name': 'AT Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.AT_COMMAND_BLOCKED: {'name': ['proxy', 'AT_Command_Blocked'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_blocked_', 'fmt': FMT_INT, 'name': 'AT Command Blocked', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.DCU_COMMAND: {'name': ['proxy', 'DCU_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'dcu_cmd_', 'fmt': FMT_INT, 'name': 'DCU Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.MODBUS_COMMAND: {'name': ['proxy', 'Modbus_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'modbus_cmd_', 'fmt': FMT_INT, 'name': 'Modbus Command', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501
# 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':FMT_FLOAT,'name': 'Grid Voltage'}}, # noqa: E501

View File

@@ -1,16 +1,15 @@
[loggers]
keys=root,tracer,mesg,conn,data,mqtt,asyncio
keys=root,tracer,mesg,conn,data,mqtt,asyncio,hypercorn_access,hypercorn_error
[handlers]
keys=console_handler,file_handler_name1,file_handler_name2
keys=console_handler,file_handler_name1,file_handler_name2,file_handler_name3,dashboard
[formatters]
keys=console_formatter,file_formatter
[logger_root]
level=DEBUG
handlers=console_handler,file_handler_name1
handlers=console_handler,file_handler_name1,dashboard
[logger_conn]
level=DEBUG
@@ -20,13 +19,13 @@ qualname=conn
[logger_mqtt]
level=INFO
handlers=console_handler,file_handler_name1
handlers=console_handler,file_handler_name1,dashboard
propagate=0
qualname=mqtt
[logger_asyncio]
level=INFO
handlers=console_handler,file_handler_name1
handlers=console_handler,file_handler_name1,dashboard
propagate=0
qualname=asyncio
@@ -49,6 +48,18 @@ handlers=file_handler_name2
propagate=0
qualname=tracer
[logger_hypercorn_access]
level=INFO
handlers=file_handler_name3
propagate=0
qualname=hypercorn.access
[logger_hypercorn_error]
level=INFO
handlers=file_handler_name1,dashboard
propagate=0
qualname=hypercorn.error
[handler_console_handler]
class=StreamHandler
level=DEBUG
@@ -66,6 +77,16 @@ level=NOTSET
formatter=file_formatter
args=(handlers.log_path + 'trace.log', when:='midnight', backupCount:=handlers.log_backups)
[handler_file_handler_name3]
class=handlers.TimedRotatingFileHandler
level=NOTSET
formatter=file_formatter
args=(handlers.log_path + 'access.log', when:='midnight', backupCount:=handlers.log_backups)
[handler_dashboard]
level=WARNING
class=web.log_handler.LogHandler
[formatter_console_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s
datefmt=%Y-%m-%d %H:%M:%S
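The new file_handler_name3 and dashboard entries follow the same convention as the existing handlers: handlers.log_path and handlers.log_backups are injected into the logging.handlers module before the ini file is evaluated (see Server.init_logging_system() below), and the dashboard handler additionally requires web.log_handler to be importable. A minimal bootstrapping sketch with illustrative values:

import logging.config
import logging.handlers

# normally taken from the CLI arguments -l / -b before fileConfig() runs
setattr(logging.handlers, "log_path", "./log/")
setattr(logging.handlers, "log_backups", 0)
logging.config.fileConfig("logging.ini")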

app/src/mqtt.py Normal file → Executable file
View File

@@ -2,6 +2,8 @@ import asyncio
import logging
import aiomqtt
import traceback
import struct
import inspect
from modbus import Modbus
from messages import Message
@@ -27,14 +29,27 @@ class Mqtt(metaclass=Singleton):
loop = asyncio.get_event_loop()
self.task = loop.create_task(self.__loop())
self.ha_restarts = 0
self.topic_defs = [
{'prefix': 'auto_conf_prefix', 'topic': '/status',
'fnc': self._ha_status, 'args': []},
{'prefix': 'entity_prefix', 'topic': '/+/rated_load',
'fnc': self._modbus_cmd,
'args': [Modbus.WRITE_SINGLE_REG, 1, 0x2008]},
{'prefix': 'entity_prefix', 'topic': '/+/out_coeff',
'fnc': self._out_coeff, 'args': []},
{'prefix': 'entity_prefix', 'topic': '/+/dcu_power',
'fnc': self._dcu_cmd, 'args': []},
{'prefix': 'entity_prefix', 'topic': '/+/modbus_read_regs',
'fnc': self._modbus_cmd, 'args': [Modbus.READ_REGS, 2]},
{'prefix': 'entity_prefix', 'topic': '/+/modbus_read_inputs',
'fnc': self._modbus_cmd, 'args': [Modbus.READ_INPUTS, 2]},
{'prefix': 'entity_prefix', 'topic': '/+/at_cmd',
'fnc': self._at_cmd, 'args': []},
]
ha = Config.get('ha')
self.ha_status_topic = f"{ha['auto_conf_prefix']}/status"
self.mb_rated_topic = f"{ha['entity_prefix']}/+/rated_load"
self.mb_out_coeff_topic = f"{ha['entity_prefix']}/+/out_coeff"
self.mb_reads_topic = f"{ha['entity_prefix']}/+/modbus_read_regs"
self.mb_inputs_topic = f"{ha['entity_prefix']}/+/modbus_read_inputs"
self.mb_at_cmd_topic = f"{ha['entity_prefix']}/+/at_cmd"
for entry in self.topic_defs:
entry['full_topic'] = f"{ha[entry['prefix']]}{entry['topic']}"
@property
def ha_restarts(self):
@@ -75,19 +90,7 @@ class Mqtt(metaclass=Singleton):
try:
async with self.__client:
logger_mqtt.info('MQTT broker connection established')
self.ctime = datetime.now()
self.published = 0
self.received = 0
if self.__cb_mqtt_is_up:
await self.__cb_mqtt_is_up()
await self.__client.subscribe(self.ha_status_topic)
await self.__client.subscribe(self.mb_rated_topic)
await self.__client.subscribe(self.mb_out_coeff_topic)
await self.__client.subscribe(self.mb_reads_topic)
await self.__client.subscribe(self.mb_inputs_topic)
await self.__client.subscribe(self.mb_at_cmd_topic)
await self._init_new_conn()
async for message in self.__client.messages:
await self.dispatch_msg(message)
@@ -117,47 +120,51 @@ class Mqtt(metaclass=Singleton):
f"Exception:\n"
f"{traceback.format_exc()}")
async def _init_new_conn(self):
self.ctime = datetime.now()
self.published = 0
self.received = 0
if self.__cb_mqtt_is_up:
await self.__cb_mqtt_is_up()
for entry in self.topic_defs:
await self.__client.subscribe(entry['full_topic'])
async def dispatch_msg(self, message):
self.received += 1
if message.topic.matches(self.ha_status_topic):
status = message.payload.decode("UTF-8")
logger_mqtt.info('Home-Assistant Status:'
f' {status}')
if status == 'online':
self.ha_restarts += 1
for entry in self.topic_defs:
if message.topic.matches(entry['full_topic']) \
and 'fnc' in entry:
fnc = entry['fnc']
if inspect.iscoroutinefunction(fnc):
await entry['fnc'](message, *entry['args'])
elif callable(fnc):
entry['fnc'](message, *entry['args'])
async def _ha_status(self, message):
status = message.payload.decode("UTF-8")
logger_mqtt.info('Home-Assistant Status:'
f' {status}')
if status == 'online':
self.ha_restarts += 1
if self.__cb_mqtt_is_up:
await self.__cb_mqtt_is_up()
if message.topic.matches(self.mb_rated_topic):
await self.modbus_cmd(message,
Modbus.WRITE_SINGLE_REG,
1, 0x2008)
if message.topic.matches(self.mb_out_coeff_topic):
payload = message.payload.decode("UTF-8")
try:
val = round(float(payload) * 1024/100)
if val < 0 or val > 1024:
logger_mqtt.error('out_coeff: value must be in'
' the range 0..100,'
f' got: {payload}')
else:
await self.modbus_cmd(message,
Modbus.WRITE_SINGLE_REG,
0, 0x202c, val)
except Exception:
pass
if message.topic.matches(self.mb_reads_topic):
await self.modbus_cmd(message,
Modbus.READ_REGS, 2)
if message.topic.matches(self.mb_inputs_topic):
await self.modbus_cmd(message,
Modbus.READ_INPUTS, 2)
if message.topic.matches(self.mb_at_cmd_topic):
await self.at_cmd(message)
async def _out_coeff(self, message):
payload = message.payload.decode("UTF-8")
try:
val = round(float(payload) * 1024/100)
if val < 0 or val > 1024:
logger_mqtt.error('out_coeff: value must be in'
' the range 0..100,'
f' got: {payload}')
else:
await self._modbus_cmd(message,
Modbus.WRITE_SINGLE_REG,
0, 0x202c, val)
except Exception:
pass
def each_inverter(self, message, func_name: str):
topic = str(message.topic)
@@ -175,7 +182,7 @@ class Mqtt(metaclass=Singleton):
else:
logger_mqtt.warning(f'Node_id: {node_id} not found')
async def modbus_cmd(self, message, func, params=0, addr=0, val=0):
async def _modbus_cmd(self, message, func, params=0, addr=0, val=0):
payload = message.payload.decode("UTF-8")
for fnc in self.each_inverter(message, "send_modbus_cmd"):
res = payload.split(',')
@@ -190,7 +197,22 @@ class Mqtt(metaclass=Singleton):
val = int(res[1])  # length
await fnc(func, addr, val, logging.INFO)
async def at_cmd(self, message):
async def _at_cmd(self, message):
payload = message.payload.decode("UTF-8")
for fnc in self.each_inverter(message, "send_at_cmd"):
await fnc(payload)
def _dcu_cmd(self, message):
payload = message.payload.decode("UTF-8")
try:
val = round(float(payload) * 10)
if val < 1000 or val > 8000:
logger_mqtt.error('dcu_power: value must be in'
' the range 100..800,'
f' got: {payload}')
else:
pdu = struct.pack('>BBBBBBH', 1, 1, 6, 1, 0, 1, val)
for fnc in self.each_inverter(message, "send_dcu_cmd"):
fnc(pdu)
except Exception:
pass
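To illustrate the new dcu_power topic end to end: publishing the value 600 (watts) is scaled and packed by _dcu_cmd() before being handed to each inverter's send_dcu_cmd(); topic prefix and node id below are placeholders:

import struct

# e.g. topic '<entity_prefix>/<node_id>/dcu_power', payload '600'
payload = "600"
val = round(float(payload) * 10)          # 6000; must lie within 1000..8000
pdu = struct.pack('>BBBBBBH', 1, 1, 6, 1, 0, 1, val)
# pdu is then sent via SolarmanV5.send_dcu_cmd() for every matching inverter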

View File

@@ -1,26 +1,161 @@
import logging
import asyncio
import logging.handlers
from logging import config # noqa F401
import asyncio
from asyncio import StreamReader, StreamWriter
import os
import argparse
from asyncio import StreamReader, StreamWriter
from quart import Quart, Response
from logging import config # noqa F401
from proxy import Proxy
from inverter_ifc import InverterIfc
from gen3.inverter_g3 import InverterG3
from gen3plus.inverter_g3p import InverterG3P
from scheduler import Schedule
from cnf.config import Config
from cnf.config_read_env import ConfigReadEnv
from cnf.config_read_toml import ConfigReadToml
from cnf.config_read_json import ConfigReadJson
from web import Web
from web.wrapper import url_for
from proxy import Proxy
from inverter_ifc import InverterIfc
from gen3.inverter_g3 import InverterG3
from gen3plus.inverter_g3p import InverterG3P
from scheduler import Schedule
from modbus_tcp import ModbusTcp
class Server():
serv_name = ''
version = ''
src_dir = ''
####
# The following default values are used for the unit tests only, since
# `Server.parse_args()' will not be called during test setup.
# Of course, we can call `Server.parse_args()' in a test case explicitly
# to override these values.
config_path = './config/'
json_config = ''
toml_config = ''
trans_path = '../translations/'
rel_urls = False
log_path = './log/'
log_backups = 0
log_level = None
def __init__(self, app, parse_args: bool):
''' Application Setup
1. Read the CLI arguments
2. Init the logging system from the ini file
3. Log the config params
4. Set the log-levels
5. Read and build the config for the app
'''
self.serv_name = os.getenv('SERVICE_NAME', 'proxy')
self.version = os.getenv('VERSION', 'unknown')
self.src_dir = os.path.dirname(__file__) + '/'
if parse_args: # pragma: no cover
self.parse_args(None)
self.init_logging_system()
self.build_config()
@app.context_processor
def utility_processor():
return dict(version=self.version)
def parse_args(self, arg_list: list[str] | None):
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config_path', type=str,
default='./config/',
help='set path for the configuration files')
parser.add_argument('-j', '--json_config', type=str,
help='read user config from json-file')
parser.add_argument('-t', '--toml_config', type=str,
help='read user config from toml-file')
parser.add_argument('-l', '--log_path', type=str,
default='./log/',
help='set path for the logging files')
parser.add_argument('-b', '--log_backups', type=int,
default=0,
help='set max number of daily log-files')
parser.add_argument('-tr', '--trans_path', type=str,
default='../translations/',
help='set path for the translations files')
parser.add_argument('-r', '--rel_urls', action="store_true",
help='use relative dashboard urls')
args = parser.parse_args(arg_list)
self.config_path = args.config_path
self.json_config = args.json_config
self.toml_config = args.toml_config
self.trans_path = args.trans_path
self.rel_urls = args.rel_urls
self.log_path = args.log_path
self.log_backups = args.log_backups
def init_logging_system(self):
setattr(logging.handlers, "log_path", self.log_path)
setattr(logging.handlers, "log_backups", self.log_backups)
os.makedirs(self.log_path, exist_ok=True)
logging.config.fileConfig(self.src_dir + 'logging.ini')
logging.info(
f'Server "{self.serv_name} - {self.version}" will be started')
logging.info(f'current dir: {os.getcwd()}')
logging.info(f"config_path: {self.config_path}")
logging.info(f"json_config: {self.json_config}")
logging.info(f"toml_config: {self.toml_config}")
logging.info(f"trans_path: {self.trans_path}")
logging.info(f"rel_urls: {self.rel_urls}")
logging.info(f"log_path: {self.log_path}")
if self.log_backups == 0:
logging.info("log_backups: unlimited")
else:
logging.info(f"log_backups: {self.log_backups} days")
self.log_level = self.get_log_level()
logging.info('******')
if self.log_level:
# set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
logging.getLogger().setLevel(self.log_level)
logging.getLogger('msg').setLevel(self.log_level)
logging.getLogger('conn').setLevel(self.log_level)
logging.getLogger('data').setLevel(self.log_level)
logging.getLogger('tracer').setLevel(self.log_level)
logging.getLogger('asyncio').setLevel(self.log_level)
# logging.getLogger('mqtt').setLevel(self.log_level)
def build_config(self):
# read config file
Config.init(ConfigReadToml(self.src_dir + "cnf/default_config.toml"),
log_path=self.log_path)
ConfigReadEnv()
ConfigReadJson(self.config_path + "config.json")
ConfigReadToml(self.config_path + "config.toml")
ConfigReadJson(self.json_config)
ConfigReadToml(self.toml_config)
config_err = Config.get_error()
if config_err is not None:
logging.info(f'config_err: {config_err}')
return
logging.info('******')
def get_log_level(self) -> int | None:
'''checks if LOG_LVL is set in the environment and returns the
corresponding logging level'''
switch = {
'DEBUG': logging.DEBUG,
'WARN': logging.WARNING,
'INFO': logging.INFO,
'ERROR': logging.ERROR,
}
log_lvl = os.getenv('LOG_LVL', None)
logging.info(f"LOG_LVL : {log_lvl}")
return switch.get(log_lvl, None)
class ProxyState:
_is_up = False
@@ -33,11 +168,48 @@ class ProxyState:
ProxyState._is_up = value
class HypercornLogHndl:
access_hndl = []
error_hndl = []
must_fix = False
HYPERC_ERR = 'hypercorn.error'
HYPERC_ACC = 'hypercorn.access'
@classmethod
def save(cls):
cls.access_hndl = logging.getLogger(
cls.HYPERC_ACC).handlers
cls.error_hndl = logging.getLogger(
cls.HYPERC_ERR).handlers
cls.must_fix = True
@classmethod
def restore(cls):
if not cls.must_fix:
return
cls.must_fix = False
access_hndl = logging.getLogger(
cls.HYPERC_ACC).handlers
if access_hndl != cls.access_hndl:
print(' * Fix hypercorn.access setting')
logging.getLogger(
cls.HYPERC_ACC).handlers = cls.access_hndl
error_hndl = logging.getLogger(
cls.HYPERC_ERR).handlers
if error_hndl != cls.error_hndl:
print(' * Fix hypercorn.error setting')
logging.getLogger(
cls.HYPERC_ERR).handlers = cls.error_hndl
app = Quart(__name__,
template_folder='web/templates',
static_folder='web/static')
app.secret_key = 'JKLdks.dajlKKKdladkflKwolafallsdfl'
app.jinja_env.globals.update(url_for=url_for)
server = Server(app, __name__ == "__main__")
Web(app, server.trans_path, server.rel_urls)
@app.route('/-/ready')
@@ -67,13 +239,36 @@ async def healthy():
return Response(status=200, response="I'm fine")
async def handle_client(reader: StreamReader, writer: StreamWriter, inv_class):
async def handle_client(reader: StreamReader,
writer: StreamWriter,
inv_class): # pragma: no cover
'''Handles a new incoming connection and starts an async loop'''
with inv_class(reader, writer) as inv:
await inv.local.ifc.server_loop()
@app.before_serving
async def startup_app(): # pragma: no cover
HypercornLogHndl.save()
loop = asyncio.get_event_loop()
Proxy.class_init()
Schedule.start()
ModbusTcp(loop)
for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]:
logging.info(f'listen on port: {port} for inverters')
loop.create_task(asyncio.start_server(lambda r, w, i=inv_class:
handle_client(r, w, i),
'0.0.0.0', port))
ProxyState.set_up(True)
@app.before_request
async def startup_request():
HypercornLogHndl.restore()
@app.after_serving
async def handle_shutdown(): # pragma: no cover
'''Close all TCP connections and stop the event loop'''
@@ -90,136 +285,15 @@ async def handle_shutdown(): # pragma: no cover
logging.info('Proxy disconnecting done')
#
# now cancel all remaining (pending) tasks
#
for task in asyncio.all_tasks():
if task == asyncio.current_task():
continue
task.cancel()
logging.info('Proxy cancelling done')
await Proxy.class_close(loop)
def get_log_level() -> int | None:
'''checks if LOG_LVL is set in the environment and returns the
corresponding logging.LOG_LEVEL'''
switch = {
'DEBUG': logging.DEBUG,
'WARN': logging.WARNING,
'INFO': logging.INFO,
'ERROR': logging.ERROR,
}
log_level = os.getenv('LOG_LVL', None)
logging.info(f"LOG_LVL : {log_level}")
if __name__ == "__main__": # pragma: no cover
return switch.get(log_level, None)
def main(): # pragma: no cover
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config_path', type=str,
default='./config/',
help='set path for the configuration files')
parser.add_argument('-j', '--json_config', type=str,
help='read user config from json-file')
parser.add_argument('-t', '--toml_config', type=str,
help='read user config from toml-file')
parser.add_argument('-l', '--log_path', type=str,
default='./log/',
help='set path for the logging files')
parser.add_argument('-b', '--log_backups', type=int,
default=0,
help='set max number of daily log-files')
parser.add_argument('-tr', '--trans_path', type=str,
default='../translations/',
help='set path for the translations files')
parser.add_argument('-r', '--rel_urls', type=bool,
default=False,
help='use relative dashboard urls')
args = parser.parse_args()
#
# Setup our daily, rotating logger
#
serv_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
@app.context_processor
def utility_processor():
return dict(version=version)
setattr(logging.handlers, "log_path", args.log_path)
setattr(logging.handlers, "log_backups", args.log_backups)
os.makedirs(args.log_path, exist_ok=True)
src_dir = os.path.dirname(__file__) + '/'
logging.config.fileConfig(src_dir + 'logging.ini')
logging.info(f'Server "{serv_name} - {version}" will be started')
logging.info(f'current dir: {os.getcwd()}')
logging.info(f"config_path: {args.config_path}")
logging.info(f"json_config: {args.json_config}")
logging.info(f"toml_config: {args.toml_config}")
logging.info(f"trans_path: {args.trans_path}")
logging.info(f"rel_urls: {args.rel_urls}")
logging.info(f"log_path: {args.log_path}")
if args.log_backups == 0:
logging.info("log_backups: unlimited")
else:
logging.info(f"log_backups: {args.log_backups} days")
log_level = get_log_level()
logging.info('******')
if log_level:
# set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
logging.getLogger().setLevel(log_level)
logging.getLogger('msg').setLevel(log_level)
logging.getLogger('conn').setLevel(log_level)
logging.getLogger('data').setLevel(log_level)
logging.getLogger('tracer').setLevel(log_level)
logging.getLogger('asyncio').setLevel(log_level)
# logging.getLogger('mqtt').setLevel(log_level)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# read config file
Config.init(ConfigReadToml(src_dir + "cnf/default_config.toml"),
log_path=args.log_path)
ConfigReadEnv()
ConfigReadJson(args.config_path + "config.json")
ConfigReadToml(args.config_path + "config.toml")
ConfigReadJson(args.json_config)
ConfigReadToml(args.toml_config)
config_err = Config.get_error()
if config_err is not None:
logging.info(f'config_err: {config_err}')
return
logging.info('******')
Proxy.class_init()
Schedule.start()
ModbusTcp(loop)
Web(app, args.trans_path, args.rel_urls)
#
# Create tasks for our listening servers. These must be tasks! If we call
# start_server directly out of our main task, the eventloop will be blocked
# and we can't receive and handle the UNIX signals!
#
for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]:
logging.info(f'listen on port: {port} for inverters')
loop.create_task(asyncio.start_server(lambda r, w, i=inv_class:
handle_client(r, w, i),
'0.0.0.0', port))
loop.set_debug(log_level == logging.DEBUG)
try:
ProxyState.set_up(True)
logging.info("Start Quart")
app.run(host='0.0.0.0', port=8127, use_reloader=False, loop=loop,
debug=log_level == logging.DEBUG)
app.run(host='0.0.0.0', port=8127, use_reloader=False,
debug=server.log_level == logging.DEBUG)
logging.info("Quart stopped")
except KeyboardInterrupt:
@@ -228,10 +302,4 @@ def main(): # pragma: no cover
logging.info("Quart cancelled")
finally:
logging.debug('Close event loop')
loop.close()
logging.info(f'Finally, exit Server "{serv_name}"')
if __name__ == "__main__": # pragma: no cover
main()
logging.info(f'Finally, exit Server "{server.serv_name}"')
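As the comment in the Server class notes, unit tests can either rely on the class-level defaults or call parse_args() with an explicit argument list. A minimal sketch (importing server has module-level side effects, since it builds the Quart app and a default Server instance, and it assumes logging.ini and the default config are reachable from the working directory):

from quart import Quart
from server import Server

srv = Server(Quart(__name__), parse_args=False)   # keep the built-in test defaults
srv.parse_args(['-c', './my-config/', '-r'])      # or override them explicitly
assert srv.config_path == './my-config/' and srv.rel_urls is True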

View File

@@ -10,39 +10,40 @@ from .log_handler import LogHandler
def _get_device_icon(client_mode: bool):
'''returns the icon for the device connection'''
if client_mode:
return 'fa-download fa-rotate-180'
return 'fa-download fa-rotate-180', 'Server Mode'
return 'fa-upload fa-rotate-180'
return 'fa-upload fa-rotate-180', 'Client Mode'
def _get_cloud_icon(emu_mode: bool):
'''returns the icon for the cloud connection'''
if emu_mode:
return 'fa-cloud-arrow-up-alt'
return 'fa-cloud-arrow-up-alt', 'Emu Mode'
return 'fa-cloud'
return 'fa-cloud', 'Proxy Mode'
def _get_row(inv: InverterBase):
'''build one row for the connection table'''
client_mode = inv.client_mode
inv_serial = inv.local.stream.inv_serial
icon1 = _get_device_icon(client_mode)
icon1, descr1 = _get_device_icon(client_mode)
ip1, port1 = inv.addr
icon2 = ''
descr2 = ''
ip2 = '--'
port2 = '--'
if inv.remote.ifc:
ip2, port2 = inv.remote.ifc.r_addr
icon2 = _get_cloud_icon(client_mode)
icon2, descr2 = _get_cloud_icon(client_mode)
row = []
row.append(f'<i class="fa {icon1}"></i> {ip1}:{port1}')
row.append(f'<i class="fa {icon1}"></i> {ip1}')
row.append(f'<i class="fa {icon1}" title="{_(descr1)}"></i> {ip1}:{port1}')
row.append(f'<i class="fa {icon1}" title="{_(descr1)}"></i> {ip1}')
row.append(inv_serial)
row.append(f'<i class="fa {icon2}"></i> {ip2}:{port2}')
row.append(f'<i class="fa {icon2}"></i> {ip2}')
row.append(f'<i class="fa {icon2}" title="{_(descr2)}"></i> {ip2}:{port2}')
row.append(f'<i class="fa {icon2}" title="{_(descr2)}"></i> {ip2}')
return row

View File

@@ -1,26 +1,58 @@
from quart import render_template
from quart_babel import format_datetime, format_decimal
from quart_babel import format_datetime, format_decimal, _
from quart.helpers import send_from_directory
from werkzeug.utils import secure_filename
from cnf.config import Config
from datetime import datetime
from os import DirEntry
import os
from dateutil import tz
from . import web
def _get_file(file):
def _get_birth_from_log(path: str) -> None | datetime:
'''read timestamp from the first line of a log file'''
dt = None
try:
with open(path) as f:
first_line = f.readline()
first_line = first_line.lstrip("'")
fmt = "%Y-%m-%d %H:%M:%S" if first_line[4] == '-' \
else "%d-%m-%Y %H:%M:%S"
dt = datetime.strptime(first_line[0:19], fmt). \
replace(tzinfo=tz.tzlocal())
except Exception:
pass
return dt
def _get_file(file: DirEntry) -> dict:
'''build one row for the log-file table'''
entry = {}
entry['name'] = file.name
stat = file.stat()
entry['size'] = format_decimal(stat.st_size)
entry['date'] = stat.st_mtime
entry['created'] = format_datetime(stat.st_ctime, format="short")
try:
dt = stat.st_birthtime
except Exception:
dt = _get_birth_from_log(file.path)
if dt:
entry['created'] = format_datetime(dt, format="short")
# sort by creating date, if available
entry['date'] = dt if isinstance(dt, float) else dt.timestamp()
else:
entry['created'] = _('n/a')
entry['date'] = stat.st_mtime
entry['modified'] = format_datetime(stat.st_mtime, format="short")
return entry
def get_list_data():
def get_list_data() -> list:
'''build the log-file table'''
file_list = []
with os.scandir(Config.get_log_path()) as it:
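The date-format heuristic in _get_birth_from_log() keys off the fifth character of the first log line; a small illustrative check:

from datetime import datetime
from dateutil import tz

first_line = "2025-05-20 23:29:47 INFO | mqtt | ..."   # illustrative first log line
fmt = "%Y-%m-%d %H:%M:%S" if first_line[4] == '-' else "%d-%m-%Y %H:%M:%S"
dt = datetime.strptime(first_line[0:19], fmt).replace(tzinfo=tz.tzlocal())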

View File

@@ -46,10 +46,13 @@ def get_table_data():
@web.route('/mqtt-fetch')
async def mqtt_fetch():
mqtt = Mqtt(None)
ctime = format_datetime(dt=mqtt.ctime, format='short')
cdatetime = format_datetime(dt=mqtt.ctime, format='d.MM. HH:mm')
data = {
"update-time": format_datetime(format="medium"),
"mqtt-ctime": f"<h3>{ctime}</h3>",
"mqtt-ctime": f"""
<h3 class="w3-hide-small w3-hide-medium">{cdatetime}</h3>
<h4 class="w3-hide-large">{cdatetime}</h4>
""",
"mqtt-tx": f"<h3>{mqtt.published}</h3>",
"mqtt-rx": f"<h3>{mqtt.received}</h3>",
}

View File

@@ -7,9 +7,9 @@
<div id="id01" class="w3-modal">
<div class="w3-modal-content" style="width:600px">
<div class="w3-container w3-padding-24">
<h2>{{_("Do you really want to delete the log file")}}:<br><b><span id="id03"></span></b> ?</h2>
<h2>{{_('Do you really want to delete the log file: <br>%(file)s ?', file='<b><span id="id03"></span></b>')}}</h2>
<div class="w3-bar">
<button id="id02" class="w3-button w3-red" onclick="deleteFile(); document.getElementById('id01').style.display='none'">{{_('Delete File</button')}}>
<button id="id02" class="w3-button w3-red" onclick="deleteFile(); document.getElementById('id01').style.display='none'">{{_('Delete File')}}</button>
<button class="w3-button w3-grey w3-right" onclick="document.getElementById('id01').style.display='none'">{{_('Abort')}}</button>
</div>
</div>

View File

@@ -8,7 +8,7 @@
<div class="w3-third">
<div class="w3-card-4">
<div class="w3-container w3-indigo w3-padding-16">
<div class="w3-left"><i class="fa fa-link w3-xxxlarge"></i></div>
<div class="w3-left"><i class="fa fa-business-time w3-xxxlarge"></i></div>
<div id = "mqtt-ctime" class="w3-right">
<h3>-</h3>
</div>
@@ -21,7 +21,7 @@
<div class="w3-third">
<div class="w3-card-4">
<div class="w3-container w3-purple w3-padding-16">
<div class="w3-left"><i class="fa fa-server w3-xxxlarge"></i></div>
<div class="w3-left"><i class="fa fa-angle-double-right w3-xxxlarge"></i></div>
<div id = "mqtt-tx" class="w3-right">
<h3>-</h3>
</div>
@@ -34,7 +34,7 @@
<div class="w3-third">
<div class="w3-card-4">
<div class="w3-container w3-orange w3-text-white w3-padding-16">
<div class="w3-left"><i class="fa fa-user w3-xxxlarge"></i></div>
<div class="w3-left"><i class="fa fa-angle-double-left w3-xxxlarge"></i></div>
<div id = "mqtt-rx" class="w3-right">
<h3>-</h3>
</div>