* Code Cleanup (#158)
* print coverage report
* create sonar-project property file
* install all py dependencies in one step
* code cleanup
* reduce cognitive complexity
* do not build on *.yml changes
* optimise version string handling (#159): reading the version string from the image updates it even if the image is re-pulled without re-deployment
* fix linter warning
* exclude *.pyi files
* ignore some rules for tests
* cleanup (#160)
* Sonar qube 3 (#163): fix SonarQube warnings in modbus.py
* Sonar qube 3 (#164)
* fix SonarQube warnings
* Sonar qube 3 (#165)
* cleanup
* Add support for TSUN Titan inverter, fixes #161
* fix SonarQube warnings
* fix error
* rename field "config"
* SonarQube reads flake8 output
* don't stop on flake8 errors
* flake8 scan only app/src for SonarQube
* update flake8 run
* ignore flake8 C901
* cleanup
* fix linter warnings
* ignore changed *.yml files
* read sensor list from Solarman data packets
* catch 'No route to' error and log it only in debug mode
* fix unit tests
* add sensor_list configuration
* adapt unit tests
* fix SonarQube warnings
* Sonar qube 3 (#166)
* add unit tests for mqtt.py
* add mock
* move test requirements into a file
* fix unit tests
* fix formatting
* initial version
* fix SonarQube warning
* Sonar qube 4 (#169)
* add unit test for inverter.py
* fix SonarQube warning
* Sonar qube 5 (#170)
* fix SonarLint warnings
* use random IP addresses for unit tests
* Docker: The description is missing (#171), fixes #167
* S allius/issue167 (#172)
* cleanup
* Sonar qube 6 (#174)
* test class ModbusConn
* Sonar qube 3 (#178)
* add more unit tests
* GEN3: don't crash on overwritten msg in the receive buffer
* improve test coverage and reduce test delays
* reduce cognitive complexity
* fix merge
* fix merge conflict
* fix merge conflict
* S allius/issue182 (#183)
* GEN3: After inverter firmware update the 'Unknown Msg Type' increases continuously, fixes #182
* add support for Controller serial no and MAC
* test hardening
* GEN3: add support for new messages of version 3 firmwares
* bump libraries to latest versions: aiomqtt to 2.3.0, aiohttp to 3.10.5
* improve test coverage
* reduce cognitive complexity
* fix target preview
* remove duplicated fixtures
* increase test coverage
* Update README.md (#185): update badges
* S allius/issue186 (#187)
* Parse more values in Server Mode, fixes #186
* read OUTPUT_COEFFICIENT and MAC_ADDR in SrvMode
* fix unit test
* increase test coverage
* S allius/issue186 (#188)
* increase test coverage
* update changelog
* add documentation
* change default config
* Update README.md (#189): config file is now foldable
* GEN3: Invalid Contact Info Msg (#192), fixes #191
* Refactoring async stream (#194)
* GEN3: Invalid Contact Info Msg, fixes #191
* introduce ifc with FIFOs
* add object factory
* use AsyncIfc class with FIFO
* declare more methods as classmethods
* refactoring: remove _forward_buffer; make async_write private
* remove _forward_buffer
* refactoring
* avoid mqtt handling for invalid serial numbers
* add two more callbacks
* FIX update_header_cb handling
* split AsyncStream in two classes
* split ConnectionG3(P) in server and client class
* update class diagram
* refactor server creation
* remove duplicated imports
* reduce code duplication
* move StreamPtr instances into Inverter class
* resolution of connection classes: remove ConnectionG3Client, ConnectionG3Server, ConnectionG3PClient, ConnectionG3PServer
* fix server connections
* fix client loop closing
* don't overwrite self.remote in constructor
* update class diagram
* fixes: fix null pointer accesses; initialize AsyncStreamClient with a proper StreamPtr instance
* add close callback
* refactor close handling
* remove connection classes
* move more code into InverterBase class
* remove test_inverter_base.py
* add abstract inverter interface class
* initial commit
* fix SonarQube warnings
* rename class Inverter into Proxy
* fix typo
* move class InverterIfc into a separate file
* add more testcases
* use ProtocolIfc class
* add unit tests for AsyncStream class
* increase test coverage
* reduce cognitive complexity
* increase test coverage
* increase test coverage
* simplify heartbeat handler
* remove obsolete tx_get method
* add more unit tests
* update changelog
* remove __del__ method for proper gc runs
* check releasing of ModbusConn instances
* call garbage collector to release unreachable objects
* decrease ref counter after the with block
* S allius/issue196 (#198)
* fix healthcheck: on infrastructure with IPv6 support, localhost might be resolved to an IPv6 address; since the proxy only supports IPv4 for now, we replace localhost with 127.0.0.1 to fix this
* merge from main
171 lines
4.5 KiB
Python
import logging
import weakref
from typing import Callable
from enum import Enum

if __name__ == "app.src.messages":
    from app.src.protocol_ifc import ProtocolIfc
    from app.src.infos import Infos, Register
    from app.src.modbus import Modbus
else:  # pragma: no cover
    from protocol_ifc import ProtocolIfc
    from infos import Infos, Register
    from modbus import Modbus

logger = logging.getLogger('msg')


def __hex_val(n, data, data_len):
    line = ''
    for j in range(n-16, n):
        if j >= data_len:
            break
        line += '%02x ' % abs(data[j])
    return line


def __asc_val(n, data, data_len):
    line = ''
    for j in range(n-16, n):
        if j >= data_len:
            break
        c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.'
        line += '%c' % c
    return line


def hex_dump(data, data_len) -> list:
    n = 0
    lines = []

    for i in range(0, data_len, 16):
        line = ' '
        line += '%04x | ' % (i)
        n += 16
        line += __hex_val(n, data, data_len)
        line += ' ' * (3 * 16 + 9 - len(line)) + ' | '
        line += __asc_val(n, data, data_len)
        lines.append(line)

    return lines


def hex_dump_str(data, data_len):
    lines = hex_dump(data, data_len)
    return '\n'.join(lines)


def hex_dump_memory(level, info, data, data_len):
    lines = []
    lines.append(info)
    tracer = logging.getLogger('tracer')
    if not tracer.isEnabledFor(level):
        return

    lines += hex_dump(data, data_len)

    tracer.log(level, '\n'.join(lines))
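

# Usage sketch (illustrative only, not part of the proxy's runtime path):
# hex_dump_str() returns the formatted dump as a string, while
# hex_dump_memory() logs it through the dedicated 'tracer' logger and is a
# no-op unless that logger is enabled for the given level.
def _demo_hex_dump() -> None:  # pragma: no cover
    frame = bytes(range(0x20)) + b'TSUN-proxy'
    print(hex_dump_str(frame, len(frame)))
    logging.getLogger('tracer').setLevel(logging.DEBUG)
    hex_dump_memory(logging.DEBUG, 'received frame:', frame, len(frame))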


class State(Enum):
    '''state of the logical connection'''
    init = 0
    '''just created'''
    received = 1
    '''at least one packet received'''
    up = 2
    '''at least one cmd-rsp transaction'''
    pend = 3
    '''inverter transaction pending, don't send MODBUS cmds'''
    closed = 4
    '''connection closed'''
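
# Typical state progression (a sketch derived from the docstrings above; the
# concrete protocol classes drive the actual transitions):
#   init -> received   (first packet arrived)
#   received -> up     (first cmd-rsp transaction completed)
#   up -> pend -> up   (inverter transaction pending, MODBUS cmds deferred)
#   any state -> closed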


class Message(ProtocolIfc):
    MAX_START_TIME = 400
    '''maximum time without a received msg in sec'''
    MAX_INV_IDLE_TIME = 120
    '''maximum time without a received msg from the inverter in sec'''
    MAX_DEF_IDLE_TIME = 360
    '''maximum default time without a received msg in sec'''

    def __init__(self, server_side: bool, send_modbus_cb:
                 Callable[[bytes, int, str], None], mb_timeout: int):
        self._registry.append(weakref.ref(self))

        self.server_side = server_side
        if server_side:
            self.mb = Modbus(send_modbus_cb, mb_timeout)
        else:
            self.mb = None

        self.header_valid = False
        self.header_len = 0
        self.data_len = 0
        self.unique_id = 0
        self._node_id = ''
        self.sug_area = ''
        self.new_data = {}
        self.state = State.init
        self.shutdown_started = False

    @property
    def node_id(self):
        return self._node_id

    @node_id.setter
    def node_id(self, value):
        self._node_id = value
        self.ifc.set_node_id(value)

    '''
    Empty methods that have to be implemented in any child class which
    doesn't use asyncio
    '''
    def _read(self) -> None:  # read data bytes from socket and copy them
        # to our _recv_buffer
        return  # pragma: no cover

    def _set_mqtt_timestamp(self, key, ts: float | None):
        if key not in self.new_data or \
                not self.new_data[key]:
            if key == 'grid':
                info_id = Register.TS_GRID
            elif key == 'input':
                info_id = Register.TS_INPUT
            elif key == 'total':
                info_id = Register.TS_TOTAL
            else:
                return
            # tstr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(ts))
            # logger.info(f'update: key: {key} ts:{tstr}')
            self.db.set_db_def_value(info_id, round(ts))

    def _timeout(self) -> int:
        if self.state == State.init or self.state == State.received:
            to = self.MAX_START_TIME
        elif self.state == State.up and \
                self.server_side and self.modbus_polling:
            to = self.MAX_INV_IDLE_TIME
        else:
            to = self.MAX_DEF_IDLE_TIME
        return to
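
    # Timeout selection at a glance (seconds, see the class constants above):
    #   init / received                       -> MAX_START_TIME    (400)
    #   up + server side + MODBUS polling     -> MAX_INV_IDLE_TIME (120)
    #   any other state                       -> MAX_DEF_IDLE_TIME (360)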

    '''
    Our public methods
    '''
    def close(self) -> None:
        if self.mb:
            self.mb.close()
            self.mb = None
        # pragma: no cover

    def inc_counter(self, counter: str) -> None:
        self.db.inc_counter(counter)
        Infos.new_stat_data['proxy'] = True

    def dec_counter(self, counter: str) -> None:
        self.db.dec_counter(counter)
        Infos.new_stat_data['proxy'] = True
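

# Hypothetical wiring of a concrete protocol class (a sketch only; the class
# name, the mb_timeout value and the meaning of the callback parameters are
# assumptions and not taken from this module):
#
#   class MyProtocol(Message):
#       def __init__(self, server_side: bool):
#           super().__init__(server_side, self._send_modbus_cb, mb_timeout=8)
#
#       def _send_modbus_cb(self, pdu: bytes, log_lvl: int, state: str) -> None:
#           # wrap the MODBUS PDU into a protocol frame and hand it over to
#           # the transport interface
#           ...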