move StreamPtr instances into Inverter class

Stefan Allius
2024-09-29 15:31:14 +02:00
parent 0c824b4a2a
commit 5a0ef30ceb
13 changed files with 548 additions and 550 deletions


@@ -17,6 +17,28 @@ logger_mqtt = logging.getLogger('mqtt')
 class Inverter():
     '''class Inverter is a baseclass
+
+    The class has some class methods for managing common resources, like the
+    connection to the MQTT broker or the proxy error counters, which are
+    shared by all inverter connections.
+
+    Instances of the class are connections to an inverter and can have an
+    optional link to a remote connection to the TSUN cloud. A remote
+    connection dies with the inverter connection.
+
+    class methods:
+        class_init():  initialize the common resources of the proxy (MQTT
+                       broker, Proxy DB, etc.). Must be called before the
+                       first inverter instance can be created
+        class_close(): release the common resources of the proxy. Should not
+                       be called until all instances of the class have been
+                       destroyed
+
+    methods:
+        async_create_remote(): Establish a client connection to the TSUN cloud
+        async_publ_mqtt():     Publish data to MQTT broker
     '''
     @classmethod
     def class_init(cls) -> None:
         logging.debug('Inverter.class_init')
@@ -94,17 +116,18 @@ class Inverter():
         host = tsun['host']
         port = tsun['port']
         addr = (host, port)
+        stream = self.local.stream
         try:
-            logging.info(f'[{self.node_id}] Connect to {addr}')
+            logging.info(f'[{stream.node_id}] Connect to {addr}')
             connect = asyncio.open_connection(host, port)
             reader, writer = await connect
-            if hasattr(self, 'id_str'):
+            if hasattr(stream, 'id_str'):
                 self.remote.stream = conn_class(
-                    reader, writer, addr, self, self.id_str)
+                    self, reader, writer, addr, stream.id_str)
             else:
                 self.remote.stream = conn_class(
-                    reader, writer, addr, self)
+                    self, reader, writer, addr)
             logging.info(f'[{self.remote.stream.node_id}:'
                          f'{self.remote.stream.conn_no}] '
@@ -114,56 +137,57 @@ class Inverter():
         except (ConnectionRefusedError, TimeoutError) as error:
             logging.info(f'{error}')
         except Exception:
-            self.inc_counter('SW_Exception')
+            Infos.inc_counter('SW_Exception')
             logging.error(
                 f"Inverter: Exception for {addr}:\n"
                 f"{traceback.format_exc()}")

     async def async_publ_mqtt(self) -> None:
         '''publish data to MQTT broker'''
-        if not self.unique_id:
+        stream = self.local.stream
+        if not stream.unique_id:
             return
         # check if new inverter or collector infos are available or when the
         # home assistant has changed the status back to online
         try:
-            if (('inverter' in self.new_data and self.new_data['inverter'])
-                    or ('collector' in self.new_data and
-                        self.new_data['collector'])
+            if (('inverter' in stream.new_data and stream.new_data['inverter'])
+                    or ('collector' in stream.new_data and
+                        stream.new_data['collector'])
                     or self.mqtt.ha_restarts != self.__ha_restarts):
                 await self._register_proxy_stat_home_assistant()
-                await self.__register_home_assistant()
+                await self.__register_home_assistant(stream)
                 self.__ha_restarts = self.mqtt.ha_restarts

-            for key in self.new_data:
-                await self.__async_publ_mqtt_packet(key)
+            for key in stream.new_data:
+                await self.__async_publ_mqtt_packet(stream, key)
             for key in Infos.new_stat_data:
                 await Inverter._async_publ_mqtt_proxy_stat(key)
         except MqttCodeError as error:
             logging.error(f'Mqtt except: {error}')
         except Exception:
-            self.inc_counter('SW_Exception')
+            Infos.inc_counter('SW_Exception')
             logging.error(
                 f"Inverter: Exception:\n"
                 f"{traceback.format_exc()}")

-    async def __async_publ_mqtt_packet(self, key):
-        db = self.db.db
-        if key in db and self.new_data[key]:
+    async def __async_publ_mqtt_packet(self, stream, key):
+        db = stream.db.db
+        if key in db and stream.new_data[key]:
             data_json = json.dumps(db[key])
-            node_id = self.node_id
+            node_id = stream.node_id
             logger_mqtt.debug(f'{key}: {data_json}')
             await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json)  # noqa: E501
-            self.new_data[key] = False
+            stream.new_data[key] = False

-    async def __register_home_assistant(self) -> None:
+    async def __register_home_assistant(self, stream) -> None:
         '''register all our topics at home assistant'''
-        for data_json, component, node_id, id in self.db.ha_confs(
-                self.entity_prfx, self.node_id, self.unique_id,
-                self.sug_area):
+        for data_json, component, node_id, id in stream.db.ha_confs(
+                self.entity_prfx, stream.node_id, stream.unique_id,
+                stream.sug_area):
             logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
                               f" node_id:'{node_id}' {data_json}")
             await self.mqtt.publish(f"{self.discovery_prfx}{component}"
                                     f"/{node_id}{id}/config", data_json)
-        self.db.reg_clr_at_midnight(f'{self.entity_prfx}{self.node_id}')
+        stream.db.reg_clr_at_midnight(f'{self.entity_prfx}{stream.node_id}')
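
The docstring added in the first hunk and the repeated `stream = self.local.stream` resolution in the hunks above describe the new ownership model: an Inverter instance owns two stream pointers (local and remote) and dereferences them before touching per-connection state. The following sketch only illustrates that pattern; apart from the names local, remote, stream and StreamPtr (taken from the commit title and the diff), all classes, constructors and data are simplified assumptions, not code from this repository.

# Hypothetical, simplified sketch of the StreamPtr indirection (assumption,
# not the repository's real classes or constructors).

class StreamPtr:
    '''Mutable holder for an optional stream/connection object.'''
    def __init__(self, stream=None):
        self.stream = stream


class DemoStream:
    '''Stand-in for a local inverter connection (demo only).'''
    def __init__(self, node_id, unique_id):
        self.node_id = node_id
        self.unique_id = unique_id
        self.new_data = {'inverter': True, 'collector': False}


class Inverter:
    def __init__(self):
        # the commit moves these pointers into the Inverter instance
        self.local = StreamPtr()
        self.remote = StreamPtr()

    def publ_mqtt(self):
        # same resolve-then-use pattern as async_publ_mqtt() in the diff
        stream = self.local.stream
        if not stream or not stream.unique_id:
            return
        for key, dirty in stream.new_data.items():
            if dirty:
                print(f'would publish {stream.node_id}/{key}')
                stream.new_data[key] = False


inv = Inverter()
inv.local.stream = DemoStream('inv_1', 'demo-serial')
inv.publ_mqtt()   # prints: would publish inv_1/inverter

Presumably the indirection lets a connection object be swapped behind a stable pointer, which matches the docstring's note that the remote connection is tied to the lifetime of the inverter connection.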