Compare commits
15 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | d5561d393a |  |
|  | a8f1a838c1 |  |
|  | b530353e54 |  |
|  | 271b4f876e |  |
|  | 6816a3e027 |  |
|  | bee25a5f13 |  |
|  | 3db643cb87 |  |
|  | c791395e0e |  |
|  | 0043e4c147 |  |
|  | f38047c931 |  |
|  | 19cbd5a041 |  |
|  | a48394d057 |  |
|  | 1871f6c8d2 |  |
|  | 066459f14e |  |
|  | 3f14f5cb9e |  |
CHANGELOG.md
@@ -7,6 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased]
 
+## [0.1.0] - 2023-10-06
+
+- refactoring of the connection classes
+- change user id on startup
+- register MQTT topics to home assistant, even if we have multiple inverters
+
 ## [0.0.6] - 2023-10-03
 
 - Bump aiomqtt to version 1.2.1

Dockerfile
@@ -37,12 +37,6 @@ ENV SERVICE_NAME=$SERVICE_NAME
 ENV UID=$UID
 ENV GID=$GID
 
-
-RUN addgroup --gid $GID $SERVICE_NAME && \
-    adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --uid $UID $SERVICE_NAME && \
-    mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config && \
-    chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME
-
 
 # set the working directory in the container
 WORKDIR /home/$SERVICE_NAME

@@ -3,12 +3,23 @@ set -e
 
 user="$(id -u)"
 echo "######################################################"
-echo "# start: '$SERVICE_NAME' Version:$VERSION"
-echo "# with UserID:$UID, GroupID:$GID"
-echo "######################################################"
+echo "# prepare: '$SERVICE_NAME' Version:$VERSION"
+echo "# for running with UserID:$UID, GroupID:$GID"
+echo "#"
 
 if [ "$user" = '0' ]; then
-    [ -d "/home/$SERVICE_NAME" ] && chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
+    mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
+
+    if id $SERVICE_NAME ; then
+        echo "user still exists"
+    else
+        addgroup --gid $GID $SERVICE_NAME 2> /dev/null
+        adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --no-create-home --comment "" --uid $UID $SERVICE_NAME
+    fi
+    chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
+    echo "######################################################"
+    echo "#"
+
     exec gosu $SERVICE_NAME "$@"
 else
     exec "$@"

app/src/async_stream.py
@@ -1,22 +1,19 @@
-import logging, traceback, aiomqtt, json
+import logging, traceback
 from config import Config
+#import gc
 from messages import Message, hex_dump_memory
-from mqtt import Mqtt
 
 logger = logging.getLogger('conn')
-logger_mqtt = logging.getLogger('mqtt')
 
 class AsyncStream(Message):
 
-    def __init__(self, proxy, reader, writer, addr, stream=None, server_side=True):
+    def __init__(self, reader, writer, addr, remote_stream, server_side: bool) -> None:
         super().__init__()
-        self.proxy = proxy
         self.reader = reader
         self.writer = writer
-        self.remoteStream = stream
-        self.addr = addr
+        self.remoteStream = remote_stream
         self.server_side = server_side
-        self.mqtt = Mqtt()
+        self.addr = addr
         self.unique_id = 0
         self.node_id = ''
 
@@ -24,47 +21,32 @@ class AsyncStream(Message):
     Our puplic methods
     '''
     def set_serial_no(self, serial_no : str):
-        logger_mqtt.info(f'SerialNo: {serial_no}')
+        logger.info(f'SerialNo: {serial_no}')
 
         if self.unique_id != serial_no:
 
            inverters = Config.get('inverters')
-           #logger_mqtt.debug(f'Inverters: {inverters}')
+           #logger.debug(f'Inverters: {inverters}')
 
            if serial_no in inverters:
-               logger_mqtt.debug(f'SerialNo {serial_no} allowed!')
+               logger.debug(f'SerialNo {serial_no} allowed!')
                inv = inverters[serial_no]
                self.node_id = inv['node_id']
                self.sug_area = inv['suggested_area']
            else:
-               logger_mqtt.debug(f'SerialNo {serial_no} not known!')
+               logger.debug(f'SerialNo {serial_no} not known!')
                self.node_id = ''
                self.sug_area = ''
                if not inverters['allow_all']:
                    self.unique_id = None
 
-                   logger_mqtt.error('ignore message from unknow inverter!')
+                   logger.error('ignore message from unknow inverter!')
                    return
 
            self.unique_id = serial_no
 
-           ha = Config.get('ha')
-           self.entitiy_prfx = ha['entity_prefix'] + '/'
-           self.discovery_prfx = ha['discovery_prefix'] + '/'
 
 
-    async def register_home_assistant(self):
-
-        if self.server_side:
-            try:
-                for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
-                    logger_mqtt.debug(f'Register: {data_json}')
-                    await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
-
-            except Exception:
-                logging.error(
-                    f"Proxy: Exception:\n"
-                    f"{traceback.format_exc()}")
 
 
     async def loop(self) -> None:
@@ -79,7 +61,7 @@ class AsyncStream(Message):
                 if self.unique_id:
                     await self.__async_write()
                     await self.__async_forward()
-                    await self.__async_publ_mqtt()
+                    await self.async_publ_mqtt()
 
 
             except (ConnectionResetError,
@@ -104,11 +86,8 @@ class AsyncStream(Message):
         logger.debug(f'in AsyncStream.close() {self.addr}')
         self.writer.close()
         super().close()  # call close handler in the parent class
-        self.proxy = None  # clear our refernce to the proxy, to avoid memory leaks
 
-        if self.remoteStream:  # if we have knowledge about a remote stream, we del the references between the two streams
-            self.remoteStream.remoteStream = None
-            self.remoteStream = None
+        # logger.info (f'AsyncStream refs: {gc.get_referrers(self)}')
 
 
     '''
@@ -132,8 +111,7 @@ class AsyncStream(Message):
     async def __async_forward(self) -> None:
         if self._forward_buffer:
             if not self.remoteStream:
-                tsun = Config.get('tsun')
-                self.remoteStream = await self.proxy.CreateClientStream (self, tsun['host'], tsun['port'])
+                await self.async_create_remote()  # only implmeneted for server side => syncServerStream
 
             if self.remoteStream:
                 hex_dump_memory(logging.DEBUG, f'Forward to {self.remoteStream.addr}:', self._forward_buffer, len(self._forward_buffer))
@@ -141,24 +119,14 @@ class AsyncStream(Message):
                 await self.remoteStream.writer.drain()
                 self._forward_buffer = bytearray(0)
 
-    async def __async_publ_mqtt(self) -> None:
-        if self.server_side:
-            db = self.db.db
+    async def async_create_remote(self) -> None:
+        pass
 
-            # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
-            if (self.new_data.keys() & {'inverter', 'collector'}) or self.mqtt.home_assistant_restarted:
-                await self.register_home_assistant()
-                self.mqtt.home_assistant_restarted = False # clear flag
+    async def async_publ_mqtt(self) -> None:
+        pass
 
-            for key in self.new_data:
-                if self.new_data[key] and key in db:
-                    data_json = json.dumps(db[key])
-                    logger_mqtt.info(f'{key}: {data_json}')
-                    await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
-                    self.new_data[key] = False
 
     def __del__ (self):
-        logger.debug ("AsyncStream __del__")
-        super().__del__()
+        logging.debug (f"AsyncStream.__del__ {self.addr}")
 

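The net effect of these hunks is that AsyncStream no longer imports Mqtt or knows about Home Assistant at all: it only moves bytes and exposes async_create_remote() and async_publ_mqtt() as empty hooks that a server-side subclass fills in. Below is a minimal sketch of that hook pattern with illustrative class names (not taken from the repository), the real socket loop and MQTT calls omitted:

import asyncio

class StreamBase:
    '''Transport-only base class: knows nothing about MQTT or Home Assistant.'''

    async def loop(self):
        # stand-in for the receive loop; the real one reads from the socket
        for _ in range(2):
            await self.async_publ_mqtt()    # hook, a no-op in the base class

    async def async_create_remote(self):    # overridden only on the server side
        pass

    async def async_publ_mqtt(self):        # overridden only on the server side
        pass

class InverterLike(StreamBase):
    '''Server-side subclass that adds the MQTT behaviour.'''

    async def async_publ_mqtt(self):
        print('publish new data to the MQTT broker')

asyncio.run(InverterLike().loop())   # prints twice; StreamBase().loop() would print nothing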
app/src/config.py
@@ -60,7 +60,7 @@ class Config():
             config['inverters'] = def_config['inverters'] | usr_config['inverters']
 
             cls.config = cls.conf_schema.validate(config)
-            logging.debug(f'Readed config: "{cls.config}" ')
+            #logging.debug(f'Readed config: "{cls.config}" ')
 
         except Exception as error:
             logger.error(f'Config.read: {error}')

app/src/inverter.py (new file, 103 lines)
@@ -0,0 +1,103 @@
+import asyncio, logging, traceback, json
+from config import Config
+from async_stream import AsyncStream
+from mqtt import Mqtt
+#import gc
+
+logger = logging.getLogger('conn')
+
+
+
+class Inverter(AsyncStream):
+
+    def __init__ (self, reader, writer, addr):
+        super().__init__(reader, writer, addr, None, True)
+        self.mqtt = Mqtt()
+        self.ha_restarts = 0
+        ha = Config.get('ha')
+        self.entitiy_prfx = ha['entity_prefix'] + '/'
+        self.discovery_prfx = ha['discovery_prefix'] + '/'
+
+
+    async def server_loop(self, addr):
+        '''Loop for receiving messages from the inverter (server-side)'''
+        logger.info(f'Accept connection from {addr}')
+        await self.loop()
+        logging.info(f'Server loop stopped for {addr}')
+
+        # if the server connection closes, we also have to disconnect the connection to te TSUN cloud
+        if self.remoteStream:
+            logging.debug ("disconnect client connection")
+            self.remoteStream.disc()
+
+    async def client_loop(self, addr):
+        '''Loop for receiving messages from the TSUN cloud (client-side)'''
+        await self.remoteStream.loop()
+        logging.info(f'Client loop stopped for {addr}')
+
+        # if the client connection closes, we don't touch the server connection. Instead we erase the client
+        # connection stream, thus on the next received packet from the inverter, we can establish a new connection
+        # to the TSUN cloud
+        self.remoteStream.remoteStream = None  # erase backlink to inverter instance
+        self.remoteStream = None               # than erase client connection
+
+    async def async_create_remote(self) -> None:
+        '''Establish a client connection to the TSUN cloud'''
+        tsun = Config.get('tsun')
+        host = tsun['host']
+        port = tsun['port']
+        addr = (host, port)
+
+        try:
+            logging.info(f'Connected to {addr}')
+            connect = asyncio.open_connection(host, port)
+            reader, writer = await connect
+            self.remoteStream = AsyncStream(reader, writer, addr, self, False)
+            asyncio.create_task(self.client_loop(addr))
+
+        except ConnectionRefusedError as error:
+            logging.info(f'{error}')
+        except Exception:
+            logging.error(
+                f"Inverter: Exception for {addr}:\n"
+                f"{traceback.format_exc()}")
+
+
+
+    async def async_publ_mqtt(self) -> None:
+        '''puplish data to MQTT broker'''
+        db = self.db.db
+        # check if new inverter or collector infos are available or when the home assistant has changed the status back to online
+        if (('inverter' in self.new_data and self.new_data['inverter']) or
+                ('collector' in self.new_data and self.new_data['collector']) or
+                self.mqtt.ha_restarts != self.ha_restarts):
+            await self.__register_home_assistant()
+            self.ha_restarts = self.mqtt.ha_restarts
+
+        for key in self.new_data:
+            if self.new_data[key] and key in db:
+                data_json = json.dumps(db[key])
+                logger.info(f'{key}: {data_json}')
+                await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
+                self.new_data[key] = False
+
+    async def __register_home_assistant(self) -> None:
+        '''register all our topics at home assistant'''
+        try:
+            for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
+                #logger.debug(f'MQTT Register: {data_json}')
+                await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
+        except Exception:
+            logging.error(
+                f"Inverter: Exception:\n"
+                f"{traceback.format_exc()}")
+
+    def close(self) -> None:
+        logging.debug(f'Inverter.close() {self.addr}')
+        super().close()  # call close handler in the parent class
+        # logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
+
+
+    def __del__ (self):
+        logging.debug ("Inverter.__del__")
+        super().__del__()

app/src/messages.py
@@ -101,7 +101,6 @@ class Message(metaclass=IterRegistry):
     Our puplic methods
     '''
     def close(self) -> None:
-        logger.debug(f'in Message.close()')
         # we have refernces to methods of this class in self.switch
         # so we have to erase self.switch, otherwise this instance can't be
         # deallocated by the garbage collector ==> we get a memory leak
@@ -295,11 +294,9 @@ class Message(metaclass=IterRegistry):
 
 
     def msg_unknown(self):
+        logger.error (f"Unknow Msg: ID:{self.msg_id}")
         self.forward(self._recv_buffer, self.header_len+self.data_len)
 
 
-    def __del__ (self):
-        logger.debug ("Messages __del__")
-
 
 

app/src/mqtt.py
@@ -16,14 +16,22 @@ class Singleton(type):
 
 class Mqtt(metaclass=Singleton):
     client = None
 
     def __init__(self):
         logger_mqtt.debug(f'MQTT: __init__')
         loop = asyncio.get_event_loop()
         self.task = loop.create_task(self.__loop())
-        self.home_assistant_restarted = False
+        self.ha_restarts = 0
 
 
+    @property
+    def ha_restarts(self):
+        return self._ha_restarts
+
+    @ha_restarts.setter
+    def ha_restarts(self, value):
+        self._ha_restarts = value
+
     def __del__(self):
         logger_mqtt.debug(f'MQTT: __del__')
 
@@ -59,7 +67,7 @@ class Mqtt(metaclass=Singleton):
                     status = message.payload.decode("UTF-8")
                     logger_mqtt.info(f'Home-Assistant Status: {status}')
                     if status == 'online':
-                        self.home_assistant_restarted = True # set flag to force MQTT registering
+                        self.ha_restarts += 1
 
         except aiomqtt.MqttError:
             logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...")

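Replacing the shared home_assistant_restarted boolean with a monotonically increasing ha_restarts counter is what makes the changelog entry about multiple inverters work: the old flag was cleared by whichever connection noticed it first, while with the counter every connection keeps its own snapshot and re-registers independently. A minimal, self-contained sketch of the pattern (simplified classes, no real MQTT traffic):

# The Mqtt singleton only counts Home Assistant "online" events; every
# consumer keeps its own snapshot and re-registers when the counter moved.

class Mqtt:
    def __init__(self):
        self.ha_restarts = 0              # bumped on every HA 'online' status

    def on_ha_online(self):
        self.ha_restarts += 1             # instead of a shared boolean flag


class Inverter:
    def __init__(self, mqtt: Mqtt):
        self.mqtt = mqtt
        self.ha_restarts = mqtt.ha_restarts   # private snapshot

    def publish(self):
        if self.mqtt.ha_restarts != self.ha_restarts:
            self.register_home_assistant()     # re-register our topics
            self.ha_restarts = self.mqtt.ha_restarts

    def register_home_assistant(self):
        print('registering discovery topics')


mqtt = Mqtt()
inverters = [Inverter(mqtt), Inverter(mqtt)]
mqtt.on_ha_online()          # Home Assistant restarted
for inv in inverters:
    inv.publish()            # both inverters re-register, not just the first one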
app/src/proxy.py (deleted)
@@ -1,50 +0,0 @@
-import asyncio, logging, traceback
-from async_stream import AsyncStream
-
-
-class Proxy:
-    def __init__ (proxy, reader, writer, addr):
-        proxy.ServerStream = AsyncStream(proxy, reader, writer, addr)
-        proxy.ClientStream = None
-
-    async def server_loop(proxy, addr):
-        '''Loop for receiving messages from the inverter (server-side)'''
-        logging.info(f'Accept connection from {addr}')
-        await proxy.ServerStream.loop()
-        logging.info(f'Server loop stopped for {addr}')
-
-        # if the server connection closes, we also disconnect the connection to te TSUN cloud
-        if proxy.ClientStream:
-            logging.debug ("disconnect client connection")
-            proxy.ClientStream.disc()
-
-    async def client_loop(proxy, addr):
-        '''Loop for receiving messages from the TSUN cloud (client-side)'''
-        await proxy.ClientStream.loop()
-        logging.info(f'Client loop stopped for {addr}')
-
-        # if the client connection closes, we don't touch the server connection. Instead we erase the client
-        # connection stream, thus on the next received packet from the inverter, we can establish a new connection
-        # to the TSUN cloud
-        proxy.ClientStream = None
-
-    async def CreateClientStream (proxy, stream, host, port):
-        '''Establish a client connection to the TSUN cloud'''
-        addr = (host, port)
-
-        try:
-            logging.info(f'Connected to {addr}')
-            connect = asyncio.open_connection(host, port)
-            reader, writer = await connect
-            proxy.ClientStream = AsyncStream(proxy, reader, writer, addr, stream, server_side=False)
-            asyncio.create_task(proxy.client_loop(addr))
-
-        except ConnectionRefusedError as error:
-            logging.info(f'{error}')
-        except Exception:
-            logging.error(
-                f"Proxy: Exception for {addr}:\n"
-                f"{traceback.format_exc()}")
-        return proxy.ClientStream
-
-    def __del__ (proxy):
-        logging.debug ("Proxy __del__")

@@ -1,8 +1,7 @@
 import logging, asyncio, signal, functools, os
-#from logging.handlers import TimedRotatingFileHandler
 from logging import config
 from async_stream import AsyncStream
-from proxy import Proxy
+from inverter import Inverter
 from config import Config
 from mqtt import Mqtt
 
@@ -11,7 +10,7 @@ async def handle_client(reader, writer):
     '''Handles a new incoming connection and starts an async loop'''
 
     addr = writer.get_extra_info('peername')
-    await Proxy(reader, writer, addr).server_loop(addr)
+    await Inverter(reader, writer, addr).server_loop(addr)
 
 
 def handle_SIGTERM(loop):
@@ -46,6 +45,7 @@ if __name__ == "__main__":
 
     logging.config.fileConfig('logging.ini')
     logging.info(f'Server "{serv_name} - {version}" will be started')
+    logging.getLogger().setLevel(logging.DEBUG if __debug__ else logging.INFO)
 
     # read config file
     Config.read()
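The added setLevel() call keys the root log level off Python's built-in __debug__ constant, which is True in a normal interpreter run and False when Python is started with -O, so development runs get DEBUG output and optimized runs stay at INFO. A quick illustration:

import logging

logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG if __debug__ else logging.INFO)
logging.debug('visible with "python app.py", hidden with "python -O app.py"')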