Compare commits

..

50 Commits

Author SHA1 Message Date
Stefan Allius
542f422e1e version 0.5.5 2023-12-31 16:28:06 +01:00
Stefan Allius
7225c20b01 S allius/issue33 (#34)
* - fix issue 33

  The TSUN Cloud now responds to contact_info and get_time messages with
  an empty display message and not with a response message as before.
  We tried to parse data from the empty message, which led to an
  exception

* Add test with empty conn_ind from inverter
2023-12-31 16:25:21 +01:00
Stefan Allius
d7b3ab54e8 Update README.md
Planning documented for MS-2000 support
2023-12-31 11:28:11 +01:00
Stefan Allius
d15741949f Update README.md
Fixes an incorrect marking in the display of the configuration file
2023-12-28 14:08:59 +01:00
Stefan Allius
cef28b06cd add ha counter for 'Internal SW Exceptions' 2023-12-24 11:49:26 +01:00
Stefan Allius
ba4a1f058f Update README.md
Definition of the inverter generations added to the compatibility table
2023-12-17 20:00:02 +01:00
Stefan Allius
43f513ecbf fix typo 2023-12-15 23:42:32 +01:00
Stefan Allius
3e217b96d9 home assistant add more diagnostic values 2023-12-15 23:27:06 +01:00
Stefan Allius
dc8fc5e4eb update unreleased changes 2023-12-11 00:42:56 +01:00
Stefan Allius
9acd781fa8 Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2023-12-10 15:15:06 +01:00
Stefan Allius
5d51a0d9f8 - Preparation for overwriting received data 2023-12-10 15:14:51 +01:00
Stefan Allius
670424451d - Fixed detection of the connected inputs/MPPTs
- Add data acquisition interval
- Add number of connections
- Add communication type
2023-12-10 15:14:21 +01:00
Stefan Allius
ea95e540ec - Fixed detection of the connected inputs/MPPTs
- Add data acquisition interval
- Add number of connections
- Add communication type
- Preparation for overwriting received data
2023-12-10 15:13:44 +01:00
Stefan Allius
9a68542c5a Update README.md 2023-12-02 00:17:49 +01:00
Stefan Allius
d9c56fb1ab Hardening (#31)
* merge hardening branch into main
2023-11-29 23:54:04 +01:00
Stefan Allius
4c4628301f Update README.md
Fix typos
2023-11-26 21:45:24 +01:00
Stefan Allius
3dc7730084 Update README.md
Link for sending a trace
2023-11-26 19:50:16 +01:00
Stefan Allius
8401833c0e Update README.md
add compatibility section
2023-11-26 13:55:44 +01:00
Stefan Allius
b142cfbc3c fix typo 2023-11-22 23:56:33 +01:00
Stefan Allius
5996ca2500 add info about Over The Air (OTA) firmware updates 2023-11-22 23:55:36 +01:00
Stefan Allius
bd7c4ae822 Version 0.5.4 2023-11-22 22:26:10 +01:00
Stefan Allius
e2873ffce7 Hardening (#30)
* set build-argument for environment

* hardening remove dangerous commands

* add hardening scripts for base and final image
2023-11-22 21:57:42 +01:00
Stefan Allius
f10207b5ba Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2023-11-22 18:45:16 +01:00
Stefan Allius
aeb2a82df1 ignore bin directory 2023-11-22 18:45:03 +01:00
Stefan Allius
3b75c45344 OTA update (#29)
* add pv module configuration

* add OTA start message counter

* add OTA start message counter

* fix test_statistic_counter
2023-11-22 18:33:56 +01:00
Stefan Allius
9edfa40054 - add unit tests for ota messages 2023-11-21 22:31:46 +01:00
Stefan Allius
0a566a3df2 - add message handler for over the air updates 2023-11-21 22:29:59 +01:00
Stefan Allius
3e7eba9998 improve test coverage 2023-11-17 23:59:34 +01:00
Stefan Allius
00ddcc138f add tests for int64 datatype in controller msg 2023-11-17 23:21:34 +01:00
Stefan Allius
0db2c3945d cleanup msg_get_time handler 2023-11-17 23:20:03 +01:00
Stefan Allius
690c66a13a hardening docker image
remove the python packages setuptools, wheel and pip from
final image to reduce the attack surface
2023-11-13 20:47:14 +01:00
Stefan Allius
a47ebb1511 fix message unit tests 2023-11-13 00:01:26 +01:00
Stefan Allius
4b7431ede9 Merge pull request #28 from s-allius/s-allius/issue26
Version 0.5.3
2023-11-12 20:25:00 +01:00
Stefan Allius
c3430f509e Version 0.5.3 2023-11-12 15:23:43 +01:00
Stefan Allius
51b046c351 Version 0.5.3 2023-11-12 15:22:41 +01:00
Stefan Allius
32a669d0d1 Merge pull request #27 from s-allius/s-allius/issue26
S allius/issue26
2023-11-12 15:19:48 +01:00
Stefan Allius
4d9f00221c fix the plant offline problem in tsun cloud
- use TSUN timestamp instead of local time,
  as TSUN also expects Central European Summer
  Time in winter
2023-11-12 15:15:30 +01:00
Stefan Allius
27c723b0c8 init contact_mail and contact_name 2023-11-12 01:06:24 +01:00
Stefan Allius
4bd59b91b3 send contact info every time a client connection is established 2023-11-11 23:49:06 +01:00
Stefan Allius
3a3c6142b8 ignore build.sh 2023-11-09 20:43:46 +01:00
Stefan Allius
5d36397f2f remove apk from the final image 2023-11-09 20:17:19 +01:00
Stefan Allius
bb39567d05 Version 0.5.2 2023-11-09 20:05:56 +01:00
Stefan Allius
b6431f8448 improve client conn disconnection
- check for race cond. on closing and establishing
  client connections
- improve connection trace
2023-11-09 20:03:09 +01:00
Stefan Allius
714dd92f35 allow multiple calls to Message.close() 2023-11-08 18:57:56 +01:00
Stefan Allius
02861f70af - add int64 data type to info parser 2023-11-07 00:19:48 +01:00
Stefan Allius
942e17d7c3 Version 0.5.1 2023-11-05 00:57:10 +01:00
Stefan Allius
37f7052811 Merge pull request #23 from limes007/dns_desc
add description for DNS settings
2023-11-05 00:14:30 +01:00
Stefan Allius
05e446dc74 Merge pull request #24 from limes007/main
fix f-string
2023-11-05 00:09:51 +01:00
limes007
647ef157d4 fix f-string 2023-11-04 23:29:53 +01:00
limes007
9ae391b46d add description for DNS settings 2023-11-04 23:28:20 +01:00
16 changed files with 695 additions and 123 deletions

1
.gitignore vendored
View File

@@ -1,5 +1,6 @@
 __pycache__
 .pytest_cache
+bin/**
 mosquitto/**
 homeassistant/**
 tsun_proxy/**

View File

@@ -7,6 +7,46 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
+
+## [0.5.5] - 2023-12-31
+- Fixed [#33](https://github.com/s-allius/tsun-gen3-proxy/issues/33)
+- Fixed detection of the connected inputs/MPPTs
+- Preparation for overwriting received data
+- home assistant improvements:
+- Add unit 'W' to the `Rated Power` value for home assistant
+- `Collect_Interval`, `Connect_Count` and `Data_Up_Interval` as diagnostic value and not as graph
+- Add data acquisition interval
+- Add number of connections
+- Add communication type
+- Add 'Internal SW Exception' counter
+
+## [0.5.4] - 2023-11-22
+- hardening remove dangerous commands from busybox
+- add OTA start message counter
+- add message handler for over the air updates
+- add unit tests for ota messages
+- add unit test for int64 data type
+- cleanup msg_get_time_handler
+- remove python packages setuptools, wheel, pip from final image to reduce the attack surface
+
+## [0.5.3] - 2023-11-12
+- remove apk packet manager from the final image
+- send contact info every time a client connection is established
+- use TSUN timestamp instead of local time, as TSUN also expects Central European Summer Time in winter
+
+## [0.5.2] - 2023-11-09
+- add int64 data type to info parser
+- allow multiple calls to Message.close()
+- check for race cond. on closing and establishing client connections
+
+## [0.5.1] - 2023-11-05
+- fixes f-string by limes007
+- add description for dns settings by limes007
+
 ## [0.5.0] - 2023-11-04
 - fix issue [#21](https://github.com/s-allius/tsun-gen3-proxy/issues/21)
@@ -27,7 +67,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - count definition errors in our internal tables
 - increase test coverage of the Infos class to 100%
 - avoid resetting the daily generation counters even if the inverter sends zero values at sunset

 ## [0.4.1] - 2023-10-20
 - fix issue [#18](https://github.com/s-allius/tsun-gen3-proxy/issues/18)
@@ -53,13 +93,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - optimize and reduce logging
 - switch to pathon 3.12
 - classify some values for diagnostics

 ## [0.2.0] - 2023-10-07
 This version halves the size of the Docker image and reduces the attack surface for security vulnerabilities, by omitting unneeded code. The feature set is exactly the same as the previous release version 0.1.0.

-### Changes
+### Changes in 0.2.0
 - move from slim-bookworm to an alpine base image
 - install python requirements with pip wheel
@@ -92,31 +132,31 @@ This version halves the size of the Docker image and reduces the attack surface
 ❗Due to the change from one device to multiple devices in the Home Assistant, the previous MQTT device should be deleted in the Home Assistant after the update to pre-release '0.0.4'. Afterwards, the proxy must be restarted again to ensure that the sub-devices are created completely.

-### Added
+### Added in 0.0.4
 - Register multiple devices at home-assistant instead of one for all measurements.
   Now we register: a Controller, the inverter and up to 4 input devices to home-assistant.

 ## [0.0.3] - 2023-09-28
-### Added
+### Added in 0.0.3
 - Fixes Running Proxy with host UID and GUID #2

 ## [0.0.2] - 2023-09-27
-### Added
+### Added in 0.0.2
 - Dockerfile opencontainer labels
 - Send voltage and current of inputs to mqtt

 ## [0.0.1] - 2023-09-25
-### Added
+### Added in 0.0.1
 - Logger for inverter packets
 - SIGTERM handler for fast docker restarts
 - Proxy as non-root docker application
 - Unit- and system tests
 - Home asssistant auto configuration
 - Self-sufficient island operation without internet

View File

@@ -40,7 +40,8 @@ If you use a Pi-hole, you can also store the host entry in the Pi-hole.
 ## Features

-- supports TSOL MS300, MS350, MS400, MS600, MS700 and MS800 inverters from TSUN
+- supports TSUN G3 inverters: TSOL MS-300, MS-350, MS-400, MS-600, MS-700 and MS-800
+- support for TSUN G3 Plus inverters is in preparation (e.g. MS-2000)
 - `MQTT` support
 - `Home-Assistant` auto-discovery support
 - Self-sufficient island operation without internet
@@ -120,6 +121,7 @@ inverters.allow_all = false # True: allow inverters, even if we have no invert
 # inverter mapping, maps a `serial_no* to a `node_id` and defines an optional `suggested_area` for `home-assistant`
 #
 # for each inverter add a block starting with [inverters."<16-digit serial numbeer>"]
 [inverters."R17xxxxxxxxxxxx1"]
 node_id = 'inv1'         # Optional, MQTT replacement for inverters serial number
 suggested_area = 'roof'  # Optional, suggested installation area for home-assistant
@@ -131,6 +133,50 @@ suggested_area = 'balcony' # Optional, suggested installation area for home-a
```
## DNS Settings
### Loop the proxy into the connection
To include the proxy in the connection between the inverter and the TSUN Cloud, you must adapt the DNS record of *logger.talent-monitoring.com* within the network that your inverter uses. You need a mapping from logger.talent-monitoring.com to the IP address of the host running the Docker engine.
This can be done, for example, by adding a local DNS record to the Pi-hole if you are using it.
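For illustration only (not part of this change set), such a local override could look like the following; `192.168.178.10` is a placeholder for the address of the Docker host that runs the proxy:

```
# Pi-hole "Local DNS record" / hosts-style entry (placeholder IP):
192.168.178.10   logger.talent-monitoring.com

# Equivalent setting for a plain dnsmasq instance:
address=/logger.talent-monitoring.com/192.168.178.10
```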
### DNS Rebind Protection
If you are using a router as local DNS server, the router may have DNS rebind protection that needs to be adjusted. For security reasons, DNS rebind protection blocks DNS queries that refer to an IP address on the local network.
If you are using a FRITZ!Box, you can do this in the Network Settings tab under Home Network / Network. Add logger.talent-monitoring.com as a hostname exception in DNS rebind protection.
### DNS server of proxy
The proxy itself must use a different DNS server to connect to the TSUN Cloud. If you use the DNS server with the adapted record, you will end up in an endless loop as soon as the proxy tries to send data to the TSUN Cloud.
As described above, set a DNS server in the Docker command or Docker compose file.
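As a sketch (not part of the diff), the resolver can be pinned when starting the container; `8.8.8.8` is only a placeholder for any upstream server that still returns TSUN's real address, and the usual volume/port options are omitted:

```
# give the proxy container its own upstream DNS server, bypassing the local
# server that redirects logger.talent-monitoring.com to the proxy itself
docker run --dns 8.8.8.8 ghcr.io/s-allius/tsun-gen3-proxy:latest

# docker-compose equivalent:
#   services:
#     tsun-proxy:
#       dns:
#         - 8.8.8.8
```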
### Over The Air (OTA) firmware update
Even if the proxy is connected between the inverter and the TSUN Cloud, an OTA update is supported. To do this, the inverter must be able to reach the website http://www.talent-monitoring.com:9002/ in order to download images from there.
It must be ensured that this address is not mapped to the proxy!
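A quick way to verify both resolutions from a machine in the inverter's network (the expected results below are assumptions based on the setup described above):

```
nslookup logger.talent-monitoring.com   # should return the Docker host running the proxy
nslookup www.talent-monitoring.com      # should return TSUN's public address, not the proxy
```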
## Compatibility
In the following table you will find an overview of which inverter model has been tested for compatibility with which firmware version.
A combination with a red question mark should work, but I have not checked it in detail.
Micro Inverter Model | Fw. 1.00.06 | Fw. 1.00.17 | Fw. 1.00.20 | Fw. 1.1.00.0B
:---|:---:|:---:|:---:|:---:|
G3 micro inverters (single MPPT):<br>MS-300, MS-350, MS-400| ❓ | ❓ | ❓ |
G3 micro inverters (dual MPPT):<br>MS-600, MS-700, MS-800| ✔️ | ✔️ | ✔️ |
G3 PLUS micro inverters:<br>MS-1600, MS-1800, MS-2000| | | | 🚧
balcony micro inverters:<br>MS-400-D, MS-800-D, MS-2000-D| ❓ | ❓ | ❓| ❓
```
Legend
 : Firmware not available for these devices
✔️: proxy support tested
❓: proxy support possible but not tested
🚧: Proxy support in preparation
```
❗The new inverters of the G3Plus generation (e.g. MS-2000) use a completely different protocol for data transmission to the TSUN server. I already have such an inverter in operation and am working on the integration for the proxy version 0.6. The serial numbers of these inverters start with `Y17E` instead of `R17E`.
If you have one of these combinations with a red question mark, it would be very nice if you could send me a proxy trace so that I can carry out the detailed checks and adjust the device and system tests. [Ask here how to send a trace](https://github.com/s-allius/tsun-gen3-proxy/discussions/categories/traces-for-compatibility-check)
 ## License

 This project is licensed under the [BSD 3-clause License](https://opensource.org/licenses/BSD-3-Clause).
@@ -140,7 +186,6 @@ Note the aiomqtt library used is based on the paho-mqtt library, which has a dua
- One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD) - One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD)
- One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD) - One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD)
## Versioning ## Versioning
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Breaking changes will only occur in major `X.0.0` releases. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Breaking changes will only occur in major `X.0.0` releases.

View File

@@ -1,4 +1,5 @@
 tests/
 **/__pycache__
 *.pyc
 .DS_Store
+build.sh

View File

@@ -7,20 +7,21 @@ ARG GID=1000
 FROM python:3.12-alpine AS base

 USER root

-RUN apk update && \
-    apk upgrade
-RUN apk add --no-cache su-exec
+COPY --chmod=0700 ./hardening_base.sh .
+RUN apk upgrade --no-cache && \
+    apk add --no-cache su-exec && \
+    ./hardening_base.sh && \
+    rm ./hardening_base.sh

 #
 # second stage for building wheels packages
 FROM base as builder

-RUN apk add --no-cache build-base && \
-    python -m pip install --no-cache-dir -U pip wheel

 # copy the dependencies file to the root dir and install requirements
 COPY ./requirements.txt /root/
-RUN python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
+RUN apk add --no-cache build-base && \
+    python -m pip install --no-cache-dir -U pip wheel && \
+    python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt

 #
@@ -31,26 +32,32 @@ ARG VERSION
 ARG UID
 ARG GID
 ARG LOG_LVL
+ARG environment

 ENV VERSION=$VERSION
 ENV SERVICE_NAME=$SERVICE_NAME
 ENV UID=$UID
 ENV GID=$GID
 ENV LOG_LVL=$LOG_LVL
+ENV HOME=/home/$SERVICE_NAME

 # set the working directory in the container
 WORKDIR /home/$SERVICE_NAME
-# update PATH environment variable
 ENV HOME=/home/$SERVICE_NAME

 VOLUME ["/home/$SERVICE_NAME/log", "/home/$SERVICE_NAME/config"]

 # install the requirements from the wheels packages from the builder stage
+# and unistall python packages and alpine package manger to reduce attack surface
 COPY --from=builder /root/wheels /root/wheels
+COPY --chmod=0700 ./hardening_final.sh .
 RUN python -m pip install --no-cache --no-index /root/wheels/* && \
-    rm -rf /root/wheels
+    rm -rf /root/wheels && \
+    python -m pip uninstall --yes setuptools wheel pip && \
+    apk --purge del apk-tools && \
+    ./hardening_final.sh && \
+    rm ./hardening_final.sh

 # copy the content of the local src and config directory to the working directory
 COPY --chmod=0700 entrypoint.sh /root/entrypoint.sh

View File

@@ -22,11 +22,11 @@ fi
 echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE

 if [[ $1 == dev ]];then
-    docker build --build-arg "VERSION=${VERSION}" --build-arg "LOG_LVL=DEBUG" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
+    docker build --build-arg "VERSION=${VERSION}" --build-arg environment=dev --build-arg "LOG_LVL=DEBUG" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
 elif [[ $1 == rc ]];then
-    docker build --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
+    docker build --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
 elif [[ $1 == rel ]];then
-    docker build --no-cache --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
+    docker build --no-cache --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
     docker push ghcr.io/s-allius/tsun-gen3-proxy:latest
     docker push ghcr.io/s-allius/tsun-gen3-proxy:${MAJOR}
     docker push ghcr.io/s-allius/tsun-gen3-proxy:${VERSION}

View File

@@ -11,10 +11,12 @@ if [ "$user" = '0' ]; then
     mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config

     if ! id $SERVICE_NAME &> /dev/null; then
+        echo "# create user"
         addgroup --gid $GID $SERVICE_NAME 2> /dev/null
         adduser -G $SERVICE_NAME -s /bin/false -D -H -g "" -u $UID $SERVICE_NAME
+        chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
+        rm -fr /usr/sbin/addgroup /usr/sbin/adduser /bin/chown
     fi
-    chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true

     echo "######################################################"
     echo "#"

19
app/hardening_base.sh Normal file
View File

@@ -0,0 +1,19 @@
#!/bin/sh
rm -fr /var/spool/cron
rm -fr /etc/crontabs
rm -fr /etc/periodic
# Remove every user and group but root
sed -i -r '/^(root)/!d' /etc/group
sed -i -r '/^(root)/!d' /etc/passwd
# Remove init scripts since we do not use them.
rm -fr /etc/inittab
# Remove kernel tunables since we do not need them.
rm -fr /etc/sysctl*
rm -fr /etc/modprobe.d
# Remove fstab since we do not need it.
rm -f /etc/fstab

22
app/hardening_final.sh Normal file
View File

@@ -0,0 +1,22 @@
#!/bin/sh
# For production images delete all uneeded admin commands and remove dangerous commands.
# addgroup, adduser and chmod will be removed in entrypoint.sh during first start
# su-exec will be needed for ever restart of the cotainer
if [ "$environment" = "production" ] ; then \
find /sbin /usr/sbin ! -type d \
-a ! -name addgroup \
-a ! -name adduser \
-a ! -name nologin \
-a ! -name su-exec \
-delete; \
find /bin /usr/bin -xdev \( \
-name chgrp -o \
-name chmod -o \
-name hexdump -o \
-name ln -o \
-name od -o \
-name strings -o \
-name su -o \
-name wget -o \
\) -delete \
; fi

View File

@@ -9,18 +9,22 @@ logger = logging.getLogger('conn')
 class AsyncStream(Message):

-    def __init__(self, reader, writer, addr, remote_stream, server_side: bool
-                 ) -> None:
-        super().__init__(server_side)
+    def __init__(self, reader, writer, addr, remote_stream, server_side: bool,
+                 id_str=b'') -> None:
+        super().__init__(server_side, id_str)
         self.reader = reader
         self.writer = writer
         self.remoteStream = remote_stream
         self.addr = addr
+        self.r_addr = ''
+        self.l_addr = ''

     '''
     Our puplic methods
     '''
-    async def loop(self) -> None:
+    async def loop(self):
+        self.r_addr = self.writer.get_extra_info('peername')
+        self.l_addr = self.writer.get_extra_info('sockname')
         while True:
             try:
@@ -35,26 +39,28 @@ class AsyncStream(Message):
                     ConnectionAbortedError,
                     BrokenPipeError,
                     RuntimeError) as error:
-                logger.warning(f'In loop for {self.addr}: {error}')
+                logger.warning(f'In loop for l{self.l_addr} | '
+                               f'r{self.r_addr}: {error}')
                 self.close()
-                return
+                return self
             except Exception:
+                self.inc_counter('SW_Exception')
                 logger.error(
                     f"Exception for {self.addr}:\n"
                     f"{traceback.format_exc()}")
                 self.close()
-                return
+                return self

     def disc(self) -> None:
-        logger.debug(f'in AsyncStream.disc() {self.addr}')
+        logger.debug(f'in AsyncStream.disc() l{self.l_addr} | r{self.r_addr}')
         self.writer.close()

     def close(self):
-        logger.debug(f'in AsyncStream.close() {self.addr}')
+        logger.debug(f'in AsyncStream.close() l{self.l_addr} | r{self.r_addr}')
         self.writer.close()
         super().close()  # call close handler in the parent class
-        # logger.info (f'AsyncStream refs: {gc.get_referrers(self)}')
+        # logger.info(f'AsyncStream refs: {gc.get_referrers(self)}')

     '''
     Our private methods
@@ -79,6 +85,10 @@ class AsyncStream(Message):
         if self._forward_buffer:
             if not self.remoteStream:
                 await self.async_create_remote()
+                if self.remoteStream:
+                    self.remoteStream._init_new_client_conn(self.contact_name,
+                                                            self.contact_mail)
+                    await self.remoteStream.__async_write()

             if self.remoteStream:
                 hex_dump_memory(logging.INFO,
@@ -96,4 +106,4 @@ class AsyncStream(Message):
         pass

     def __del__(self):
-        logging.debug(f"AsyncStream.__del__ {self.addr}")
+        logging.debug(f"AsyncStream.__del__ l{self.l_addr} | r{self.r_addr}")

View File

@@ -33,11 +33,13 @@ class Infos:
'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': 0x00092f90, 'mf': 0x000927c0, 'sw': 0x00092ba8}, # noqa: E501 'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': 0x00092f90, 'mf': 0x000927c0, 'sw': 0x00092ba8}, # noqa: E501
'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': 0x00000032, 'mf': 0x00000014, 'sw': 0x0000001e}, # noqa: E501 'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': 0x00000032, 'mf': 0x00000014, 'sw': 0x0000001e}, # noqa: E501
'input_pv1': {'via': 'inverter', 'name': 'Module PV1'}, 'input_pv1': {'via': 'inverter', 'name': 'Module PV1'},
'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'dep': {'reg': 0x00095b50, 'gte': 2}}, # noqa: E501 'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'dep': {'reg': 0x00013880, 'gte': 2}}, # noqa: E501
'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'dep': {'reg': 0x00095b50, 'gte': 3}}, # noqa: E501 'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'dep': {'reg': 0x00013880, 'gte': 3}}, # noqa: E501
'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'dep': {'reg': 0x00095b50, 'gte': 4}}, # noqa: E501 'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'dep': {'reg': 0x00013880, 'gte': 4}}, # noqa: E501
} }
__comm_type_val_tpl = "{%set com_types = ['n/a','Wi-Fi', 'G4', 'G5', 'GPRS'] %}{{com_types[value_json['Communication_Type']|int(0)]|default(value_json['Communication_Type'])}}" # noqa: E501
__info_defs = { __info_defs = {
# collector values used for device registration: # collector values used for device registration:
0x00092ba8: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 0x00092ba8: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
@@ -45,7 +47,6 @@ class Infos:
0x00092f90: {'name': ['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 0x00092f90: {'name': ['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00095a88: {'name': ['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 0x00095a88: {'name': ['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00095aec: {'name': ['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 0x00095aec: {'name': ['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00095b50: {'name': ['collector', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# inverter values used for device registration: # inverter values used for device registration:
0x0000000a: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 0x0000000a: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
@@ -53,6 +54,7 @@ class Infos:
0x0000001e: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 0x0000001e: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
0x00000028: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 0x00000028: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000032: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 0x00000032: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00013880: {'name': ['inverter', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# proxy: # proxy:
0xffffff00: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': '| int', 'name': 'Active Inverter Connections', 'icon': 'mdi:counter'}}, # noqa: E501 0xffffff00: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': '| int', 'name': 'Active Inverter Connections', 'icon': 'mdi:counter'}}, # noqa: E501
@@ -61,6 +63,8 @@ class Infos:
0xffffff03: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': '| int', 'name': 'Invalid Data Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501 0xffffff03: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': '| int', 'name': 'Invalid Data Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
0xffffff04: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': '| int', 'name': 'Internal Error', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic', 'en': False}}, # noqa: E501 0xffffff04: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': '| int', 'name': 'Internal Error', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic', 'en': False}}, # noqa: E501
0xffffff05: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': '| int', 'name': 'Unknown Control Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501 0xffffff05: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': '| int', 'name': 'Unknown Control Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
0xffffff06: {'name': ['proxy', 'OTA_Start_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'ota_start_cmd_', 'fmt': '| int', 'name': 'OTA Start Cmd', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
0xffffff07: {'name': ['proxy', 'SW_Exception'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'sw_exception_', 'fmt': '| int', 'name': 'Internal SW Exception', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
# 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':'| float','name': 'Grid Voltage'}}, # noqa: E501 # 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':'| float','name': 'Grid Voltage'}}, # noqa: E501
# events # events
@@ -86,7 +90,7 @@ class Infos:
0x0000044c: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': '| float', 'name': 'Grid Current', 'ent_cat': 'diagnostic'}}, # noqa: E501 0x0000044c: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': '| float', 'name': 'Grid Current', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x000004b0: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': '| float', 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}}, # noqa: E501 0x000004b0: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': '| float', 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000640: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': '| float', 'name': 'Power'}}, # noqa: E501 0x00000640: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': '| float', 'name': 'Power'}}, # noqa: E501
0x000005dc: {'name': ['env', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'fmt': '| int', 'name': 'Rated Power', 'ent_cat': 'diagnostic'}}, # noqa: E501 0x000005dc: {'name': ['env', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'fmt': '| string + " W"', 'name': 'Rated Power', 'icon': 'mdi:lightning-bolt', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000514: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': '| int', 'name': 'Temperature'}}, # noqa: E501 0x00000514: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': '| int', 'name': 'Temperature'}}, # noqa: E501
# input measures: # input measures:
@@ -115,9 +119,13 @@ class Infos:
0x00000bb8: {'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': '| float', 'name': 'Total Generation', 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501 0x00000bb8: {'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': '| float', 'name': 'Total Generation', 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
# controller: # controller:
0x000c3500: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': '| int', 'name': 'Signal Strength', 'icon': 'mdi:wifi'}}, # noqa: E501 0x000c3500: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': '| int', 'name': 'Signal Strength', 'icon': 'mdi:wifi'}}, # noqa: E501
0x000c96a8: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'name': 'Power on Time', 'val_tpl': "{{ (value_json['Power_On_Time'] | float)}}", 'nat_prc': '3', 'ent_cat': 'diagnostic'}}, # noqa: E501 0x000c96a8: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'fmt': '| float', 'name': 'Power on Time', 'nat_prc': '3', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x000cf850: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'data_up_intval_', 'fmt': '| int', 'name': 'Data Up Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}}, # noqa: E501 0x000d0020: {'name': ['controller', 'Collect_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_collect_intval_', 'fmt': '| string + " s"', 'name': 'Data Collect Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x000cfc38: {'name': ['controller', 'Connect_Count'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'connect_count_', 'fmt': '| int', 'name': 'Connect Count', 'icon': 'mdi:counter', 'comp': 'sensor', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x000c7f38: {'name': ['controller', 'Communication_Type'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'comm_type_', 'name': 'Communication Type', 'val_tpl': __comm_type_val_tpl, 'comp': 'sensor', 'icon': 'mdi:wifi'}}, # noqa: E501
# 0x000c7f38: {'name': ['controller', 'Communication_Type'], 'level': logging.DEBUG, 'unit': 's', 'new_value': 5}, # noqa: E501
0x000cf850: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_up_intval_', 'fmt': '| string + " s"', 'name': 'Data Up Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}}, # noqa: E501
} }
@@ -262,7 +270,7 @@ class Infos:
f"{via}") f"{via}")
for key in ('mdl', 'mf', 'sw', 'hw'): # add optional for key in ('mdl', 'mf', 'sw', 'hw'): # add optional
# values fpr 'modell', 'manufaturer', 'sw version' and # values fpr 'modell', 'manufacturer', 'sw version' and
# 'hw version' # 'hw version'
if key in device: if key in device:
data = self.dev_value(device[key]) data = self.dev_value(device[key])
@@ -304,24 +312,27 @@ class Infos:
must_incr = d['ha']['must_incr'] must_incr = d['ha']['must_incr']
else: else:
must_incr = False must_incr = False
new_val = None
# if 'new_value' in d:
# new_val = d['new_value']
return d['name'], d['level'], d['unit'], must_incr return d['name'], d['level'], d['unit'], must_incr, new_val
def parse(self, buf) -> None: def parse(self, buf, ind=0) -> None:
'''parse a data sequence received from the inverter and '''parse a data sequence received from the inverter and
stores the values in Infos.db stores the values in Infos.db
buf: buffer of the sequence to parse''' buf: buffer of the sequence to parse'''
result = struct.unpack_from('!l', buf, 0) result = struct.unpack_from('!l', buf, ind)
elms = result[0] elms = result[0]
i = 0 i = 0
ind = 4 ind += 4
while i < elms: while i < elms:
result = struct.unpack_from('!lB', buf, ind) result = struct.unpack_from('!lB', buf, ind)
info_id = result[0] info_id = result[0]
data_type = result[1] data_type = result[1]
ind += 5 ind += 5
keys, level, unit, must_incr = self.__key_obj(info_id) keys, level, unit, must_incr, new_val = self.__key_obj(info_id)
if data_type == 0x54: # 'T' -> Pascal-String if data_type == 0x54: # 'T' -> Pascal-String
str_len = buf[ind] str_len = buf[ind]
@@ -331,16 +342,29 @@ class Infos:
ind += str_len+1 ind += str_len+1
elif data_type == 0x49: # 'I' -> int32 elif data_type == 0x49: # 'I' -> int32
# if new_val:
# struct.pack_into('!l', buf, ind, new_val)
result = struct.unpack_from('!l', buf, ind)[0] result = struct.unpack_from('!l', buf, ind)[0]
ind += 4 ind += 4
elif data_type == 0x53: # 'S' -> short elif data_type == 0x53: # 'S' -> short
# if new_val:
# struct.pack_into('!h', buf, ind, new_val)
result = struct.unpack_from('!h', buf, ind)[0] result = struct.unpack_from('!h', buf, ind)[0]
ind += 2 ind += 2
elif data_type == 0x46: # 'F' -> float32 elif data_type == 0x46: # 'F' -> float32
# if new_val:
# struct.pack_into('!f', buf, ind, new_val)
result = round(struct.unpack_from('!f', buf, ind)[0], 2) result = round(struct.unpack_from('!f', buf, ind)[0], 2)
ind += 4 ind += 4
elif data_type == 0x4c: # 'L' -> int64
# if new_val:
# struct.pack_into('!q', buf, ind, new_val)
result = struct.unpack_from('!q', buf, ind)[0]
ind += 8
else: else:
self.inc_counter('Invalid_Data_Type') self.inc_counter('Invalid_Data_Type')
logging.error(f"Infos.parse: data_type: {data_type}" logging.error(f"Infos.parse: data_type: {data_type}"
@@ -374,6 +398,6 @@ class Infos:
name = str(f'info-id.0x{info_id:x}') name = str(f'info-id.0x{info_id:x}')
self.tracer.log(level, f'{name} : {result}{unit}' self.tracer.log(level, f'{name} : {result}{unit}'
' update: {update}') f' update: {update}')
i += 1 i += 1

View File

@@ -28,7 +28,7 @@ class Inverter(AsyncStream):
     class methods:
         class_init():  initialize the common resources of the proxy (MQTT
                        broker, Proxy DB, etc). Must be called before the
-                       first Ib´verter instance can be created
+                       first inverter instance can be created
         class_close(): release the common resources of the proxy. Should not
                        be called before any instances of the class are
                        destroyed
@@ -107,7 +107,7 @@ class Inverter(AsyncStream):
         self.inc_counter('Inverter_Cnt')
         await self.loop()
         self.dec_counter('Inverter_Cnt')
-        logging.info(f'Server loop stopped for {addr}')
+        logging.info(f'Server loop stopped for r{self.r_addr}')

         # if the server connection closes, we also have to disconnect
         # the connection to te TSUN cloud
@@ -121,15 +121,22 @@ class Inverter(AsyncStream):
     async def client_loop(self, addr):
         '''Loop for receiving messages from the TSUN cloud (client-side)'''
-        await self.remoteStream.loop()
-        logging.info(f'Client loop stopped for {addr}')
+        clientStream = await self.remoteStream.loop()
+        logging.info(f'Client loop stopped for l{clientStream.l_addr}')

         # if the client connection closes, we don't touch the server
         # connection. Instead we erase the client connection stream,
         # thus on the next received packet from the inverter, we can
         # establish a new connection to the TSUN cloud
-        self.remoteStream.remoteStream = None  # erase backlink to inverter
-        self.remoteStream = None  # than erase client connection
+
+        # erase backlink to inverter
+        clientStream.remoteStream = None
+
+        if self.remoteStream == clientStream:
+            # logging.debug(f'Client l{clientStream.l_addr} refs:'
+            #               f' {gc.get_referrers(clientStream)}')
+            # than erase client connection
+            self.remoteStream = None

     async def async_create_remote(self) -> None:
         '''Establish a client connection to the TSUN cloud'''
@@ -142,12 +149,14 @@ class Inverter(AsyncStream):
             logging.info(f'Connected to {addr}')
             connect = asyncio.open_connection(host, port)
             reader, writer = await connect
-            self.remoteStream = AsyncStream(reader, writer, addr, self, False)
+            self.remoteStream = AsyncStream(reader, writer, addr, self,
+                                            False, self.id_str)
             asyncio.create_task(self.client_loop(addr))
         except ConnectionRefusedError as error:
             logging.info(f'{error}')
         except Exception:
+            self.inc_counter('SW_Exception')
             logging.error(
                 f"Inverter: Exception for {addr}:\n"
                 f"{traceback.format_exc()}")
@@ -173,6 +182,7 @@ class Inverter(AsyncStream):
         except MqttCodeError as error:
             logging.error(f'Mqtt except: {error}')
         except Exception:
+            self.inc_counter('SW_Exception')
             logging.error(
                 f"Inverter: Exception:\n"
                 f"{traceback.format_exc()}")
@@ -197,7 +207,7 @@ class Inverter(AsyncStream):
f"/{node_id}{id}/config", data_json) f"/{node_id}{id}/config", data_json)
def close(self) -> None: def close(self) -> None:
logging.debug(f'Inverter.close() {self.addr}') logging.debug(f'Inverter.close() l{self.l_addr} | r{self.r_addr}')
super().close() # call close handler in the parent class super().close() # call close handler in the parent class
# logger.debug (f'Inverter refs: {gc.get_referrers(self)}') # logger.debug (f'Inverter refs: {gc.get_referrers(self)}')

View File

@@ -55,8 +55,8 @@ class Control:
     def is_ind(self) -> bool:
         return (self.ctrl == 0x91)

-    # def is_req(self) -> bool:
-    #     return not (self.ctrl & 0x08)
+    def is_req(self) -> bool:
+        return (self.ctrl == 0x70)

     def is_resp(self) -> bool:
         return (self.ctrl == 0x99)
@@ -74,7 +74,7 @@ class Message(metaclass=IterRegistry):
     _registry = []
     new_stat_data = {}

-    def __init__(self, server_side: bool):
+    def __init__(self, server_side: bool, id_str=b''):
         self._registry.append(weakref.ref(self))
         self.server_side = server_side
         self.header_valid = False
@@ -83,13 +83,18 @@ class Message(metaclass=IterRegistry):
         self.unique_id = 0
         self.node_id = ''
         self.sug_area = ''
-        self._recv_buffer = b''
+        self.await_conn_resp_cnt = 0
+        self.id_str = id_str
+        self.contact_name = b''
+        self.contact_mail = b''
+        self._recv_buffer = bytearray(0)
         self._send_buffer = bytearray(0)
         self._forward_buffer = bytearray(0)
         self.db = Infos()
         self.new_data = {}
         self.switch = {
             0x00: self.msg_contact_info,
+            0x13: self.msg_ota_update,
             0x22: self.msg_get_time,
             0x71: self.msg_collector_data,
             0x04: self.msg_inverter_data,
@@ -110,7 +115,7 @@ class Message(metaclass=IterRegistry):
         # we have refernces to methods of this class in self.switch
         # so we have to erase self.switch, otherwise this instance can't be
         # deallocated by the garbage collector ==> we get a memory leak
-        del self.switch
+        self.switch.clear()

     def inc_counter(self, counter: str) -> None:
         self.db.inc_counter(counter)
@@ -175,6 +180,17 @@ class Message(metaclass=IterRegistry):
                     f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
         return

+    def _init_new_client_conn(self, contact_name, contact_mail) -> None:
+        logger.info(f'name: {contact_name} mail: {contact_mail}')
+        self.msg_id = 0
+        self.await_conn_resp_cnt += 1
+        self.__build_header(0x91)
+        self._send_buffer += struct.pack(f'!{len(contact_name)+1}p'
+                                         f'{len(contact_mail)+1}p',
+                                         contact_name, contact_mail)
+        self.__finish_send_msg()
+
     '''
     Our private methods
     '''
@@ -194,7 +210,7 @@ class Message(metaclass=IterRegistry):
             type += 'S'
         return switch.get(type, '???')

-    def __timestamp(self):
+    def _timestamp(self):  # pragma: no cover
         if False:
             # utc as epoche
             ts = time.time()
@@ -268,33 +284,66 @@ class Message(metaclass=IterRegistry):
     '''
     def msg_contact_info(self):
         if self.ctrl.is_ind():
-            self.__build_header(0x99)
-            self._send_buffer += b'\x01'
-            self.__finish_send_msg()
-        elif self.ctrl.is_resp():
-            return  # ignore received response from tsun
+            if self.server_side and self.__process_contact_info():
+                self.__build_header(0x91)
+                self._send_buffer += b'\x01'
+                self.__finish_send_msg()
+                # don't forward this contact info here, we will build one
+                # when the remote connection is established
+            elif self.await_conn_resp_cnt > 0:
+                self.await_conn_resp_cnt -= 1
+            else:
+                self.forward(self._recv_buffer, self.header_len+self.data_len)
+            return
         else:
+            logger.warning('Unknown Ctrl')
             self.inc_counter('Unknown_Ctrl')
+            self.forward(self._recv_buffer, self.header_len+self.data_len)

-        self.forward(self._recv_buffer, self.header_len+self.data_len)
+    def __process_contact_info(self) -> bool:
+        result = struct.unpack_from('!B', self._recv_buffer, self.header_len)
+        name_len = result[0]
+        if self.data_len < name_len+2:
+            return False
+        result = struct.unpack_from(f'!{name_len+1}pB', self._recv_buffer,
+                                    self.header_len)
+        self.contact_name = result[0]
+        mail_len = result[1]
+        logger.info(f'name: {self.contact_name}')
+        result = struct.unpack_from(f'!{mail_len+1}p', self._recv_buffer,
+                                    self.header_len+name_len+1)
+        self.contact_mail = result[0]
+        logger.info(f'mail: {self.contact_mail}')
+        return True
     def msg_get_time(self):
-        if self.ctrl.is_ind():
-            ts = self.__timestamp()
-            logger.debug(f'time: {ts:08x}')
-            self.__build_header(0x99)
-            self._send_buffer += struct.pack('!q', ts)
-            self.__finish_send_msg()
-        elif self.ctrl.is_resp():
-            result = struct.unpack_from('!q', self._recv_buffer,
-                                        self.header_len)
-            logger.debug(f'tsun-time: {result[0]:08x}')
-            return  # ignore received response from tsun
-        else:
-            self.inc_counter('Unknown_Ctrl')
-
-        self.forward(self._recv_buffer, self.header_len+self.data_len)
+        tsun = Config.get('tsun')
+        if tsun['enabled']:
+            if self.ctrl.is_ind():
+                if self.data_len >= 8:
+                    ts = self._timestamp()
+                    result = struct.unpack_from('!q', self._recv_buffer,
+                                                self.header_len)
+                    logger.debug(f'tsun-time: {result[0]:08x}'
+                                 f' proxy-time: {ts:08x}')
+            else:
+                logger.warning('Unknown Ctrl')
+                self.inc_counter('Unknown_Ctrl')
+
+            self.forward(self._recv_buffer, self.header_len+self.data_len)
+        else:
+            if self.ctrl.is_ind():
+                if self.data_len == 0:
+                    ts = self._timestamp()
+                    logger.debug(f'time: {ts:08x}')
+                    self.__build_header(0x91)
+                    self._send_buffer += struct.pack('!q', ts)
+                    self.__finish_send_msg()
+            else:
+                logger.warning('Unknown Ctrl')
+                self.inc_counter('Unknown_Ctrl')

     def parse_msg_header(self):
         result = struct.unpack_from('!lB', self._recv_buffer, self.header_len)
@@ -324,6 +373,7 @@ class Message(metaclass=IterRegistry):
         elif self.ctrl.is_resp():
             return  # ignore received response
         else:
+            logger.warning('Unknown Ctrl')
             self.inc_counter('Unknown_Ctrl')

         self.forward(self._recv_buffer, self.header_len+self.data_len)
@@ -338,6 +388,7 @@ class Message(metaclass=IterRegistry):
         elif self.ctrl.is_resp():
             return  # ignore received response
         else:
+            logger.warning('Unknown Ctrl')
             self.inc_counter('Unknown_Ctrl')

         self.forward(self._recv_buffer, self.header_len+self.data_len)
@@ -345,11 +396,21 @@ class Message(metaclass=IterRegistry):
     def __process_data(self):
         msg_hdr_len = self.parse_msg_header()
-        for key, update in self.db.parse(self._recv_buffer[self.header_len
-                                         + msg_hdr_len:]):
+        for key, update in self.db.parse(self._recv_buffer, self.header_len
+                                         + msg_hdr_len):
             if update:
                 self.new_data[key] = True

+    def msg_ota_update(self):
+        if self.ctrl.is_req():
+            self.inc_counter('OTA_Start_Msg')
+        elif self.ctrl.is_ind():
+            pass
+        else:
+            logger.warning('Unknown Ctrl')
+            self.inc_counter('Unknown_Ctrl')
+        self.forward(self._recv_buffer, self.header_len+self.data_len)
+
     def msg_unknown(self):
         logger.warning(f"Unknow Msg: ID:{self.msg_id}")
         self.inc_counter('Unknown_Msg')

View File

@@ -12,16 +12,96 @@ def ContrDataSeq(): # Get Time Request message
    msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
    return msg
@pytest.fixture
def Contr2DataSeq(): # Get Time Request message
msg = b'\x00\x00\x00\x39\x00\x09\x2b\xa8\x54\x10\x52'
msg += b'\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x32\x30\x00'
msg += b'\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f\x6e\x00\x09\x2f\x90\x54'
msg += b'\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88'
msg += b'\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f'
msg += b'\x6d\x00\x09\x5a\xec\x54\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61'
msg += b'\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e'
msg += b'\x63\x6f\x6d\x00\x0d\x2f\x00\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x32\xe8\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x36\xd0\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x3a\xb8\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x3e\xa0\x54'
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\x00\x0d\x42\x88\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x46\x70\x54\x10\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x4a'
msg += b'\x58\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\x00\x0d\x4e\x40\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x52\x28\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x56\x10\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x59\xf8\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x5d\xe0\x54'
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\x00\x0d\x61\xc8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x65\xb0\x54\x10\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x69'
msg += b'\x98\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\x00\x0d\x6d\x80\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x71\x68\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x75\x50\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x79\x38\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x7d\x20\x54'
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\x00\x0d\x81\x08\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x84\xf0\x54\x10\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x88'
msg += b'\xd8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\x00\x0d\x8c\xc0\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x90\xa8\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x94\x90\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x98\x78\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x9c\x60\x54'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00\x00'
msg += b'\x00\x10\x00\x0c\x96\xa8\x49\x00\x00\x01\x4e\x00\x0c\x7f\x38\x49'
msg += b'\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8'
msg += b'\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49\x00\x00\x00\x00\x00'
msg += b'\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00'
msg += b'\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00'
msg += b'\x00\x13\x8d\x00\x09\x5b\x50\x49\x00\x00\x00\x02\x00\x0d\x04\x08'
msg += b'\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c'
msg += b'\x50\x59\x49\x00\x00\x00\x33\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
msg += b'\x00\x0d\x23\x48\x49\xff\xff\xff\xff\x00\x0d\x27\x30\x49\xff\xff'
msg += b'\xff\xff\x00\x0d\x2b\x18\x4c\x00\x00\x00\x00\x00\x00\xff\xff\x00'
msg += b'\x0c\xa2\x60\x49\x00\x00\x00\x00\x00\x0d\xa0\x48\x49\x00\x00\x00'
msg += b'\x00\x00\x0d\xa4\x30\x49\x00\x00\x00\x00\x00\x0d\xa8\x18\x49\x00'
msg += b'\x00\x00\x00'
return msg
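The controller fixtures above are raw register dumps and hard to read as hex. For orientation, they appear to follow a simple layout: a 4-byte big-endian prefix (apparently an entry count), then repeated records of a 4-byte address, one type byte (0x54 for a length-prefixed string, 0x49 for a 32-bit integer, 0x4c for a 64-bit integer) and the value. A rough decoder sketch under that assumption, not the project's parser:

import struct

def decode_entries(buf: bytes):
    # Assumed layout, inferred from the fixture bytes above:
    # <4-byte prefix> then <4-byte address><type byte><value> until the end.
    pos = 4
    while pos + 5 <= len(buf):
        addr, data_type = struct.unpack_from('>LB', buf, pos)
        pos += 5
        if data_type == 0x54:        # length-prefixed string
            length = buf[pos]
            value = buf[pos + 1:pos + 1 + length].decode(errors='replace')
            pos += 1 + length
        elif data_type == 0x49:      # 32-bit signed integer
            value = struct.unpack_from('>l', buf, pos)[0]
            pos += 4
        elif data_type == 0x4c:      # 64-bit signed integer
            value = struct.unpack_from('>q', buf, pos)[0]
            pos += 8
        else:                        # unknown type byte: stop decoding
            break
        yield hex(addr), value

# Feeding the Contr2DataSeq bytes above through decode_entries yields entries
# like ('0x92ba8', 'RSW_400_V1.00.20'), ('0x927c0', 'Raymon'), ... which is
# where the expected values in test_parse_control2 below come from.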
@pytest.fixture
def InvDataSeq(): # Data indication from the controller
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
- msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
+ msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def InvalidDataSeq(): # Data indication from the controller
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x64\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
- msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
+ msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
@@ -101,7 +181,15 @@ def test_parse_control(ContrDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
- {"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300}})
+ {"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 100, "Power_On_Time": 29, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300}})
def test_parse_control2(Contr2DataSeq):
i = Infos()
for key, result in i.parse (Contr2DataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.20", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 16, "Power_On_Time": 334, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300}})
def test_parse_inverter(InvDataSeq):
i = Infos()
@@ -109,7 +197,7 @@ def test_parse_inverter(InvDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
- {"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
+ {"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"}})
def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq):
i = Infos()
@@ -121,8 +209,8 @@ def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq):
assert json.dumps(i.db) == json.dumps(
{
- "collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300},
+ "collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 100, "Power_On_Time": 29, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300},
- "inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
+ "inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"}})
def test_build_ha_conf1(ContrDataSeq):
@@ -181,13 +269,14 @@ def test_build_ha_conf1(ContrDataSeq):
assert tests==5
- def test_build_ha_conf2(ContrDataSeq, InvDataSeq):
+ def test_build_ha_conf2(ContrDataSeq, InvDataSeq, InvDataSeq2):
i = Infos()
for key, result in i.parse (ContrDataSeq):
pass
for key, result in i.parse (InvDataSeq):
pass
for key, result in i.parse (InvDataSeq2):
pass
tests = 0
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False, sug_area = 'roof'):
@@ -320,13 +409,13 @@ def test_statistic_counter():
assert val == None or val == 0
i.static_init() # initialize counter
- assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0}})
+ assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0}})
val = i.dev_value(0xffffff00) # valid and initiliazed addr
assert val == 0
i.inc_counter('Inverter_Cnt')
- assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0}})
+ assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0}})
val = i.dev_value(0xffffff00)
assert val == 1


@@ -42,6 +42,8 @@ class MemoryStream(Message):
pass
return copied_bytes
def _timestamp(self):
return 1700260990000
def _Message__flush_recv_msg(self) -> None:
super()._Message__flush_recv_msg()
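The _timestamp() override a few lines up pins the test clock to 1700260990000 ms since the Unix epoch. Packed as an unsigned 64-bit big-endian value that is 0x018bdf73cc30, which is exactly the 8-byte payload at the end of the deterministic get-time responses asserted further down. A quick sanity check with the standard struct module:

import struct

# the pinned test timestamp, packed big-endian into 8 bytes
assert struct.pack('>Q', 1700260990000) == b'\x00\x00\x01\x8b\xdf\x73\xcc\x30'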
@@ -65,7 +67,11 @@ def Msg2ContactInfo(): # two Contact Info messages
@pytest.fixture
def MsgContactResp(): # Contact Response message
- return b'\x00\x00\x00\x14\x10R170000000000001\x99\x00\x01'
+ return b'\x00\x00\x00\x14\x10R170000000000001\x91\x00\x01'
@pytest.fixture
def MsgContactResp2(): # Contact Response message
return b'\x00\x00\x00\x14\x10R170000000000002\x91\x00\x01'
@pytest.fixture
def MsgContactInvalid(): # Contact Response message
@@ -77,7 +83,7 @@ def MsgGetTime(): # Get Time Request message
@pytest.fixture
def MsgTimeResp(): # Get Time Resonse message
- return b'\x00\x00\x00\x1b\x10R170000000000001\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
+ return b'\x00\x00\x00\x1b\x10R170000000000001\x91\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
@pytest.fixture
def MsgTimeInvalid(): # Get Time Request message
@@ -108,7 +114,7 @@ def MsgInverterInd(): # Data indication from the controller
msg = b'\x00\x00\x00\x8b\x10R170000000000001\x91\x04\x01\x90\x00\x01\x10R170000000000001'
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
- msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
+ msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
@@ -123,15 +129,6 @@ def MsgInverterInvalid(): # Get Time Request message
def MsgUnknown(): # Get Time Request message
return b'\x00\x00\x00\x17\x10R170000000000001\x91\x17\x01\x02\x03\x04'
@pytest.fixture
def MsgGetTime(): # Get Time Request message
return b'\x00\x00\x00\x13\x10R170000000000001\x91\x22'
@pytest.fixture
def MsgTimeResp(): # Get Time Resonse message
return b'\x00\x00\x00\x1b\x10R170000000000001\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
@pytest.fixture
def ConfigTsunAllowAll():
Config.config = {'tsun':{'enabled': True}, 'inverters':{'allow_all':True}}
@@ -144,6 +141,36 @@ def ConfigNoTsunInv1():
def ConfigTsunInv1():
Config.config = {'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1','suggested_area':'roof'}}}
@pytest.fixture
def MsgOtaReq(): # Over the air update rewuest from tsun cloud
msg = b'\x00\x00\x01\x16\x10R170000000000001\x70\x13\x01\x02\x76\x35\x70\x68\x74\x74\x70'
msg += b'\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f'
msg += b'\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30\x30'
msg += b'\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f\x75'
msg += b'\x70\x64\x61\x74\x65\x2f\x64\x6f\x77\x6e\x6c\x6f\x61\x64\x3f\x76'
msg += b'\x65\x72\x3d\x56\x31\x2e\x30\x30\x2e\x31\x37\x26\x6e\x61\x6d\x65'
msg += b'\x3d\x47\x33\x2d\x57\x69\x46\x69\x2b\x2d\x56\x31\x2e\x30\x30\x2e'
msg += b'\x31\x37\x2d\x4f\x54\x41\x26\x65\x78\x74\x3d\x30\x60\x68\x74\x74'
msg += b'\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d'
msg += b'\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30'
msg += b'\x30\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f'
msg += b'\x75\x70\x64\x61\x74\x65\x2f\x63\x61\x6c\x6c\x62\x61\x63\x6b\x3f'
msg += b'\x71\x69\x64\x3d\x31\x35\x30\x33\x36\x32\x26\x72\x69\x64\x3d\x32'
msg += b'\x32\x39\x26\x64\x69\x64\x3d\x31\x33\x34\x32\x32\x35\x20\x36\x35'
msg += b'\x66\x30\x64\x37\x34\x34\x62\x66\x33\x39\x61\x62\x38\x32\x34\x64'
msg += b'\x32\x38\x62\x38\x34\x64\x31\x39\x65\x64\x33\x31\x31\x63\x06\x34'
msg += b'\x36\x38\x36\x33\x33\x01\x31\x01\x30\x00'
return msg
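The OTA request fixture above is easier to read once you notice that, after the 23-byte header and one leading flag byte, the payload looks like a sequence of length-prefixed ASCII fields: a short version tag, the firmware download URL, the callback URL, an MD5 digest and a few small trailing fields. A throwaway sketch under that assumption (the field meanings are inferred, not documented):

def split_ota_fields(msg: bytes, header_len: int = 23):
    # Assumption: one flag byte after the header, then repeated
    # <1-byte length><ASCII value> fields up to the end of the message.
    pos = header_len + 1
    while pos < len(msg):
        length = msg[pos]
        yield msg[pos + 1:pos + 1 + length]
        pos += 1 + length

# Applied to the MsgOtaReq bytes above this yields b'v5', the
# .../rom/update/download?... URL, the .../rom/update/callback?... URL,
# a 32-character MD5 string and a few short trailing fields.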
@pytest.fixture
def MsgOtaAck(): # Over the air update rewuest from tsun cloud
return b'\x00\x00\x00\x14\x10R170000000000001\x91\x13\x01'
@pytest.fixture
def MsgOtaInvalid(): # Get Time Request message
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x13\x01'
def test_read_message(MsgContactInfo):
m = MemoryStream(MsgContactInfo, (0,))
m.read() # read complete msg, and dispatch msg
@@ -246,7 +273,7 @@ def test_read_message_in_chunks2(MsgContactInfo):
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
m.close()
- def test_read_two_messages(ConfigTsunAllowAll, Msg2ContactInfo):
+ def test_read_two_messages(ConfigTsunAllowAll, Msg2ContactInfo,MsgContactResp,MsgContactResp2):
ConfigTsunAllowAll
m = MemoryStream(Msg2ContactInfo, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
@@ -259,9 +286,15 @@ def test_read_two_messages(ConfigTsunAllowAll, Msg2ContactInfo):
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
- assert m._forward_buffer==b'\x00\x00\x00,\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub@123456'
+ assert m._forward_buffer==b''
assert m._send_buffer==MsgContactResp
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m._send_buffer = bytearray(0) # clear send buffer for next test
m._init_new_client_conn(b'solarhub', b'solarhub@123456')
assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub@123456'
m._send_buffer = bytearray(0) # clear send buffer for next test
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
@@ -271,24 +304,72 @@ def test_read_two_messages(ConfigTsunAllowAll, Msg2ContactInfo):
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
- assert m._forward_buffer==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456'
+ assert m._forward_buffer==b''
assert m._send_buffer==MsgContactResp2
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m._send_buffer = bytearray(0) # clear send buffer for next test
m._init_new_client_conn(b'solarhub', b'solarhub@123456')
assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456'
m.close()
def test_msg_contact_resp(ConfigTsunInv1, MsgContactResp):
ConfigTsunInv1
m = MemoryStream(MsgContactResp, (0,), False)
m.await_conn_resp_cnt = 1
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.await_conn_resp_cnt == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
- assert int(m.ctrl)==153
+ assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_contact_resp_2(ConfigTsunInv1, MsgContactResp):
ConfigTsunInv1
m = MemoryStream(MsgContactResp, (0,), False)
m.await_conn_resp_cnt = 0
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.await_conn_resp_cnt == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgContactResp
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_contact_resp_3(ConfigTsunInv1, MsgContactResp):
ConfigTsunInv1
m = MemoryStream(MsgContactResp, (0,), True)
m.await_conn_resp_cnt = 0
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.await_conn_resp_cnt == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgContactResp
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
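Taken together, the three contact-response tests above pin down a forwarding gate: while the proxy is still waiting for the reply to a contact request it issued itself (await_conn_resp_cnt > 0), the inverter's response is consumed; otherwise it is passed through untouched. A compressed sketch of just that decision, illustrative only, with names mirroring the tests rather than the project's handler:

def gate_contact_resp(await_conn_resp_cnt: int, recv: bytes) -> tuple[int, bytes]:
    # returns the updated wait counter and the bytes to forward
    if await_conn_resp_cnt > 0:
        return await_conn_resp_cnt - 1, b''   # reply to our own request: swallow it
    return 0, recv                            # unsolicited reply: forward unchanged

assert gate_contact_resp(1, b'resp') == (0, b'')
assert gate_contact_resp(0, b'resp') == (0, b'resp')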
@@ -324,6 +405,25 @@ def test_msg_get_time(ConfigTsunInv1, MsgGetTime):
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==MsgGetTime
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_get_time_autark(ConfigNoTsunInv1, MsgGetTime):
ConfigNoTsunInv1
m = MemoryStream(MsgGetTime, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==b''
assert m._send_buffer==b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x8b\xdfs\xcc0'
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
@@ -336,11 +436,30 @@ def test_msg_time_resp(ConfigTsunInv1, MsgTimeResp):
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
- assert int(m.ctrl)==153
+ assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==8
assert m._forward_buffer==MsgTimeResp
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_time_resp_autark(ConfigNoTsunInv1, MsgTimeResp):
ConfigNoTsunInv1
m = MemoryStream(MsgTimeResp, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==8
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
@@ -362,6 +481,23 @@ def test_msg_time_invalid(ConfigTsunInv1, MsgTimeInvalid):
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_time_invalid_autark(ConfigNoTsunInv1, MsgTimeInvalid):
ConfigNoTsunInv1
m = MemoryStream(MsgTimeInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==148
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_cntrl_ind(ConfigTsunInv1, MsgControllerInd, MsgControllerAck):
ConfigTsunInv1
@@ -474,6 +610,68 @@ def test_msg_inv_invalid(ConfigTsunInv1, MsgInverterInvalid):
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_ota_req(ConfigTsunInv1, MsgOtaReq):
ConfigTsunInv1
m = MemoryStream(MsgOtaReq, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.db.stat['proxy']['OTA_Start_Msg'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==112
assert m.msg_id==19
assert m.header_len==23
assert m.data_len==259
assert m._forward_buffer==MsgOtaReq
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
assert m.db.stat['proxy']['OTA_Start_Msg'] == 1
m.close()
def test_msg_ota_ack(ConfigTsunInv1, MsgOtaAck):
ConfigTsunInv1
tracer.setLevel(logging.ERROR)
m = MemoryStream(MsgOtaAck, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.db.stat['proxy']['OTA_Start_Msg'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==19
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgOtaAck
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
assert m.db.stat['proxy']['OTA_Start_Msg'] == 0
m.close()
def test_msg_ota_invalid(ConfigTsunInv1, MsgOtaInvalid):
ConfigTsunInv1
m = MemoryStream(MsgOtaInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.db.stat['proxy']['OTA_Start_Msg'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==19
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgOtaInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
assert m.db.stat['proxy']['OTA_Start_Msg'] == 0
m.close()
def test_msg_unknown(ConfigTsunInv1, MsgUnknown):
ConfigTsunInv1
m = MemoryStream(MsgUnknown, (0,), False)


@@ -20,7 +20,7 @@ def MsgContactInfo(): # Contact Info message
@pytest.fixture
def MsgContactResp(): # Contact Response message
- return b'\x00\x00\x00\x14\x10'+get_sn()+b'\x99\x00\x01'
+ return b'\x00\x00\x00\x14\x10'+get_sn()+b'\x91\x00\x01'
@pytest.fixture
def MsgContactInfo2(): # Contact Info message
@@ -28,7 +28,7 @@ def MsgContactInfo2(): # Contact Info message
@pytest.fixture
def MsgContactResp2(): # Contact Response message
- return b'\x00\x00\x00\x14\x10'+get_invalid_sn()+b'\x99\x00\x01'
+ return b'\x00\x00\x00\x14\x10'+get_invalid_sn()+b'\x91\x00\x01'
@pytest.fixture
def MsgTimeStampReq(): # Get Time Request message
@@ -92,8 +92,29 @@ def MsgInverterInd(): # Data indication from the inverter
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
msg += b'\x53\x00\x00'
return msg
@pytest.fixture
def MsgOtaUpdateReq(): # Over the air update request from talent cloud
msg = b'\x00\x00\x01\x16\x10'+ get_sn() + b'\x70\x13\x01\x02\x76\x35'
msg += b'\x70\x68\x74\x74\x70'
msg += b'\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f'
msg += b'\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30\x30'
msg += b'\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f\x75'
msg += b'\x70\x64\x61\x74\x65\x2f\x64\x6f\x77\x6e\x6c\x6f\x61\x64\x3f\x76'
msg += b'\x65\x72\x3d\x56\x31\x2e\x30\x30\x2e\x31\x37\x26\x6e\x61\x6d\x65'
msg += b'\x3d\x47\x33\x2d\x57\x69\x46\x69\x2b\x2d\x56\x31\x2e\x30\x30\x2e'
msg += b'\x31\x37\x2d\x4f\x54\x41\x26\x65\x78\x74\x3d\x30\x60\x68\x74\x74'
msg += b'\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d'
msg += b'\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30'
msg += b'\x30\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f'
msg += b'\x75\x70\x64\x61\x74\x65\x2f\x63\x61\x6c\x6c\x62\x61\x63\x6b\x3f'
msg += b'\x71\x69\x64\x3d\x31\x35\x30\x33\x36\x32\x26\x72\x69\x64\x3d\x32'
msg += b'\x32\x39\x26\x64\x69\x64\x3d\x31\x33\x34\x32\x32\x35\x20\x36\x35'
msg += b'\x66\x30\x64\x37\x34\x34\x62\x66\x33\x39\x61\x62\x38\x32\x34\x64'
msg += b'\x32\x38\x62\x38\x34\x64\x31\x39\x65\x64\x33\x31\x31\x63\x06\x34'
msg += b'\x36\x38\x36\x33\x33\x01\x31\x01\x30\x00'
return msg
@@ -137,6 +158,7 @@ def test_send_contact_info1(ClientConnection, MsgContactInfo, MsgContactResp):
pass
assert data == MsgContactResp
def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, MsgContactResp):
s = ClientConnection
try:
@@ -154,7 +176,20 @@ def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, M
pass
assert data == MsgContactResp
def test_send_contact_info3(ClientConnection, MsgContactInfo, MsgContactResp, MsgTimeStampReq):
s = ClientConnection
try:
s.sendall(MsgContactInfo)
data = s.recv(1024)
except TimeoutError:
pass
assert data == MsgContactResp
try:
s.sendall(MsgTimeStampReq)
data = s.recv(1024)
except TimeoutError:
pass
def test_send_contact_resp(ClientConnection, MsgContactResp):
s = ClientConnection
@@ -164,7 +199,7 @@ def test_send_contact_resp(ClientConnection, MsgContactResp):
except TimeoutError:
assert True
else:
- assert data ==''
+ assert data == b''
def test_send_ctrl_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgContollerInd):
s = ClientConnection
@@ -197,3 +232,11 @@ def test_send_inv_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgI
data = s.recv(1024)
except TimeoutError:
pass
def test_ota_req(ClientConnection, MsgOtaUpdateReq):
s = ClientConnection
try:
s.sendall(MsgOtaUpdateReq)
data = s.recv(1024)
except TimeoutError:
pass