Compare commits

...

140 Commits

Author SHA1 Message Date
Stefan Allius
bb793a3f13 Version 0.4.3 2023-10-26 20:37:03 +02:00
Stefan Allius
c3da9d6101 - avoid resetting the daily generation counters 2023-10-26 20:32:08 +02:00
Stefan Allius
0c9f953476 don't initialize must_incr values with zero
- when the connection is just established by the inverter.
  sometimes the inverters send invalid data with the value zero.
  In this case, we no longer initialize the must_incr values,
  to avoid sending invalid data to the mqtt broker and the
  Home Assistant
2023-10-26 20:23:53 +02:00
Stefan Allius
658f42d4fe restart mqtt broker on errors 2023-10-23 21:38:58 +02:00
Stefan Allius
870a965c22 - fix typo 2023-10-23 21:27:58 +02:00
Stefan Allius
0c645812bd catch mqtt errors 2023-10-23 21:25:47 +02:00
Stefan Allius
7b71f25496 Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2023-10-23 21:18:44 +02:00
Stefan Allius
50977d5afd catch Mqtt errors
- we catch mqtt errors, so we can forward messages to
  the tsun cloud even if the mqtt broker is not running
2023-10-23 21:17:17 +02:00
Stefan Allius
ff0979663e fetch broken pipe errors 2023-10-23 21:12:10 +02:00
Stefan Allius
a6ac9864af Merge pull request #20 from LenzGr/patch-1
CHANGELOG.md: fix typos
2023-10-23 19:12:25 +02:00
Lenz Grimmer
2e0331cb88 CHANGELOG.md: fix typos 2023-10-23 09:31:54 +02:00
Stefan Allius
ec54e399fb update default config 2023-10-23 00:14:26 +02:00
Stefan Allius
600362d00b Version 0.4.2 2023-10-21 21:48:46 +02:00
Stefan Allius
341e5c3964 fix typo 2023-10-21 21:40:28 +02:00
Stefan Allius
27a99fccec setup test coverage measurement 2023-10-21 21:39:19 +02:00
Stefan Allius
9264faaf3d avoid resetting daily generation counters 2023-10-21 21:38:36 +02:00
Stefan Allius
342313b76d add more test cases 2023-10-21 18:20:32 +02:00
Stefan Allius
f96091affd Increase test coverage for the Infos class to 100% 2023-10-21 16:46:49 +02:00
Stefan Allius
1df8ae55f0 Version 0.4.1 2023-10-20 21:53:59 +02:00
Stefan Allius
311b36aaf1 avoid resetting total generation counters 2023-10-20 21:51:02 +02:00
Stefan Allius
1642fe5a8a Merge pull request #19 from s-allius/s-allius/issue18
S allius/issue18
2023-10-20 20:09:56 +02:00
Stefan Allius
2b7e671dfc initialize the proxy statistics 2023-10-20 20:08:39 +02:00
Stefan Allius
a1930d32ae initialize the proxy statistics 2023-10-20 19:54:48 +02:00
Stefan Allius
11d7d616fa add static constructor to init proxy statistics 2023-10-20 00:27:21 +02:00
Stefan Allius
5433e18389 Send proxy values when the inverter disconnects 2023-10-19 20:04:41 +02:00
Stefan Allius
9006472264 list supported inverters 2023-10-17 23:40:34 +02:00
Stefan Allius
605a269d84 Add screenshots 2023-10-17 21:18:54 +02:00
Stefan Allius
93392f49c0 Version 0.4.0 2023-10-16 21:56:04 +02:00
Stefan Allius
587ec3d517 Merge pull request #17 from s-allius/s-allius/issue10
S allius/issue10
2023-10-16 21:50:12 +02:00
Stefan Allius
b98313ae23 add test for open close without any msg transfer 2023-10-16 20:33:46 +02:00
Stefan Allius
dc27da2b56 initialize sug_area 2023-10-16 20:30:43 +02:00
Stefan Allius
0c4bd2a03a log forwarded messages on info level 2023-10-16 20:29:44 +02:00
Stefan Allius
ba961fdfd7 Send internal proxy states to Home-Assistant
Fixes #10
2023-10-16 20:29:23 +02:00
Stefan Allius
03aa0c5747 adapt tests 2023-10-15 23:13:43 +02:00
Stefan Allius
10ec949a5b register proxy entities under a unique device (singleton) 2023-10-15 23:05:56 +02:00
Stefan Allius
909d5ca44a add MQTT config for the proxy device 2023-10-15 22:59:18 +02:00
Stefan Allius
335e5d1184 add MQTT config for the proxy device 2023-10-15 22:55:29 +02:00
Stefan Allius
583ec0c2a7 Update changelog 2023-10-15 15:50:20 +02:00
Stefan Allius
3c8e7666d4 add inc and dec methods for proxy statistic counters 2023-10-15 15:46:05 +02:00
Stefan Allius
5f821802a5 Implement HA device for the proxy
- add singleton proxy device
- add static db for proxy counter
- add inc and dec methods
2023-10-15 15:45:25 +02:00
Stefan Allius
fc10912a12 Send internal proxy states to Home-Assistant
Fixes #10
2023-10-15 15:37:59 +02:00
Stefan Allius
4d5da5a91f fix unit tests, which were broken since version 0.3.0 2023-10-12 23:36:46 +02:00
Stefan Allius
a2dfb4c1a6 avoid crash on incomplete config.toml file 2023-10-12 23:04:54 +02:00
Stefan Allius
3adf968a59 add systemtests 2023-10-12 22:39:24 +02:00
Stefan Allius
89d8cecfb5 don't dispatch ignored messages
- move check of inverter serial number from async_stream to message class
- add trace for dropped messages
2023-10-12 22:29:51 +02:00
Stefan Allius
00f735d0ce add a proxy device to home assistant 2023-10-12 13:22:28 +02:00
Stefan Allius
5c940bb7a2 Send internal proxy states to Home-Assistant
Fixes #10

Always register ha entities after connection setup
2023-10-12 13:19:56 +02:00
Stefan Allius
8cb8dea47b Merge pull request #15 from s-allius/s-allius/issue8
S allius/issue8
2023-10-11 21:09:03 +02:00
Stefan Allius
8edbd7928f add docstring 2023-10-11 21:01:57 +02:00
Stefan Allius
f0e9c67a06 fix issue #8 2023-10-11 20:22:33 +02:00
Stefan Allius
505beff6de Do not register non-existent inverter inputs in HA
Fixes #8
2023-10-11 20:01:10 +02:00
Stefan Allius
684e591835 Bump to python version 3.12 2023-10-10 21:54:12 +02:00
Stefan Allius
8b4a94bfcb Version 0.3.0 2023-10-10 20:45:12 +02:00
Stefan Allius
98dab7db99 Version 0.3.0 2023-10-10 20:17:04 +02:00
Stefan Allius
42ae95fd1c remove --no-cache for release candidates (rc) 2023-10-10 20:15:10 +02:00
Stefan Allius
9ffd105278 classify more value for diagnostics 2023-10-10 20:03:05 +02:00
Stefan Allius
97f426269f switch to python 3.12 2023-10-09 22:21:00 +02:00
Stefan Allius
c7bf3f2e44 formatting 2023-10-09 20:48:46 +02:00
Stefan Allius
2781bf3a14 Independence from TSUN 2023-10-09 20:47:05 +02:00
Stefan Allius
fcd3fddb19 optimize and reduce logging 2023-10-09 20:02:30 +02:00
Stefan Allius
88cdcabd6f use abbreviation 'ic' for icon 2023-10-09 19:58:37 +02:00
Stefan Allius
1f2f359188 optimize and reduce logging 2023-10-09 19:57:49 +02:00
Stefan Allius
2dd09288d5 bump aiomqtt version to 1.2.1 2023-10-08 16:32:24 +02:00
Stefan Allius
5c5c3bc926 Merge pull request #14 from s-allius/reduze-size
Reduze size
2023-10-07 23:10:40 +02:00
Stefan Allius
2cf7a2db36 Version 0.2.0 2023-10-07 23:08:39 +02:00
Stefan Allius
3225566b9b fix formating of a log message 2023-10-07 21:24:49 +02:00
Stefan Allius
fa567f68c0 - disable DEBUG log for releases
- support building of release candidates
2023-10-07 21:14:57 +02:00
Stefan Allius
e1536cb697 adapt log levels, optimize expensive hex dump logs 2023-10-07 21:03:49 +02:00
Stefan Allius
b06d832504 set log level to DEBUG for dev versions 2023-10-07 20:58:18 +02:00
Stefan Allius
ed14ed484b add build support for release candidates (rc) 2023-10-07 20:55:26 +02:00
Stefan Allius
ddba3f6285 optimize and update some comments 2023-10-07 16:39:39 +02:00
Stefan Allius
8264cc6d00 reduce container size and security attack surface 2023-10-07 16:20:40 +02:00
Stefan Allius
d5561d393a never log the config 2023-10-06 23:39:02 +02:00
Stefan Allius
a8f1a838c1 never print password in logs 2023-10-06 23:30:04 +02:00
Stefan Allius
b530353e54 Merge pull request #13 from s-allius/class-refact
Class refact
2023-10-06 23:15:13 +02:00
Stefan Allius
271b4f876e Version 0.1.0 2023-10-06 23:12:06 +02:00
Stefan Allius
6816a3e027 remove empty method Messages.__del__ 2023-10-06 23:11:12 +02:00
Stefan Allius
bee25a5f13 disable DEBUG logging for optimized images 2023-10-06 22:59:57 +02:00
Stefan Allius
3db643cb87 send ha registrations only if values have changed 2023-10-06 20:02:38 +02:00
Stefan Allius
c791395e0e remove obsolete logging messages 2023-10-06 19:59:47 +02:00
Stefan Allius
0043e4c147 fix typo 2023-10-06 19:35:41 +02:00
Stefan Allius
f38047c931 update changelog 2023-10-05 22:51:52 +02:00
Stefan Allius
19cbd5a041 fix memory leak, improve traces 2023-10-05 22:48:20 +02:00
Stefan Allius
a48394d057 fix crash on container restart 2023-10-05 21:52:26 +02:00
Stefan Allius
1871f6c8d2 change owner id during startup 2023-10-05 21:25:02 +02:00
Stefan Allius
066459f14e rename class Proxy to Inverter 2023-10-05 19:34:10 +02:00
Stefan Allius
3f14f5cb9e make class Proxy to a derivation of class AsyncStream 2023-10-05 18:47:59 +02:00
Stefan Allius
4c51a159af remove data logs from console 2023-10-03 20:32:46 +02:00
Stefan Allius
450012aac5 Version 0.0.6 2023-10-03 20:23:25 +02:00
Stefan Allius
00f800c17a put packet dumps only into tracer.log 2023-10-03 20:21:59 +02:00
Stefan Allius
421f7a533a dealloc async_stream instances in connection termination 2023-10-03 19:47:09 +02:00
Stefan Allius
6d9be75ce3 dealloc async_stream instances in connection termination
- improve close handler
- clearify logging on disconnection
2023-10-03 19:44:24 +02:00
Stefan Allius
0886b30032 fix control byte output in tx trace 2023-10-03 14:01:42 +02:00
Stefan Allius
d308c3a9fa Revert "fix memory leak on connection aborts"
This reverts commit f097b3350b.
2023-10-03 11:45:17 +02:00
Stefan Allius
38dacf2b97 Revert "use weakrefs to solve circular references"
This reverts commit dfe8bcb01e.
2023-10-03 11:43:08 +02:00
Stefan Allius
700b946acf dealloc async_stream instances in connection termination 2023-10-03 01:35:53 +02:00
Stefan Allius
dfe8bcb01e use weakrefs to solve circular references
- cleanup logging
2023-10-03 01:31:23 +02:00
Stefan Allius
a8449e8417 implement disc method 2023-10-03 01:30:06 +02:00
Stefan Allius
f097b3350b fix memory leak on connection aborts
- use weakrefs
- call Message.close() in the parent class
- call Message.__del__()
- cleanup logging
2023-10-03 00:48:22 +02:00
Stefan Allius
056e182f64 implement close() to release circular references 2023-10-03 00:46:45 +02:00
Stefan Allius
00f1fe01bf disable MQTT debug logs 2023-10-03 00:45:56 +02:00
Stefan Allius
108da0a97e Merge pull request #12 from s-allius/s-allius/issue5
S allius/issue5
2023-10-02 19:49:46 +02:00
Stefan Allius
e5d19ce07d Force MQTT registration
- when the home assistant has set the status to online again
2023-10-02 19:42:42 +02:00
Stefan Allius
464e542a47 clearify comment 2023-10-02 19:38:34 +02:00
Stefan Allius
414eb19ffb clarify comment 2023-10-02 19:35:59 +02:00
Stefan Allius
283bc2257b send autoconfig on HA restart
Fixes #5
2023-10-02 19:31:12 +02:00
Stefan Allius
198146b5f4 Bump aiomqtt to version 1.2.1 2023-10-01 22:41:41 +02:00
Stefan Allius
242653da72 check MQTT component and new icons 2023-10-01 21:54:59 +02:00
Stefan Allius
417b57c99a Version 0.0.5 2023-10-01 21:36:33 +02:00
Stefan Allius
ff9360d2a7 add versioning, build 'rel' and 'dev' variants 2023-10-01 21:30:40 +02:00
Stefan Allius
c570fbabfa add MQTT component configuration 2023-10-01 21:29:33 +02:00
Stefan Allius
7b69de8181 add MQTT component configuration; add MQTT origin 2023-10-01 21:27:36 +02:00
Stefan Allius
5377dd81c8 supports version string 2023-10-01 21:26:53 +02:00
Stefan Allius
64f573a369 supports version string 2023-10-01 21:26:22 +02:00
Stefan Allius
c31c0280e7 prints version on start 2023-10-01 21:22:43 +02:00
Stefan Allius
382d887f56 Update entity icons 2023-10-01 13:25:05 +02:00
Stefan Allius
92d44eaa6b update entity icons 2023-10-01 13:23:57 +02:00
Stefan Allius
c773d5a084 add build script and set docker labels with dyn. data 2023-09-30 22:34:52 +02:00
Stefan Allius
997195ea29 Version 0.0.4 2023-09-30 17:42:07 +02:00
Stefan Allius
b25a4619f3 Merge pull request #9 from s-allius/s-allius/issue6
S allius/issue6
2023-09-30 17:32:16 +02:00
Stefan Allius
030b9794bb add more code documentation 2023-09-30 17:27:51 +02:00
Stefan Allius
bf597c10a5 update changelog 2023-09-30 16:13:12 +02:00
Stefan Allius
0f4d41b466 add some code comments 2023-09-30 16:11:32 +02:00
Stefan Allius
a44c03fc98 add arg '-vv' for pytest to get better error msgs 2023-09-30 16:10:36 +02:00
Stefan Allius
6a6a89d6d3 fix unit test for multiple ha devices 2023-09-30 16:09:35 +02:00
Stefan Allius
7d56f47c10 Move data up interval to the controller block 2023-09-30 12:33:00 +02:00
Stefan Allius
aa1376208a ignore .DS_Store 2023-09-30 01:17:48 +02:00
Stefan Allius
4f1c3a53be support for sub devices
Fixes #6
2023-09-30 01:16:40 +02:00
Stefan Allius
d97d4ece43 cleanup 2023-09-29 16:38:52 +02:00
Stefan Allius
476cdf029e Version 0.0.3 2023-09-29 01:00:02 +02:00
Stefan Allius
dfcd5de166 uncomment at least one inverters definition 2023-09-29 00:46:00 +02:00
Stefan Allius
60fc38b1f0 Merge pull request #4 from s-allius/s-allius/issue2
S allius/issue2
2023-09-29 00:32:43 +02:00
Stefan Allius
5b155c7b4c Getting Started added 2023-09-29 00:25:23 +02:00
Stefan Allius
c0a2a705ec Running Proxy with host UID and GUID
Fixes #2
2023-09-28 21:46:02 +02:00
Stefan Allius
125f681bec move proxy image to ghcr.io 2023-09-27 22:08:58 +02:00
Stefan Allius
8b4ff6173c Version 0.02 2023-09-27 22:06:16 +02:00
Stefan Allius
9273c843d4 add opencontainer labels 2023-09-27 21:43:47 +02:00
Stefan Allius
d48ddcb151 ignore systemtest, since they include private data 2023-09-27 21:42:52 +02:00
Stefan Allius
5bc3ba8727 change default UIT to 1000 2023-09-27 19:11:35 +02:00
Stefan Allius
76cb9a19c7 Voltage and current of inputs added 2023-09-27 19:05:28 +02:00
24 changed files with 1420 additions and 310 deletions

2
.coveragerc Normal file
View File

@@ -0,0 +1,2 @@
[run]
branch = True

3
.gitignore vendored
View File

@@ -4,3 +4,6 @@ mosquitto/**
homeassistant/**
tsun_proxy/**
Doku/**
.DS_Store
.coverage
coverage.xml

View File

@@ -1,6 +1,11 @@
{
"python.testing.pytestArgs": [
"app","system_tests"
"-vv",
"app",
"--cov=app/src",
"--cov-report=xml",
"--cov-report=html",
"system_tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true

View File

@@ -7,14 +7,100 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [0.4.3] - 2023-10-26
### Removed
- fix typos by Lenz Grimmer
- catch mqtt errors, so we can forward messages to tsun even if the mqtt broker is not reachable
- avoid resetting the daily generation counters even if the inverter sends zero values after reconnection
-
## [0.4.2] - 2023-10-21
- count unknown data types in received messages
- count definition errors in our internal tables
- increase test coverage of the Infos class to 100%
- avoid resetting the daily generation counters even if the inverter sends zero values at sunset
## [0.4.1] - 2023-10-20
- fix issue [#18](https://github.com/s-allius/tsun-gen3-proxy/issues/18)
- initialize the proxy statistics
- avoid resetting total generation counters
## [0.4.0] - 2023-10-16
- fix issue [#8](https://github.com/s-allius/tsun-gen3-proxy/issues/8)
- implement [#10](https://github.com/s-allius/tsun-gen3-proxy/issues/10)
- fix: don't dispatch ignored messages so that they are not forwarded
- add systemtests
- fix unit tests, which were broken since version 0.3.0
- add proxy device to home assistant
- add statistic counter to proxy device
- support multiple inverter registration at home assistant
## [0.3.0] - 2023-10-10
❗Due to the definition of values for diagnostics, the MQTT devices of controller and inverter should be deleted in the Home Assistant before updating to version '0.3.0'. After the update, these are automatically created again. The measurement data is retained.
### Changes
- optimize and reduce logging
- switch to python 3.12
- classify some values for diagnostics
## [0.2.0] - 2023-10-07
This version halves the size of the Docker image and reduces the attack surface for security vulnerabilities, by omitting unneeded code. The feature set is exactly the same as the previous release version 0.1.0.
### Changes
- move from slim-bookworm to an alpine base image
- install python requirements with pip wheel
- disable DEBUG log for releases
- support building of release candidates
## [0.1.0] - 2023-10-06
- refactoring of the connection classes
- change user id on startup
- register MQTT topics to home assistant, even if we have multiple inverters
## [0.0.6] - 2023-10-03
- Bump aiomqtt to version 1.2.1
- Force MQTT registration when the home assistant has set the status to online again
- fix control byte output in tx trace
- dealloc async_stream instances in connection termination
## [0.0.5] - 2023-10-01
- Entity icons updated
- Prints version on start
- Prepare for MQTT component != sensor
- Add MQTT origin
## [0.0.4] - 2023-09-30
- With this patch we ignore the setting 'suggested_area' in config.toml, because it makes no sense with multiple devices. We are looking for a better solution without combining all values into one area again in a later version.
❗Due to the change from one device to multiple devices in the Home Assistant, the previous MQTT device should be deleted in the Home Assistant after the update to pre-release '0.0.4'. Afterwards, the proxy must be restarted again to ensure that the sub-devices are created completely.
### Added
-
- Register multiple devices at home-assistant instead of one for all measurements.
Now we register: a Controller, the inverter and up to 4 input devices to home-assistant.
## [0.0.3] - 2023-09-28
### Added
- Fixes Running Proxy with host UID and GUID #2
## [0.0.2] - 2023-09-27
### Added
- Dockerfile opencontainer labels
- Send voltage and current of inputs to mqtt
## [0.0.1] - 2023-09-25
@@ -31,4 +117,4 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- First checkin, the project was born
- First checkin, the project was born

View File

@@ -6,9 +6,9 @@
<p align="center">integration</p>
<p align="center">
<a href="https://opensource.org/licenses/BSD-3-Clause"><img alt="License: BSD-3-Clause" src="https://img.shields.io/badge/License-BSD_3--Clause-green.svg"></a>
<a href="https://www.python.org/downloads/release/python-3110/"><img alt="Supported Python versions" src="https://img.shields.io/badge/python-3.11-blue.svg"></a>
<a href="https://sbtinstruments.github.io/aiomqtt/introduction.html"><img alt="Supported Python versions" src="https://img.shields.io/badge/aiomqtt-1.2.0-lightblue.svg"></a>
<a href="https://toml.io/en/v1.0.0"><img alt="Supported Python versions" src="https://img.shields.io/badge/toml-1.0.0-lightblue.svg"></a>
<a href="https://www.python.org/downloads/release/python-3120/"><img alt="Supported Python versions" src="https://img.shields.io/badge/python-3.12-blue.svg"></a>
<a href="https://sbtinstruments.github.io/aiomqtt/introduction.html"><img alt="Supported aiomqtt versions" src="https://img.shields.io/badge/aiomqtt-1.2.1-lightblue.svg"></a>
<a href="https://toml.io/en/v1.0.0"><img alt="Supported toml versions" src="https://img.shields.io/badge/toml-1.0.0-lightblue.svg"></a>
</p>
@@ -16,13 +16,16 @@
###
# Overview
The "TSUN Gen3 Micro-Inverter" proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker to integrate the inverter into typical home automations.
This proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker. With the proxy, you can easily retrieve real-time values such as power, current and daily energy and integrate the inverter into typical home automations. This works even without an internet connection. The optional connection to the TSUN Cloud can be disabled!
The inverter establishes a TCP connection to the TSUN Cloud to transmit current measured values every 300 seconds. To be able to forward the measurement data to an MQTT broker, the proxy must be looped into this TCP connection.
In detail, the inverter establishes a TCP connection to the TSUN cloud to transmit current measured values every 300 seconds. To be able to forward the measurement data to an MQTT broker, the proxy must be looped into this TCP connection.
Through this, the inverter then establishes a connection to the proxy and the proxy establishes another connection to the TSUN Cloud. The transmitted data is interpreted by the proxy and then passed on to both the TSUN Cloud and the MQTT broker. The connection to the TSUN Cloud is optional and can be switched off in the configuration (default is on). Then no more data is sent to the Internet, but no more remote updates of firmware and operating parameters (e.g. rated power, grid parameters) are possible.
By means of `docker` a simple installation and operation is possible. By using `docker-compose`, a complete stack of proxy, `MQTT-broker` and `home-assistant` can be started easily.
By means of `docker` a simple installation and operation is possible. By using `docker-compose`, a complete stack of proxy, `MQTT-broker` and `home-assistant` can be started easily.
###
This project is not related to the company TSUN. It is a private initiative that aims to connect TSUN inverters with an MQTT broker. There is no support and no warranty from TSUN.
###
```
❗An essential requirement is that the proxy can be looped into the connection
@@ -37,37 +40,43 @@ If you use a Pi-hole, you can also store the host entry in the Pi-hole.
## Features
- supports TSOL MS300, MS350, MS400, MS600, MS700 and MS800 inverters from TSUN
- `MQTT` support
- `Home-Assistant` auto-discovery support
- Self-sufficient island operation without internet
- non-root Docker Container
## Home Assistant Screenshots
Here are some screenshots of how the inverter is displayed in the Home Assistant:
https://github.com/s-allius/tsun-gen3-proxy/wiki/home-assistant#home-assistant-screenshots
## Requirements
- A running Docker engine to host the container
- Ability to loop the proxy into the connection between the inverter and the TSUN cloud
## License
This project is licensed under the [BSD 3-clause License](https://opensource.org/licenses/BSD-3-Clause).
###
# Getting Started
Note the aiomqtt library used is based on the paho-mqtt library, which has a dual license. One of the licenses is the so-called [Eclipse Distribution License v1.0](https://www.eclipse.org/org/documents/edl-v10.php). It is almost word-for-word identical to the BSD 3-clause License. The only differences are:
- One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD)
- One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD)
## Versioning
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Breaking changes will only occur in major `X.0.0` releases.
## Contributing
We're very happy to receive contributions to this project! You can get started by reading [CONTRIBUTING.md](https://github.com/s-allius/tsun-gen3-proxy/blob/main/CONTRIBUTING.md).
## Changelog
The changelog lives in [CHANGELOG.md](https://github.com/s-allius/tsun-gen3-proxy/blob/main/CHANGELOG.md). It follows the principles of [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
To run the proxy, you first need to create the image. You can do this quite simply as follows:
```sh
docker build https://github.com/s-allius/tsun-gen3-proxy.git#main:app -t tsun-proxy
```
after that you can run the image:
```sh
docker run --dns '8.8.8.8' --env 'UID=1000' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
```
You will surely see a message that the configuration file was not found. So that we can create this without admin rights, the `uid` must still be adapted. To do this, simply stop the proxy with ctrl-c and use the `id` command to determine your own UserId:
```sh
% id
uid=1050(sallius) gid=20(staff) ...
```
With this information we can customize the `docker run` statement:
```sh
docker run --dns '8.8.8.8' --env 'UID=1050' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
```
###
# Configuration
@@ -101,6 +110,8 @@ mqtt.passwd = ''
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
# microinverters
@@ -120,3 +131,25 @@ suggested_area = 'balcony' # Optional, suggested installation area for home-a
```
## License
This project is licensed under the [BSD 3-clause License](https://opensource.org/licenses/BSD-3-Clause).
Note the aiomqtt library used is based on the paho-mqtt library, which has a dual license. One of the licenses is the so-called [Eclipse Distribution License v1.0](https://www.eclipse.org/org/documents/edl-v10.php). It is almost word-for-word identical to the BSD 3-clause License. The only differences are:
- One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD)
- One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD)
## Versioning
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Breaking changes will only occur in major `X.0.0` releases.
## Contributing
We're very happy to receive contributions to this project! You can get started by reading [CONTRIBUTING.md](https://github.com/s-allius/tsun-gen3-proxy/blob/main/CONTRIBUTING.md).
## Changelog
The changelog lives in [CHANGELOG.md](https://github.com/s-allius/tsun-gen3-proxy/blob/main/CHANGELOG.md). It follows the principles of [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

View File

@@ -1,3 +1,4 @@
tests/
**/__pycache__
*.pyc
*.pyc
.DS_Store

View File

@@ -1,52 +1,71 @@
ARG SERVICE_NAME="tsun-proxy"
ARG UID=1026
ARG UID=1000
ARG GID=1000
# set base image (host OS)
FROM python:3.11-slim-bookworm AS builder
#
# first stage for our base image
FROM python:3.12-alpine AS base
USER root
RUN pip install --upgrade pip
RUN apk update && \
apk upgrade
RUN apk add --no-cache su-exec
#
# second stage for building wheels packages
FROM base as builder
# copy the dependencies file to the working directory
COPY ./requirements.txt .
RUN apk add --no-cache build-base && \
python -m pip install --no-cache-dir -U pip wheel
# install dependencies
RUN pip install --user -r requirements.txt
# copy the dependencies file to the root dir and install requirements
COPY ./requirements.txt /root/
RUN python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
#
# second unnamed stage
FROM python:3.11-slim-bookworm
# third stage for our runtime image
FROM base as runtime
ARG SERVICE_NAME
ARG VERSION
ARG UID
ARG GID
ARG LOG_LVL
ENV VERSION=$VERSION
ENV SERVICE_NAME=$SERVICE_NAME
ENV UID=$UID
ENV GID=$GID
ENV LOG_LVL=$LOG_LVL
RUN addgroup --gid 1000 $SERVICE_NAME && \
adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --uid $UID $SERVICE_NAME && \
mkdir -p /home/$SERVICE_NAME/log && \
chown $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME/log && \
mkdir -p /home/$SERVICE_NAME/config && \
chown $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME/config
# set the working directory in the container
WORKDIR /home/$SERVICE_NAME
USER $SERVICE_NAME
# copy only the dependencies installation from the 1st stage image
COPY --from=builder --chown=$SERVICE_NAME:$SERVICE_NAME /root/.local /home/$SERVICE_NAME/.local
# copy the content of the local src and config directory to the working directory
COPY --chown=$SERVICE_NAME:$SERVICE_NAME config .
COPY --chown=$SERVICE_NAME:$SERVICE_NAME src .
# update PATH environment variable
ENV HOME=/home/$SERVICE_NAME
ENV PATH=/home/$SERVICE_NAME/.local:$PATH
EXPOSE 5005 5005
VOLUME ["/home/$SERVICE_NAME/log", "/home/$SERVICE_NAME/config"]
LABEL de.allius.image.authors="Stefan Allius <stefan.allius@t-online.de>"
# install the requirements from the wheels packages from the builder stage
COPY --from=builder /root/wheels /root/wheels
RUN python -m pip install --no-cache --no-index /root/wheels/* && \
rm -rf /root/wheels
# copy the content of the local src and config directory to the working directory
COPY --chmod=0700 entrypoint.sh /root/entrypoint.sh
COPY config .
COPY src .
EXPOSE 5005
# command to run on container start
CMD [ "python3", "./server.py" ]
ENTRYPOINT ["/root/entrypoint.sh"]
CMD [ "python3", "./server.py" ]
LABEL org.opencontainers.image.authors="Stefan Allius"
LABEL org.opencontainers.image.source https://github.com/s-allius/tsun-gen3-proxy
LABEL org.opencontainers.image.description 'The "TSUN Gen3 Micro-Inverter" proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker to integrate the inverter into typical home automations'
LABEL org.opencontainers.image.licenses="BSD-3-Clause"
LABEL org.opencontainers.image.vendor="Stefan Allius"

33
app/build.sh Executable file
View File

@@ -0,0 +1,33 @@
#!/bin/bash
set -e
BUILD_DATE=$(date -Iminutes)
VERSION=$(git describe --tags --abbrev=0)
VERSION="${VERSION:1}"
arr=(${VERSION//./ })
MAJOR=${arr[0]}
IMAGE=tsun-gen3-proxy
if [[ $1 == dev ]] || [[ $1 == rc ]] ;then
IMAGE=docker.io/sallius/${IMAGE}
VERSION=${VERSION}-$1
elif [[ $1 == rel ]];then
IMAGE=ghcr.io/s-allius/${IMAGE}
else
echo argument missing!
echo try: $0 '[dev|rc|rel]'
exit 1
fi
echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE
if [[ $1 == dev ]];then
docker build --build-arg "VERSION=${VERSION}" --build-arg "LOG_LVL=DEBUG" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
elif [[ $1 == rc ]];then
docker build --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
elif [[ $1 == rel ]];then
docker build --no-cache --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
docker push ghcr.io/s-allius/tsun-gen3-proxy:latest
docker push ghcr.io/s-allius/tsun-gen3-proxy:${MAJOR}
docker push ghcr.io/s-allius/tsun-gen3-proxy:${VERSION}
fi

View File

@@ -15,6 +15,8 @@ mqtt.passwd = ''
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
# microinverters
inverters.allow_all = true # allow inverters, even if we have no inverter mapping
@@ -22,7 +24,7 @@ inverters.allow_all = true # allow inverters, even if we have no inverter mapp
# inverter mapping, maps a `serial_no` to a `mqtt_id` and defines an optional `suggested_area` for `home-assistant`
#
# for each inverter add a block starting with [inverters."<16-digit serial number>"]
#[inverters."R17xxxxxxxxxxxx1"]
[inverters."R170000000000001"]
#node_id = '' # Optional, MQTT replacement for inverters serial number
#suggested_area = '' # Optional, suggested installation area for home-assistant

24
app/entrypoint.sh Normal file
View File

@@ -0,0 +1,24 @@
#!/bin/sh
set -e
user="$(id -u)"
echo "######################################################"
echo "# prepare: '$SERVICE_NAME' Version:$VERSION"
echo "# for running with UserID:$UID, GroupID:$GID"
echo "#"
if [ "$user" = '0' ]; then
mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
if ! id $SERVICE_NAME &> /dev/null; then
addgroup --gid $GID $SERVICE_NAME 2> /dev/null
adduser -G $SERVICE_NAME -s /bin/false -D -H -g "" -u $UID $SERVICE_NAME
fi
chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
echo "######################################################"
echo "#"
exec su-exec $SERVICE_NAME "$@"
else
exec "$@"
fi

View File

@@ -1,2 +1,2 @@
aiomqtt==1.2.0
schema
aiomqtt==1.2.1
schema==0.7.5

View File

@@ -1,88 +1,40 @@
import logging, traceback, aiomqtt, json
import logging, traceback
from config import Config
#import gc
from messages import Message, hex_dump_memory
from mqtt import Mqtt
logger = logging.getLogger('conn')
logger_mqtt = logging.getLogger('mqtt')
class AsyncStream(Message):
def __init__(self, proxy, reader, writer, addr, stream=None, server_side=True):
def __init__(self, reader, writer, addr, remote_stream, server_side: bool) -> None:
super().__init__()
self.proxy = proxy
self.reader = reader
self.writer = writer
self.remoteStream = stream
self.addr = addr
self.remoteStream = remote_stream
self.server_side = server_side
self.mqtt = Mqtt()
self.unique_id = 0
self.node_id = ''
self.addr = addr
'''
Our puplic methods
'''
async def set_serial_no(self, serial_no : str):
logger_mqtt.info(f'SerialNo: {serial_no}')
if self.unique_id != serial_no:
inverters = Config.get('inverters')
#logger_mqtt.debug(f'Inverters: {inverters}')
if serial_no in inverters:
logger_mqtt.debug(f'SerialNo {serial_no} allowed!')
inv = inverters[serial_no]
self.node_id = inv['node_id']
sug_area = inv['suggested_area']
else:
logger_mqtt.debug(f'SerialNo {serial_no} not known!')
self.node_id = ''
sug_area = ''
if not inverters['allow_all']:
self.unique_id = None
logger_mqtt.error('ignore message from unknow inverter!')
return
self.unique_id = serial_no
ha = Config.get('ha')
self.entitiy_prfx = ha['entity_prefix'] + '/'
discovery_prfx = ha['discovery_prefix'] + '/'
if self.server_side:
try:
for data_json, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, sug_area):
logger_mqtt.debug(f'Register: {data_json}')
await self.mqtt.publish(f"{discovery_prfx}sensor/{self.node_id}{id}/config", data_json)
except Exception:
logging.error(
f"Proxy: Exception:\n"
f"{traceback.format_exc()}")
async def loop(self) -> None:
while True:
try:
await self.__async_read()
if self.id_str:
await self.set_serial_no(self.id_str.decode("utf-8"))
if self.unique_id:
await self.__async_write()
await self.__async_forward()
await self.__async_publ_mqtt()
await self.async_publ_mqtt()
except (ConnectionResetError,
ConnectionAbortedError,
BrokenPipeError,
RuntimeError) as error:
logger.error(f'In loop for {self.addr}: {error}')
logger.warning(f'In loop for {self.addr}: {error}')
self.close()
return
except Exception:
@@ -91,12 +43,18 @@ class AsyncStream(Message):
f"{traceback.format_exc()}")
self.close()
return
def disc(self) -> None:
logger.debug(f'in AsyncStream.disc() {self.addr}')
self.writer.close()
def close(self):
logger.info(f'in async_stream.close() {self.addr}')
logger.debug(f'in AsyncStream.close() {self.addr}')
self.writer.close()
self.proxy = None
self.remoteStream = None
super().close() # call close handler in the parent class
# logger.info (f'AsyncStream refs: {gc.get_referrers(self)}')
'''
@@ -120,26 +78,22 @@ class AsyncStream(Message):
async def __async_forward(self) -> None:
if self._forward_buffer:
if not self.remoteStream:
tsun = Config.get('tsun')
self.remoteStream = await self.proxy.CreateClientStream (self, tsun['host'], tsun['port'])
await self.async_create_remote() # only implmeneted for server side => syncServerStream
if self.remoteStream:
hex_dump_memory(logging.DEBUG, f'Forward to {self.remoteStream.addr}:', self._forward_buffer, len(self._forward_buffer))
hex_dump_memory(logging.INFO, f'Forward to {self.remoteStream.addr}:', self._forward_buffer, len(self._forward_buffer))
self.remoteStream.writer.write (self._forward_buffer)
await self.remoteStream.writer.drain()
self._forward_buffer = bytearray(0)
async def __async_publ_mqtt(self) -> None:
if self.server_side:
db = self.db.db
for key in self.new_data:
if self.new_data[key] and key in db:
data_json = json.dumps(db[key])
logger_mqtt.info(f'{key}: {data_json}')
await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
self.new_data[key] = False
async def async_create_remote(self) -> None:
pass
async def async_publ_mqtt(self) -> None:
pass
def __del__ (self):
logger.debug ("AsyncStream __del__")
logging.debug (f"AsyncStream.__del__ {self.addr}")

View File

@@ -25,7 +25,9 @@ class Config():
'ha': {
'auto_conf_prefix': Use(str),
'discovery_prefix': Use(str),
'entity_prefix': Use(str)},
'entity_prefix': Use(str),
'proxy_node_id': Use(str),
'proxy_unique_id': Use(str)},
'inverters': {
'allow_all' : Use(bool),
@@ -60,7 +62,7 @@ class Config():
config['inverters'] = def_config['inverters'] | usr_config['inverters']
cls.config = cls.conf_schema.validate(config)
logging.debug(f'Readed config: "{cls.config}" ')
#logging.debug(f'Readed config: "{cls.config}" ')
except Exception as error:
logger.error(f'Config.read: {error}')

View File

@@ -1,32 +1,67 @@
import struct, json, logging
import struct, json, logging, os
class Infos:
stat = {}
app_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
@classmethod
def static_init(cls):
    """Initialise the class-wide proxy statistics and complete the
    'proxy' device description with values from the environment."""
    logging.info('Initialize proxy statistics')
    # build a zeroed counter for every info entry of the 'proxy' group
    proxy_counters = {}
    for row in cls.__info_defs.values():
        name = row['name']
        if name[0] == 'proxy':
            proxy_counters[name[1]] = 0
    cls.stat['proxy'] = proxy_counters
    # mirror service name and version into the device registration table
    proxy_dev = cls.__info_devs['proxy']
    proxy_dev['sw'] = cls.version
    proxy_dev['mdl'] = cls.app_name
def __init__(self):
self.db = {}
self.tracer = logging.getLogger('data')
__info_devs={
'proxy': {'singleton': True, 'name':'Proxy', 'mf':'Stefan Allius'},
'controller':{'via':'proxy', 'name':'Controller', 'mdl':0x00092f90, 'mf':0x000927c0, 'sw':0x00092ba8},
'inverter': {'via':'controller', 'name':'Micro Inverter', 'mdl':0x00000032, 'mf':0x00000014, 'sw':0x0000001e},
'input_pv1': {'via':'inverter', 'name':'Module PV1'},
'input_pv2': {'via':'inverter', 'name':'Module PV2', 'dep':{'reg':0x00095b50, 'gte': 2}},
'input_pv3': {'via':'inverter', 'name':'Module PV3', 'dep':{'reg':0x00095b50, 'gte': 3}},
'input_pv4': {'via':'inverter', 'name':'Module PV4', 'dep':{'reg':0x00095b50, 'gte': 4}},
}
__info_defs={
# collector values:
# collector values used for device registration:
0x00092ba8: {'name':['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''},
0x000927c0: {'name':['collector', 'Chip_Type'], 'level': logging.DEBUG, 'unit': ''},
0x00092f90: {'name':['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''},
0x00095a88: {'name':['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''},
0x00095aec: {'name':['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''},
0x000cf850: {'name':['collector', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's'},
0x000005dc: {'name':['collector', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W'},
# inverter values:
0x00095b50: {'name':['collector', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''},
# inverter values used for device registration:
0x0000000a: {'name':['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''},
0x00000014: {'name':['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''},
0x0000001e: {'name':['inverter', 'Version'], 'level': logging.INFO, 'unit': ''},
0x00000028: {'name':['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''},
0x00000032: {'name':['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''},
# env:
0x00000514: {'name':['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha':{'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id':'temp_', 'fmt':'| float','name': 'Inverter Temperature'}},
0x000c3500: {'name':['env', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%' , 'ha':{'dev_cla': None, 'stat_cla': 'measurement', 'id':'signal_', 'fmt':'| float','name': 'Signal Strength', 'icon':'mdi:wifi'}},
# proxy:
0xffffff00: {'name':['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'inv_count_', 'fmt':'| int', 'name': 'Active Inverter Connections', 'icon':'mdi:counter'}},
0xffffff01: {'name':['proxy', 'Unknown_SNR'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'unknown_snr_', 'fmt':'| int', 'name': 'Unknown Serial No', 'icon':'mdi:counter', 'ent_cat':'diagnostic'}},
0xffffff02: {'name':['proxy', 'Unknown_Msg'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'unknown_msg_', 'fmt':'| int', 'name': 'Unknown Msg Type', 'icon':'mdi:counter', 'ent_cat':'diagnostic'}},
0xffffff03: {'name':['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'inv_data_type_', 'fmt':'| int', 'name': 'Invalid Data Type','icon':'mdi:counter', 'ent_cat':'diagnostic'}},
0xffffff04: {'name':['proxy', 'Internal_Error'], 'singleton': True, 'ha':{'dev':'proxy', 'comp' : 'sensor', 'dev_cla': None, 'stat_cla': None, 'id':'intern_err_', 'fmt':'| int', 'name': 'Internal Error', 'icon':'mdi:counter', 'ent_cat':'diagnostic', 'en':False}},
# 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':'| float','name': 'Grid Voltage'}},
# events:
# events
0x00000191: {'name':['events', '401_'], 'level': logging.DEBUG, 'unit': ''},
0x00000192: {'name':['events', '402_'], 'level': logging.DEBUG, 'unit': ''},
0x00000193: {'name':['events', '403_'], 'level': logging.DEBUG, 'unit': ''},
@@ -43,95 +78,225 @@ class Infos:
0x0000019e: {'name':['events', '414_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019f: {'name':['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''},
0x000001a0: {'name':['events', '416_'], 'level': logging.DEBUG, 'unit': ''},
# grid measures:
0x000003e8: {'name':['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'out_volt_', 'fmt':'| float','name': 'Grid Voltage'}},
0x0000044c: {'name':['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'out_cur_', 'fmt':'| float','name': 'Grid Current'}},
0x000004b0: {'name':['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha':{'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id':'out_freq_', 'fmt':'| float','name': 'Grid Frequency'}},
0x00000640: {'name':['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'out_power_', 'fmt':'| float','name': 'Actual Power'}},
0x000003e8: {'name':['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'out_volt_', 'fmt':'| float','name': 'Grid Voltage','ent_cat':'diagnostic'}},
0x0000044c: {'name':['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'out_cur_', 'fmt':'| float','name': 'Grid Current','ent_cat':'diagnostic'}},
0x000004b0: {'name':['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha':{'dev':'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id':'out_freq_', 'fmt':'| float','name': 'Grid Frequency','ent_cat':'diagnostic'}},
0x00000640: {'name':['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'out_power_', 'fmt':'| float','name': 'Power'}},
0x000005dc: {'name':['env', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'inverter', 'dev_cla': None, 'stat_cla': None, 'id':'rated_power_', 'fmt':'| int', 'name': 'Rated Power','ent_cat':'diagnostic'}},
0x00000514: {'name':['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha':{'dev':'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id':'temp_', 'fmt':'| int','name': 'Temperature'}},
# input measures:
0x000006a4: {'name':['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V'},
0x00000708: {'name':['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A'},
0x0000076c: {'name':['input', 'pv1', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv1_','name': 'Power PV1', 'val_tpl' :"{{ (value_json['pv1']['Power'] | float)}}"}},
0x000007d0: {'name':['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V'},
0x00000834: {'name':['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A'},
0x00000898: {'name':['input', 'pv2', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv2_','name': 'Power PV2', 'val_tpl' :"{{ (value_json['pv2']['Power'] | float)}}"}},
0x000008fc: {'name':['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V'},
0x00000960: {'name':['input', 'pv3', 'Curent'], 'level': logging.DEBUG, 'unit': 'A'},
0x000009c4: {'name':['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv3_','name': 'Power PV3', 'val_tpl' :"{{ (value_json['pv3']['Power'] | float)}}"}},
0x00000a28: {'name':['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V'},
0x00000a8c: {'name':['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A'},
0x00000af0: {'name':['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv4_','name': 'Power PV4', 'val_tpl' :"{{ (value_json['pv4']['Power'] | float)}}"}},
0x00000c1c: {'name':['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv1_','name': 'Daily Generation PV1', 'val_tpl' :"{{ (value_json['pv1']['Daily_Generation'] | float)}}"}},
0x00000c80: {'name':['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv1_','name': 'Total Generation PV1', 'val_tpl' :"{{ (value_json['pv1']['Total_Generation'] | float)}}"}},
0x00000ce4: {'name':['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv2_','name': 'Daily Generation PV2', 'val_tpl' :"{{ (value_json['pv2']['Daily_Generation'] | float)}}"}},
0x00000d48: {'name':['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv2_','name': 'Total Generation PV2', 'val_tpl' :"{{ (value_json['pv2']['Total_Generation'] | float)}}"}},
0x00000dac: {'name':['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv3_','name': 'Daily Generation PV3', 'val_tpl' :"{{ (value_json['pv3']['Daily_Generation'] | float)}}"}},
0x00000e10: {'name':['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv3_','name': 'Total Generation PV3', 'val_tpl' :"{{ (value_json['pv3']['Total_Generation'] | float)}}"}},
0x00000e74: {'name':['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv4_','name': 'Daily Generation PV4', 'val_tpl' :"{{ (value_json['pv4']['Daily_Generation'] | float)}}"}},
0x00000ed8: {'name':['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv4_','name': 'Total Generation PV4', 'val_tpl' :"{{ (value_json['pv4']['Total_Generation'] | float)}}"}},
0x000006a4: {'name':['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv1_', 'val_tpl' :"{{ (value_json['pv1']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x00000708: {'name':['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv1_', 'val_tpl' :"{{ (value_json['pv1']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x0000076c: {'name':['input', 'pv1', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv1_','val_tpl' :"{{ (value_json['pv1']['Power'] | float)}}"}},
0x000007d0: {'name':['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv2_', 'val_tpl' :"{{ (value_json['pv2']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x00000834: {'name':['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv2_', 'val_tpl' :"{{ (value_json['pv2']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x00000898: {'name':['input', 'pv2', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv2_','val_tpl' :"{{ (value_json['pv2']['Power'] | float)}}"}},
0x000008fc: {'name':['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv3_', 'val_tpl' :"{{ (value_json['pv3']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x00000960: {'name':['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv3_', 'val_tpl' :"{{ (value_json['pv3']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x000009c4: {'name':['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv3_','val_tpl' :"{{ (value_json['pv3']['Power'] | float)}}"}},
0x00000a28: {'name':['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv4_', 'val_tpl' :"{{ (value_json['pv4']['Voltage'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x00000a8c: {'name':['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv4_', 'val_tpl' :"{{ (value_json['pv4']['Current'] | float)}}", 'icon':'mdi:gauge','ent_cat':'diagnostic'}},
0x00000af0: {'name':['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv4_','val_tpl' :"{{ (value_json['pv4']['Power'] | float)}}"}},
0x00000c1c: {'name':['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv1_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
0x00000c80: {'name':['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv1_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
0x00000ce4: {'name':['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv2_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
0x00000d48: {'name':['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv2_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
0x00000dac: {'name':['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv3_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
0x00000e10: {'name':['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv3_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
0x00000e74: {'name':['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv4_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant', 'must_incr':True}},
0x00000ed8: {'name':['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv4_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv4']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power', 'must_incr':True}},
# total:
0x00000b54: {'name':['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_', 'fmt':'| float','name': 'Daily Generation'}},
0x00000bb8: {'name':['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_', 'fmt':'| float','name': 'Total Generation', 'icon':'mdi:solar-power'}},
0x000c96a8: {'name':['total', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha':{'dev_cla': 'duration', 'stat_cla': 'measurement', 'id':'power_on_time_', 'name': 'Power on Time', 'val_tpl':"{{ (value_json['Power_On_Time'] | float)}}", 'nat_prc':'3'}},
0x00000b54: {'name':['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev':'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_', 'fmt':'| float','name': 'Daily Generation', 'icon':'mdi:solar-power-variant', 'must_incr':True}},
0x00000bb8: {'name':['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev':'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_', 'fmt':'| float','name': 'Total Generation', 'icon':'mdi:solar-power', 'must_incr':True}},
# controller:
0x000c3500: {'name':['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%' , 'ha':{'dev':'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id':'signal_', 'fmt':'| int', 'name': 'Signal Strength', 'icon':'mdi:wifi'}},
0x000c96a8: {'name':['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha':{'dev':'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id':'power_on_time_', 'name': 'Power on Time', 'val_tpl':"{{ (value_json['Power_On_Time'] | float)}}", 'nat_prc':'3','ent_cat':'diagnostic'}},
0x000cf850: {'name':['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha':{'dev':'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id':'data_up_intval_', 'fmt':'| int', 'name': 'Data Up Interval', 'icon':'mdi:update','ent_cat':'diagnostic'}},
}
def ha_confs(self, prfx="tsun/garagendach/", snr='123', sug_area =''):
def dev_value(self, idx: str | int) -> str | int | float | None:
    """Return a device value for home-assistant registration.

    idx:str ==> treated as a literal and returned unchanged, without a
                database lookup
    idx:int ==> key into __info_defs; the referenced value is fetched
                from the class-wide statistics (singleton rows) or from
                the per-instance database. Returns None when the value
                is not stored (yet) or the key is unknown.
    """
    if type(idx) is str:
        # a plain string is a fixed value
        return idx
    if idx not in self.__info_defs:
        return None                    # unknown idx, not in __info_defs
    row = self.__info_defs[idx]
    # singleton rows live in the shared statistics, everything else in
    # the instance database
    node = self.stat if row.get('singleton') else self.db
    for key in row['name']:
        if key not in node:
            return None                # value not found in the database
        node = node[key]
    return node                        # value of the requested entry
def ignore_this_device(self, dep: dict) -> bool:
    """Check the dependency equation given in *dep*.

    Returns False only when the referenced register holds a truthy value
    that satisfies the 'gte'/'less_eq' comparison; True in every other
    case (no 'reg' key, missing value, unknown comparison operator).
    """
    if 'reg' not in dep:
        return True
    value = self.dev_value(dep['reg'])
    if not value:
        return True
    if 'gte' in dep:
        return not value >= dep['gte']
    if 'less_eq' in dep:
        return not value <= dep['less_eq']
    return True
def ha_confs(self, ha_prfx, inv_node_id, inv_snr, proxy_node_id, proxy_unique_id, sug_area =''):
'''Generator function yields a json register struct for home-assistant auto configuration and a unique entity string
arguments:
prfx:str ==> MQTT prefix for the home assistant 'stat_t string
snr:str ==> serial number of the inverter, used to build unique entity strings
sug_area:str ==> suggested area string from the config file'''
tab = self.__info_defs
for key in tab:
row = tab[key]
if 'singleton' in row and row['singleton']:
node_id = proxy_node_id
snr = proxy_unique_id
else:
node_id = inv_node_id
snr = inv_snr
prfx = ha_prfx + node_id
#check if we have details for home assistant
if 'ha' in row:
ha = row['ha']
attr = {}
if 'comp' in ha:
component = ha['comp']
else:
component = 'sensor'
attr = {} # dict to collect all the sensor entity details
if 'name' in ha:
attr['name'] = ha['name'] # eg. 'name': "Actual Power"
attr['name'] = ha['name'] # take the entity name from the ha dict
else:
attr['name'] = row['name'][-1] # eg. 'name': "Actual Power"
attr['name'] = row['name'][-1] # otherwise take a name from the name array
attr['stat_t'] = prfx +row['name'][0] # eg. 'stat_t': "tsun/garagendach/grid"
attr['dev_cla'] = ha['dev_cla'] # eg. 'dev_cla': 'power'
attr['stat_cla'] = ha['stat_cla'] # eg. 'stat_cla': "measurement"
attr['uniq_id'] = ha['id']+snr # eg. 'uniq_id':'out_power_123'
attr['uniq_id'] = ha['id']+snr # build the 'uniq_id' from the id str + the serial no of the inverter
if 'val_tpl' in ha:
attr['val_tpl'] = ha['val_tpl'] # eg. 'val_tpl': "{{ value_json['Output_Power']|float }}"
attr['val_tpl'] = ha['val_tpl'] # get value template for complexe data structures
elif 'fmt' in ha:
attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }}"
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.__info_defs: the row for {key} do not have a 'val_tpl' nor a 'fmt' value")
if 'unit' in row:
attr['unit_of_meas'] = row['unit'] # eg. 'unit_of_meas': 'W'
# add unit_of_meas only, if status_class isn't none. If status_cla is None we want a number format and not line graph in home assistant.
# A unit will change the number format to a line graph
if 'unit' in row and attr['stat_cla'] != None:
attr['unit_of_meas'] = row['unit'] # optional add a 'unit_of_meas' e.g. 'W'
if 'icon' in ha:
attr['icon'] = ha['icon'] # eg. 'icon':'mdi:solar-power'
attr['ic'] = ha['icon'] # optional add an icon for the entity
if 'nat_prc' in ha:
attr['suggested_display_precision'] = ha['nat_prc']
attr['sug_dsp_prc'] = ha['nat_prc'] # optional add the precison of floats
if 'ent_cat' in ha:
attr['ent_cat'] = ha['ent_cat'] # diagnostic, config
# enabled_by_default is deactivated, since it avoid the via setup of the devices
# it seems, that there is a bug in home assistant. tested with 'Home Assistant 2023.10.4'
#if 'en' in ha: # enabled_by_default
# attr['en'] = ha['en']
# eg. 'dev':{'name':'Microinverter','mdl':'MS-600','ids':["inverter_123"],'mf':'TSUN','sa': 'auf Garagendach'}
# attr['dev'] = {'name':'Microinverter','mdl':'MS-600','ids':[f'inverter_{snr}'],'mf':'TSUN','sa': 'auf Garagendach'}
dev = {}
dev['name'] = 'Microinverter' #fixme
dev['mdl'] = 'MS-600' #fixme
dev['ids'] = [f'inverter_{snr}']
dev['mf'] = 'TSUN' #fixme
dev['sa'] = sug_area
dev['sw'] = '0.01' #fixme
dev['hw'] = 'Hw0.01' #fixme
#dev['via_device'] = #fixme
attr['dev'] = dev
if 'dev' in ha:
device = self.__info_devs[ha['dev']]
yield json.dumps (attr), attr['uniq_id']
if 'dep' in device and self.ignore_this_device(device['dep']):
continue
dev = {}
# the same name for 'name' and 'suggested area', so we get dedicated devices in home assistant with short value name and headline
if sug_area == '' or ('singleton' in device and device['singleton']):
dev['name'] = device['name']
dev['sa'] = device['name']
else:
dev['name'] = device['name']+' - '+sug_area
dev['sa'] = device['name']+' - '+sug_area
if 'via' in device: # add the link to the parent device
via = device['via']
if via in self.__info_devs:
via_dev = self.__info_devs[via]
if 'singleton' in via_dev and via_dev['singleton']:
dev['via_device'] = via
else:
dev['via_device'] = f"{via}_{snr}"
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.__info_defs: the row for {key} has an invalid via value: {via}")
for key in ('mdl','mf', 'sw', 'hw'): # add optional values fpr 'modell', 'manufaturer', 'sw version' and 'hw version'
if key in device:
data = self.dev_value(device[key])
if data is not None: dev[key] = data
if 'singleton' in device and device['singleton']:
dev['ids'] = [f"{ha['dev']}"]
else:
dev['ids'] = [f"{ha['dev']}_{snr}"]
attr['dev'] = dev
origin = {}
origin['name'] = self.app_name
origin['sw'] = self.version
attr['o'] = origin
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.__info_defs: the row for {key} missing 'dev' value for ha register")
yield json.dumps (attr), component, node_id, attr['uniq_id']
def inc_counter(self, counter: str) -> None:
    """Increment the proxy statistics counter *counter* by one."""
    self.stat['proxy'][counter] += 1
def dec_counter(self, counter: str) -> None:
    """Decrement the proxy statistics counter *counter* by one."""
    self.stat['proxy'][counter] -= 1
def __key_obj(self, id) -> list:
d = self.__info_defs.get(id, {'name': None, 'level': logging.DEBUG, 'unit': ''})
return d['name'], d['level'], d['unit']
if 'ha' in d and 'must_incr' in d['ha']:
must_incr = d['ha']['must_incr']
else:
must_incr = False
return d['name'], d['level'], d['unit'], must_incr
def parse(self, buf):
def parse(self, buf) -> None:
'''parse a data sequence received from the inverter and stores the values in Infos.db
buf: buffer of the sequence to parse'''
result = struct.unpack_from('!l', buf, 0)
elms = result[0]
i = 0
@@ -141,7 +306,7 @@ class Infos:
info_id = result[0]
data_type = result[1]
ind += 5
keys, level, unit = self.__key_obj(info_id)
keys, level, unit, must_incr = self.__key_obj(info_id)
if data_type==0x54: # 'T' -> Pascal-String
str_len = buf[ind]
@@ -159,6 +324,11 @@ class Infos:
elif data_type==0x46: # 'F' -> float32
result = round(struct.unpack_from(f'!f', buf, ind)[0],2)
ind += 4
else:
self.inc_counter('Invalid_Data_Type')
logging.error(f"Infos.parse: data_type: {data_type} not supported")
return
if keys:
@@ -171,15 +341,22 @@ class Infos:
dict = dict[key]
name += key + '.'
update = keys[-1] not in dict or dict[keys[-1]] != result
dict[keys[-1]] = result
if keys[-1] not in dict:
update = (not must_incr or result>0)
else:
if must_incr:
update = dict[keys[-1]] < result
else:
update = dict[keys[-1]] != result
if update: dict[keys[-1]] = result
name += keys[-1]
yield keys[0], update
else:
update = False
name = str(f'info-id.0x{info_id:x}')
self.tracer.log(level, f'{name} : {result}{unit}')
self.tracer.log(level, f'{name} : {result}{unit} update: {update}')
i +=1

127
app/src/inverter.py Normal file
View File

@@ -0,0 +1,127 @@
import asyncio, logging, traceback, json
from config import Config
from async_stream import AsyncStream
from mqtt import Mqtt
from aiomqtt import MqttCodeError
#import gc
#logger = logging.getLogger('conn')
logger_mqtt = logging.getLogger('mqtt')
class Inverter(AsyncStream):
def __init__ (self, reader, writer, addr):
    '''Server-side stream handler for one inverter connection.

    reader/writer: asyncio stream pair of the accepted connection
    addr:          peer address tuple, used for logging
    '''
    # no remote (TSUN cloud) stream yet; server_side=True
    super().__init__(reader, writer, addr, None, True)
    self.mqtt = Mqtt()       # MQTT client wrapper
    self.ha_restarts = -1    # sentinel: differs from mqtt.ha_restarts until first publish
    ha = Config.get('ha')
    # cache the MQTT topic prefixes from the config
    self.entity_prfx = ha['entity_prefix'] + '/'
    self.discovery_prfx = ha['discovery_prefix'] + '/'
    self.proxy_node_id = ha['proxy_node_id'] + '/'
    self.proxy_unique_id = ha['proxy_unique_id']
async def server_loop(self, addr):
'''Loop for receiving messages from the inverter (server-side)'''
logging.info(f'Accept connection from {addr}')
self.inc_counter ('Inverter_Cnt')
await self.loop()
self.dec_counter ('Inverter_Cnt')
logging.info(f'Server loop stopped for {addr}')
# if the server connection closes, we also have to disconnect the connection to te TSUN cloud
if self.remoteStream:
logging.debug ("disconnect client connection")
self.remoteStream.disc()
try:
await self.__async_publ_mqtt_packet('proxy')
except: pass
async def client_loop(self, addr):
'''Loop for receiving messages from the TSUN cloud (client-side)'''
await self.remoteStream.loop()
logging.info(f'Client loop stopped for {addr}')
# if the client connection closes, we don't touch the server connection. Instead we erase the client
# connection stream, thus on the next received packet from the inverter, we can establish a new connection
# to the TSUN cloud
self.remoteStream.remoteStream = None # erase backlink to inverter instance
self.remoteStream = None # than erase client connection
async def async_create_remote(self) -> None:
'''Establish a client connection to the TSUN cloud'''
tsun = Config.get('tsun')
host = tsun['host']
port = tsun['port']
addr = (host, port)
try:
logging.info(f'Connected to {addr}')
connect = asyncio.open_connection(host, port)
reader, writer = await connect
self.remoteStream = AsyncStream(reader, writer, addr, self, False)
asyncio.create_task(self.client_loop(addr))
except ConnectionRefusedError as error:
logging.info(f'{error}')
except Exception:
logging.error(
f"Inverter: Exception for {addr}:\n"
f"{traceback.format_exc()}")
async def async_publ_mqtt(self) -> None:
'''puplish data to MQTT broker'''
# check if new inverter or collector infos are available or when the home assistant has changed the status back to online
try:
if (('inverter' in self.new_data and self.new_data['inverter']) or
('collector' in self.new_data and self.new_data['collector']) or
self.mqtt.ha_restarts != self.ha_restarts):
await self.__register_home_assistant()
self.ha_restarts = self.mqtt.ha_restarts
for key in self.new_data:
await self.__async_publ_mqtt_packet(key)
except MqttCodeError as error:
logging.error(f'Mqtt except: {error}')
except Exception:
logging.error(
f"Inverter: Exception:\n"
f"{traceback.format_exc()}")
async def __async_publ_mqtt_packet(self, key):
db = self.db.db
stat = self.db.stat
if self.new_data[key]:
if key in db:
data_json = json.dumps(db[key])
node_id = self.node_id
elif key in stat:
data_json = json.dumps(stat[key])
node_id = self.proxy_node_id
else:
return
logger_mqtt.debug(f'{key}: {data_json}')
await self.mqtt.publish(f"{self.entity_prfx}{node_id}{key}", data_json)
self.new_data[key] = False
async def __register_home_assistant(self) -> None:
'''register all our topics at home assistant'''
for data_json, component, node_id, id in self.db.ha_confs(self.entity_prfx, self.node_id, self.unique_id, self.proxy_node_id, self.proxy_unique_id, self.sug_area):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}")
await self.mqtt.publish(f"{self.discovery_prfx}{component}/{node_id}{id}/config", data_json)
def close(self) -> None:
logging.debug(f'Inverter.close() {self.addr}')
super().close() # call close handler in the parent class
# logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
def __del__ (self):
logging.debug ("Inverter.__del__")
super().__del__()

View File

@@ -11,29 +11,31 @@ keys=console_formatter,file_formatter
level=DEBUG
handlers=console_handler,file_handler_name1
[logger_mesg]
level=DEBUG
handlers=console_handler,file_handler_name1,file_handler_name2
propagate=0
qualname=msg
[logger_conn]
level=DEBUG
handlers=console_handler,file_handler_name1,file_handler_name2
handlers=console_handler,file_handler_name1
propagate=0
qualname=conn
[logger_mqtt]
level=INFO
handlers=console_handler,file_handler_name1
propagate=0
qualname=mqtt
[logger_data]
level=DEBUG
handlers=console_handler,file_handler_name1,file_handler_name2
handlers=file_handler_name1
propagate=0
qualname=data
[logger_mqtt]
[logger_mesg]
level=DEBUG
handlers=console_handler,file_handler_name1,file_handler_name2
handlers=file_handler_name2
propagate=0
qualname=mqtt
qualname=msg
[logger_tracer]
level=INFO
@@ -43,12 +45,12 @@ qualname=tracer
[handler_console_handler]
class=StreamHandler
level=INFO
level=DEBUG
formatter=console_formatter
[handler_file_handler_name1]
class=handlers.TimedRotatingFileHandler
level=NOTSET
level=INFO
formatter=file_formatter
args=('log/proxy.log', when:='midnight')
@@ -60,9 +62,9 @@ args=('log/trace.log', when:='midnight')
[formatter_console_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s'
datefmt='%d-%m-%Y %H:%M:%S
datefmt='%Y-%m-%d %H:%M:%S
[formatter_file_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s'
datefmt='%d-%m-%Y %H:%M:%S
datefmt='%Y-%m-%d %H:%M:%S

View File

@@ -18,7 +18,8 @@ def hex_dump_memory(level, info, data, num):
lines = []
lines.append(info)
tracer = logging.getLogger('tracer')
if not tracer.isEnabledFor(level): return
#data = list((num * ctypes.c_byte).from_address(ptr))
@@ -79,6 +80,9 @@ class Message(metaclass=IterRegistry):
self.header_valid = False
self.header_len = 0
self.data_len = 0
self.unique_id = 0
self.node_id = ''
self.sug_area = ''
self._recv_buffer = b''
self._send_buffer = bytearray(0)
self._forward_buffer = bytearray(0)
@@ -100,6 +104,46 @@ class Message(metaclass=IterRegistry):
'''
Our puplic methods
'''
def close(self) -> None:
    '''Break the reference cycle held by the dispatch table.'''
    # we have references to methods of this class in self.switch
    # so we have to erase self.switch, otherwise this instance can't be
    # deallocated by the garbage collector ==> we get a memory leak
    del self.switch
def inc_counter(self, counter: str) -> None:
    '''Increment the statistics counter *counter* and mark the 'proxy'
    data group as changed so it gets re-published via MQTT.'''
    self.db.inc_counter(counter)
    self.new_data['proxy'] = True
def dec_counter(self, counter: str) -> None:
    '''Decrement the statistics counter *counter* and mark the 'proxy'
    data group as changed so it gets re-published via MQTT.'''
    self.db.dec_counter(counter)
    self.new_data['proxy'] = True
def set_serial_no(self, serial_no: str) -> None:
    '''Validate the received serial number against the configuration.

    Looks *serial_no* up in the configured 'inverters' table and stores
    its node_id and suggested area. Unknown serial numbers are accepted
    only when 'allow_all' is configured; otherwise self.unique_id is set
    to None, which makes the dispatcher drop packets from this source
    and the 'Unknown_SNR' counter is incremented.
    '''
    if self.unique_id == serial_no:
        # serial number unchanged - nothing to look up again
        logger.debug(f'SerialNo: {serial_no}')
    else:
        inverters = Config.get('inverters')
        if serial_no in inverters:
            inv = inverters[serial_no]
            self.node_id = inv['node_id']
            self.sug_area = inv['suggested_area']
            logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}')
        else:
            self.node_id = ''
            self.sug_area = ''
            # idiom: .get() covers both 'missing key' and 'set to false'
            if not inverters.get('allow_all'):
                self.inc_counter('Unknown_SNR')
                self.unique_id = None
                # fix: typo 'unknow' -> 'unknown' in the log message
                logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})')
                return
            logger.debug(f'SerialNo {serial_no} not known but accepted!')

        self.unique_id = serial_no
def read(self) -> None:
self._read()
@@ -107,6 +151,11 @@ class Message(metaclass=IterRegistry):
self.__parse_header(self._recv_buffer, len(self._recv_buffer))
if self.header_valid and len(self._recv_buffer) >= (self.header_len+self.data_len):
hex_dump_memory(logging.INFO, f'Received from {self.addr}:', self._recv_buffer, self.header_len+self.data_len)
if self.id_str:
self.set_serial_no(self.id_str.decode("utf-8"))
self.__dispatch_msg()
self.__flush_recv_msg()
return
@@ -186,7 +235,7 @@ class Message(metaclass=IterRegistry):
self.send_msg_ofs = len (self._send_buffer)
self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB', 0, self.id_str, ctrl, self.msg_id)
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'tx') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' )
logger.info(self.__flow_str(self.server_side, 'tx') + f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}' )
def __finish_send_msg(self) -> None:
_len = len(self._send_buffer) - self.send_msg_ofs
@@ -195,11 +244,12 @@ class Message(metaclass=IterRegistry):
def __dispatch_msg(self) -> None:
hex_dump_memory(logging.INFO, f'Received from {self.addr}:', self._recv_buffer, self.header_len+self.data_len)
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'rx') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' )
fnc()
if self.unique_id:
logger.info(self.__flow_str(self.server_side, 'rx') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' )
fnc()
else:
logger.info(self.__flow_str(self.server_side, 'drop') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' )
def __flush_recv_msg(self) -> None:
@@ -287,11 +337,10 @@ class Message(metaclass=IterRegistry):
def msg_unknown(self):
    '''Handler for unexpected message ids: count it and forward unchanged.'''
    # NOTE(review): typo 'Unknow' is kept here - it is runtime log text
    logger.warning (f"Unknow Msg: ID:{self.msg_id}")
    self.inc_counter('Unknown_Msg')
    # unknown messages are still forwarded to the peer untouched
    self.forward(self._recv_buffer, self.header_len+self.data_len)
def __del__ (self):
    # trace object destruction in the debug log
    logger.debug ("Messages __del__")

View File

@@ -16,13 +16,22 @@ class Singleton(type):
class Mqtt(metaclass=Singleton):
client = None
def __init__(self):
logger_mqtt.debug(f'MQTT: __init__')
loop = asyncio.get_event_loop()
self.task = loop.create_task(self.__loop())
self.ha_restarts = 0
@property
def ha_restarts(self):
    # counts received Home-Assistant 'online' status messages; consumers
    # compare it against their own copy to detect a HA restart and
    # re-register their auto-discovery topics
    return self._ha_restarts

@ha_restarts.setter
def ha_restarts(self, value):
    self._ha_restarts = value
def __del__(self):
logger_mqtt.debug(f'MQTT: __del__')
@@ -55,7 +64,11 @@ class Mqtt(metaclass=Singleton):
async with self.client.messages() as messages:
await self.client.subscribe(f"{ha['auto_conf_prefix']}/status")
async for message in messages:
logger_mqtt.info(f'Home-Assistant Status: {message.payload.decode("UTF-8")}')
status = message.payload.decode("UTF-8")
logger_mqtt.info(f'Home-Assistant Status: {status}')
if status == 'online':
self.ha_restarts += 1
except aiomqtt.MqttError:
logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...")
await asyncio.sleep(interval)

View File

@@ -1,43 +0,0 @@
import asyncio, logging, traceback
from async_stream import AsyncStream
class Proxy:
    '''Couple one inverter connection (server side) with a connection to
    the TSUN cloud (client side), each wrapped in an AsyncStream.'''

    def __init__(self, reader, writer, addr):
        self.ServerStream = AsyncStream(self, reader, writer, addr)
        self.ClientStream = None

    async def server_loop(self, addr):
        '''Run the receive loop of the inverter connection until it ends.'''
        logging.info(f'Accept connection from {addr}')
        await self.ServerStream.loop()
        logging.info(f'Close server connection {addr}')
        # a closed server connection also takes the cloud connection down
        client = self.ClientStream
        if client:
            logging.debug ("close client connection")
            client.close()

    async def client_loop(self, addr):
        '''Run the receive loop of the cloud connection until it ends.'''
        await self.ClientStream.loop()
        logging.info(f'Close client connection {addr}')
        # drop only the client side; the server side stays alive
        self.ServerStream.remoteStream = None
        self.ClientStream = None

    async def CreateClientStream(self, stream, host, port):
        '''Connect to host:port and wrap the socket in an AsyncStream.'''
        addr = (host, port)
        try:
            logging.info(f'Connected to {addr}')
            reader, writer = await asyncio.open_connection(host, port)
            self.ClientStream = AsyncStream(self, reader, writer, addr, stream, server_side=False)
            asyncio.create_task(self.client_loop(addr))
        except ConnectionRefusedError as error:
            logging.info(f'{error}')
        except Exception:
            logging.error(
                f"Proxy: Exception for {addr}:\n"
                f"{traceback.format_exc()}")
        return self.ClientStream

    def __del__(self):
        logging.debug ("Proxy __del__")

View File

@@ -1,17 +1,17 @@
import logging, asyncio, signal, functools, os
#from logging.handlers import TimedRotatingFileHandler
from logging import config
from async_stream import AsyncStream
from proxy import Proxy
from inverter import Inverter
from config import Config
from mqtt import Mqtt
from infos import Infos
async def handle_client(reader, writer):
'''Handles a new incoming connection and starts an async loop'''
addr = writer.get_extra_info('peername')
await Proxy(reader, writer, addr).server_loop(addr)
await Inverter(reader, writer, addr).server_loop(addr)
def handle_SIGTERM(loop):
@@ -33,8 +33,16 @@ def handle_SIGTERM(loop):
logging.info('Shutdown complete')
def get_log_level() -> int:
    '''Return the logging level selected via the LOG_LVL environment variable.

    Recognized values: DEBUG, INFO, WARN/WARNING, ERROR. Any other value,
    or an unset variable, falls back to logging.INFO.
    '''
    level_map = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARN': logging.WARNING,     # original short spelling, kept for compatibility
        'WARNING': logging.WARNING,  # generalization: full name also accepted
        'ERROR': logging.ERROR,      # generalization
    }
    return level_map.get(os.getenv('LOG_LVL', 'INFO'), logging.INFO)
if __name__ == "__main__":
@@ -42,17 +50,28 @@ if __name__ == "__main__":
# Setup our daily, rotating logger
#
serv_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
logging.config.fileConfig('logging.ini')
logging.info(f'Server "{serv_name}" will be started')
logging.info(f'Server "{serv_name} - {version}" will be started')
# set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
log_level = get_log_level()
logging.getLogger().setLevel(log_level)
logging.getLogger('msg').setLevel(log_level)
logging.getLogger('conn').setLevel(log_level)
logging.getLogger('data').setLevel(log_level)
# read config file
Config.read()
loop = asyncio.get_event_loop()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# call Mqtt singleton to establisch the connection to the mqtt broker
mqtt = Mqtt()
# initialize the proxy statistics
Infos.static_init()
#
# Register some UNIX Signal handler for a gracefully server shutdown on Docker restart and stop

View File

@@ -12,6 +12,88 @@ def ContrDataSeq(): # Get Time Request message
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
return msg
@pytest.fixture
def InvDataSeq(): # Data indication from the controller
    '''Inverter identity sequence: five Pascal-string (0x54 'T') records
    holding product name "Microinv", manufacturer "TSUN", version
    "V5.0.11", serial "T17E7307021D006A" and model "TSOL-MS600".'''
    msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
    msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
    return msg
@pytest.fixture
def InvalidDataSeq(): # Data indication from the controller
    '''Same bytes as InvDataSeq except the second record's data type is
    0x64 instead of 0x54 - an unsupported type that should trigger the
    'Invalid_Data_Type' counter in Infos.parse.'''
    msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x64\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
    msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
    return msg
@pytest.fixture
def InvDataSeq2(): # Data indication from the controller
    '''Full telemetry block with non-zero generation values: a mix of
    0x53/0x49 integer records and 0x46 (float32) measurement records,
    used to exercise Infos.parse on a realistic runtime data message.'''
    msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
    msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
    msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
    msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
    msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
    msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
    msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
    msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
    msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
    msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
    msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
    msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
    msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
    msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
    msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
    msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
    msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
    msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
    msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
    msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
    msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
    msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
    msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
    msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
    msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
    msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
    msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
    msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
    msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
    msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
    msg += b'\x53\x00\x00'
    return msg
@pytest.fixture
def InvDataSeq2_Zero(): # Data indication from the controller
    '''Variant of InvDataSeq2 with several measurement/generation records
    zeroed out - presumably modelling the invalid all-zero data some
    inverters send right after connecting (see "must_incr" handling).'''
    msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
    msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
    msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
    msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
    msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
    msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
    msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
    msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
    msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
    msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
    msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
    msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
    msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
    msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x00\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x00\x00\x00\x00\x06'
    msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x00\x00\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
    msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x00\x00\x00\x00\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
    msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x00\x00\x00\x00'
    msg += b'\x00\x00\x0b\xb8\x46\x00\x00\x00\x00\x00\x00\x0c\x1c\x46\x00\x00\x00\x00\x00\x00\x0c\x80\x46\x00\x00\x00\x00\x00\x00\x0c\xe4\x46\x00\x00\x00\x00\x00\x00\x0d\x48\x46'
    msg += b'\x00\x00\x00\x00\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
    msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
    msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
    msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
    msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
    msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
    msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
    msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
    msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
    msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
    msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
    msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
    msg += b'\x53\x00\x00'
    return msg
def test_parse_control(ContrDataSeq):
i = Infos()
@@ -19,37 +101,319 @@ def test_parse_control(ContrDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "Data_Up_Interval": 300}, "env": {"Signal_Strength": 100}, "total": {"Power_On_Time": 29}})
def test_build_ha_conf():
i = Infos()
d_json, id = next (i.ha_confs(prfx="tsun/garagendach/", snr='123'))
assert id == 'out_power_123'
assert d_json == json.dumps({"name": "Actual Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300}})
def test_build_ha_conf2():
def test_parse_inverter(InvDataSeq):
    '''Parsing an inverter identity sequence fills only the 'inverter' group.'''
    i = Infos()
    for key, result in i.parse (InvDataSeq):
        pass  # consume the generator; only the resulting db matters here
    assert json.dumps(i.db) == json.dumps(
        {"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq):
    '''Parsing controller data followed by inverter data merges both into
    one db: 'collector'/'controller' groups plus the 'inverter' group.'''
    i = Infos()
    for key, result in i.parse (ContrDataSeq):
        pass  # consume the generator; only the resulting db matters here
    for key, result in i.parse (InvDataSeq):
        pass
    assert json.dumps(i.db) == json.dumps(
        {
        "collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300},
        "inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
def test_build_ha_conf1(ContrDataSeq):
i = Infos()
i.static_init() # initialize counter
tests = 0
for d_json, id in i.ha_confs(prfx="tsun/garagendach/", snr='123'):
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", inv_snr='123', inv_node_id="garagendach/",proxy_node_id = 'proxy/', proxy_unique_id = '456'):
if id == 'out_power_123':
assert d_json == json.dumps({"name": "Actual Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'daily_gen_123':
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'power_pv1_123':
assert d_json == json.dumps({"name": "Power PV1", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'total_gen_123':
assert d_json == json.dumps({"name": "Total Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total", "uniq_id": "total_gen_123", "val_tpl": "{{value_json['Total_Generation'] | float}}", "unit_of_meas": "kWh", "icon": "mdi:solar-power", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
tests +=1
assert tests==4
elif id == 'power_pv2_123':
assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!
def test_build_ha_conf3():
elif id == 'signal_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "via_device": "proxy", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'inv_count_456':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
assert tests==5
def test_build_ha_conf2(ContrDataSeq, InvDataSeq):
    """Check the Home-Assistant auto-discovery payloads after parsing both a
    controller and an inverter data sequence, with a suggested area ('roof')
    appended to the device names.  Exactly five known entities must be
    emitted (grid power, daily generation, pv1/pv2 power, signal strength)."""
    i = Infos()
    # NOTE(review): this first ha_confs() call uses the keywords 'prfx'/'snr',
    # while the call below uses 'ha_prfx'/'inv_snr'/... — confirm Infos.ha_confs()
    # really accepts both signatures; this looks like residue of an older API.
    for d_json, id in i.ha_confs(prfx="tsun/garagendach/", snr='123'):
        # feed controller and inverter data so all registers are known
        for key, result in i.parse (ContrDataSeq):
            pass
        for key, result in i.parse (InvDataSeq):
            pass
    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", inv_snr='123', inv_node_id="garagendach/",proxy_node_id = 'proxy/', proxy_unique_id = '456', sug_area = 'roof'):
        if id == 'out_power_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
        if id == 'daily_gen_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
        elif id == 'power_pv1_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1 - roof", "sa": "Module PV1 - roof", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
        elif id == 'power_pv2_123':
            # pv2 is only registered because the controller data was parsed first
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV2 - roof", "sa": "Module PV2 - roof", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
        elif id == 'signal_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
    assert tests==5
def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero):
    """Generation counters must never be reset by bad data.

    1st pass: real data -> 'total' and 'env' report an update.
    2nd pass: identical data -> no update reported, values unchanged.
    3rd pass: an all-zero frame -> 'total' is NOT updated (daily/total
    generation values are kept), while 'env' (temperature, rated power)
    does follow the zero values and Power readings drop to 0.
    """
    i = Infos()
    tests = 0
    for key, update in i.parse (InvDataSeq2):
        if key == 'total':
            assert update == True
            tests +=1
        elif key == 'env':
            assert update == True
            tests +=1
    # each of the two keys is yielded twice by the parser -> 4 hits expected
    assert tests==4
    assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
    tests = 0
    for key, update in i.parse (InvDataSeq2):
        if key == 'total':
            assert update == False
            tests +=1
        elif key == 'env':
            assert update == False
            tests +=1
    assert tests==4
    assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
    tests = 0
    for key, update in i.parse (InvDataSeq2_Zero):
        if key == 'total':
            # zero frame must not clobber the generation counters
            assert update == False
            tests +=1
        elif key == 'env':
            assert update == True
            tests +=1
    assert tests==4
    # daily/total generation survive, but live Power readings follow the zeros
    assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero):
    """When the FIRST frame after connect is all-zero, the generation
    counters must stay uninitialized (no zero values published) until a
    frame with real data arrives; then everything is populated normally."""
    i = Infos()
    tests = 0
    for key, update in i.parse (InvDataSeq2_Zero):
        if key == 'total':
            # never report an update for zero-initialized generation values
            assert update == False
            tests +=1
        elif key == 'env':
            assert update == True
            tests +=1
    assert tests==4
    # generation counters remain completely unset (not set to 0)
    assert json.dumps(i.db['total']) == json.dumps({})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
    tests = 0
    for key, update in i.parse (InvDataSeq2_Zero):
        if key == 'total':
            assert update == False
            tests +=1
        elif key == 'env':
            assert update == False
            tests +=1
    assert tests==4
    assert json.dumps(i.db['total']) == json.dumps({})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
    tests = 0
    for key, update in i.parse (InvDataSeq2):
        if key == 'total':
            # first real data -> now the counters get set and reported
            assert update == True
            tests +=1
        elif key == 'env':
            assert update == True
            tests +=1
    assert tests==4
    assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
def test_statistic_counter():
    """Exercise dev_value() lookups plus the inc/dec counter interface of
    the proxy statistics ('Inverter_Cnt' at register 0xffffff00)."""
    i = Infos()
    # a non-register value is passed through unchanged
    val = i.dev_value("Test-String")
    assert val == "Test-String"
    val = i.dev_value(0xffffffff) # invalid addr
    assert val == None
    val = i.dev_value(0xffffff00) # valid addr but not initialized
    assert val == None or val == 0
    i.static_init() # initialize counter
    assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0}})
    val = i.dev_value(0xffffff00) # valid and initialized addr
    assert val == 0
    i.inc_counter('Inverter_Cnt')
    assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0}})
    val = i.dev_value(0xffffff00)
    assert val == 1
    # decrement brings the counter back to its initial value
    i.dec_counter('Inverter_Cnt')
    val = i.dev_value(0xffffff00)
    assert val == 0
def test_dep_rules():
    """Check ignore_this_device() dependency rules against the
    'Inverter_Cnt' statistics register (0xffffff00).

    A rule without a comparison ('less_eq'/'gte'), an unknown register, or
    an empty rule must always be ignored; otherwise the comparison against
    the current counter value decides.
    """
    i = Infos()
    i.static_init()  # initialize the statistics counters
    res = i.ignore_this_device({})
    assert res == True
    res = i.ignore_this_device({'reg':0xffffffff})  # unknown register
    assert res == True
    i.inc_counter('Inverter_Cnt') # is 1
    val = i.dev_value(0xffffff00)
    assert val == 1
    res = i.ignore_this_device({'reg':0xffffff00})  # no comparison given
    assert res == True
    res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})  # 1 <= 2
    assert res == False
    res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})      # 1 >= 2 fails
    assert res == True
    i.inc_counter('Inverter_Cnt') # is 2
    res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})  # 2 <= 2
    assert res == False
    res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})      # 2 >= 2
    assert res == False
    # BUGFIX: this line previously read "i.inc_counter('Inverter_Cnt') is 3",
    # a no-op identity comparison — the '#' of the trailing comment was lost
    # (cf. "# is 1" / "# is 2" above).
    i.inc_counter('Inverter_Cnt') # is 3
    res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})  # 3 <= 2 fails
    assert res == True
    res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})      # 3 >= 2
    assert res == False
def test_table_definition():
    """Inject broken entries into the (private) info tables and check that
    ha_confs() still yields a config for them while incrementing the
    Internal_Error counter (register 0xffffff04) once per defect."""
    i = Infos()
    i.static_init() # initialize counter
    val = i.dev_value(0xffffff04) # check internal error counter
    assert val == 0
    # a full run over the unmodified tables must not raise internal errors
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", inv_snr='123', inv_node_id="garagendach/",proxy_node_id = 'proxy/', proxy_unique_id = '456', sug_area = 'roof'):
        pass
    val = i.dev_value(0xffffff04) # check internal error counter
    assert val == 0
    # test missing 'fmt' value
    Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test1'], 'singleton': True, 'ha':{'dev':'proxy', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test1_'}}
    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", inv_snr='123', inv_node_id="garagendach/",proxy_node_id = 'proxy/', proxy_unique_id = '456', sug_area = 'roof'):
        if id == 'intern_test1_456':
            tests +=1
    assert tests == 1
    val = i.dev_value(0xffffff04) # check internal error counter
    assert val == 1
    # test missing 'dev' value
    Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", inv_snr='123', inv_node_id="garagendach/",proxy_node_id = 'proxy/', proxy_unique_id = '456', sug_area = 'roof'):
        if id == 'intern_test2_456':
            tests +=1
    assert tests == 1
    val = i.dev_value(0xffffff04) # check internal error counter
    assert val == 2
    # test invalid 'via' value
    Infos._Infos__info_devs['test_dev'] = {'via':'xyz', 'name':'Module PV1'}
    Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev':'test_dev', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", inv_snr='123', inv_node_id="garagendach/",proxy_node_id = 'proxy/', proxy_unique_id = '456', sug_area = 'roof'):
        if id == 'intern_test2_456':
            tests +=1
    assert tests == 1
    val = i.dev_value(0xffffff04) # check internal error counter
    assert val == 3
def test_invalid_data_type(InvalidDataSeq):
    """A record with an unknown data type must bump the Invalid_Data_Type
    counter (register 0xffffff03) and leave only the valid values in db."""
    infos = Infos()
    infos.static_init()  # reset the proxy statistics counters
    assert infos.dev_value(0xffffff03) == 0  # counter starts at zero
    # drain the parser generator; only its side effects matter here
    for _ in infos.parse (InvalidDataSeq):
        pass
    expected = {"inverter": {"Product_Name": "Microinv"}}
    assert json.dumps(infos.db) == json.dumps(expected)
    # exactly one invalid record must have been counted
    assert infos.dev_value(0xffffff03) == 1

View File

@@ -2,7 +2,10 @@
import pytest
from app.src.messages import Message, Control
from app.src.config import Config
from app.src.infos import Infos
# initialize the proxy statistics
Infos.static_init()
class MemoryStream(Message):
def __init__(self, msg, chunks = (0,)):
@@ -72,6 +75,7 @@ def test_read_message(MsgContactInfo):
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
m.close()
def test_read_message_long_id(MsgContactInfo_LongId):
@@ -90,6 +94,7 @@ def test_read_message_long_id(MsgContactInfo_LongId):
m.read() # try to read rest of message, but there is no chunk available
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
m.close()
def test_read_message_in_chunks(MsgContactInfo):
@@ -108,6 +113,7 @@ def test_read_message_in_chunks(MsgContactInfo):
m.read() # read rest of message
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
m.close()
def test_read_message_in_chunks2(MsgContactInfo):
m = MemoryStream(MsgContactInfo, (4,10,0))
@@ -129,6 +135,7 @@ def test_read_message_in_chunks2(MsgContactInfo):
pass
assert m.msg_count == 1
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
m.close()
def test_read_two_messages(Msg2ContactInfo):
m = MemoryStream(Msg2ContactInfo, (0,))
@@ -148,6 +155,7 @@ def test_read_two_messages(Msg2ContactInfo):
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
m.close()
def test_ctrl_byte():
c = Control(0x91)
@@ -158,3 +166,36 @@ def test_ctrl_byte():
assert c.is_resp()
def test_msg_iterator():
    """Iterating the Message class must yield every open instance exactly
    once and must skip instances that have been closed and deleted."""
    m1 = Message()
    m2 = Message()
    m3 = Message()
    m3.close()
    del m3  # closed instance must no longer appear in the iteration
    test1 = 0
    test2 = 0
    for key in Message:
        if key == m1:
            test1+=1
        elif key == m2:
            test2+=1
        else:
            assert False
    assert test1 == 1
    assert test2 == 1
def test_proxy_counter():
    """inc_counter()/dec_counter() on a Message must update the shared
    proxy statistics and flag 'proxy' in new_data so it gets published."""
    m = Message()
    assert m.new_data == {}
    assert 'proxy' in m.db.stat
    assert 0 == m.db.stat['proxy']['Unknown_Msg']
    m.inc_counter('Unknown_Msg')
    assert m.new_data == {'proxy': True}
    assert 1 == m.db.stat['proxy']['Unknown_Msg']
    # clear the flag: the decrement must set it again
    m.new_data['proxy'] = False
    m.dec_counter('Unknown_Msg')
    assert m.new_data == {'proxy': True}
    assert 0 == m.db.stat['proxy']['Unknown_Msg']
    m.close()

View File

@@ -52,6 +52,7 @@ services:
mqtt:
container_name: mqtt-broker
image: eclipse-mosquitto:2
restart: unless-stopped
expose:
- 1883
volumes:
@@ -66,20 +67,17 @@ services:
####### T S U N - P R O X Y ######
tsun-proxy:
container_name: tsun-proxy
image: docker.io/sallius/tsun-gen3-proxy:latest
build:
context: https://github.com/s-allius/tsun-gen3-proxy.git#main:app
args:
- UID=1026
image: ghcr.io/s-allius/tsun-gen3-proxy:latest
restart: unless-stopped
depends_on:
- mqtt
environment:
- TZ=Europe/Brussels
- SERVICE_NAME=tsun-proxy
- UID=${UID:-1000}
- GID=${GID:-1000}
dns:
- 8.8.8.8
- 4.4.4.4
- ${DNS1:-8.8.8.8}
- ${DNS2:-4.4.4.4}
ports:
- 5005:5005
volumes:

View File

@@ -0,0 +1,199 @@
# test_with_pytest.py and scapy
#
import pytest, socket, time
#from scapy.all import *
#from scapy.layers.inet import IP, TCP, TCP_client
def get_sn() -> bytes:
    """Return the 16-byte serial number of the valid test controller."""
    return b'R170000000000001'
def get_inv_no() -> bytes:
    """Return the 16-byte serial number of the test inverter."""
    return b'T170000000000001'
def get_invalid_sn() -> bytes:
    """Return a 16-byte serial number that the proxy does not know.

    Frames built with this serial are expected to get no response (see
    test_send_contact_info2).  The '-> bytes' annotation was added for
    consistency with get_sn() and get_inv_no().
    """
    return b'R170000000000002'
@pytest.fixture
def MsgContactInfo():  # Contact Info message
    """Raw contact-info frame: 4-byte length, 0x10 + valid serial,
    ctrl byte 0x91, msg id 0x00, then the contact/credential payload."""
    return b'\x00\x00\x00\x2c\x10'+get_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp():  # Contact Response message
    """Expected contact response (ctrl 0x99, msg id 0x00, status 0x01)
    for the valid serial."""
    return b'\x00\x00\x00\x14\x10'+get_sn()+b'\x99\x00\x01'
@pytest.fixture
def MsgContactInfo2():  # Contact Info message
    """Contact-info frame carrying the *unknown* serial number; the proxy
    is expected to ignore it (recv must time out)."""
    return b'\x00\x00\x00\x2c\x10'+get_invalid_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp2():  # Contact Response message
    """Contact response frame for the unknown serial (not expected to be
    received; kept for completeness)."""
    return b'\x00\x00\x00\x14\x10'+get_invalid_sn()+b'\x99\x00\x01'
@pytest.fixture
def MsgTimeStampReq():  # Get Time Request message
    """Time request frame (ctrl 0x91, msg id 0x22) from the controller."""
    return b'\x00\x00\x00\x13\x10'+get_sn()+b'\x91\x22'
@pytest.fixture
def MsgTimeStampResp():  # Get Time Response message
    """Time response frame (ctrl 0x99, msg id 0x22) carrying an 8-byte
    timestamp payload."""
    return b'\x00\x00\x00\x1b\x10'+get_sn()+b'\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
@pytest.fixture
def MsgContollerInd():  # Data indication from the controller
    """Captured controller data indication (msg id 0x71): header with the
    controller serial and timestamp, followed by typed data records
    (presumably firmware version, vendor, model, hostnames and counters —
    verify against the Infos register table)."""
    msg = b'\x00\x00\x01\x2f\x10'+ get_sn() + b'\x91\x71\x0e\x10\x00\x00\x10'+get_sn()
    msg += b'\x01\x00\x00\x01\x89\xc6\x63\x55\x50'
    msg += b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
    msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
    msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
    msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
    msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
    msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
    return msg
@pytest.fixture
def MsgInvData():  # Data indication from the controller
    """Captured inverter identification frame: string records with product
    name ('Microinv'), vendor ('TSUN'), firmware ('V5.0.11'), serial and
    model ('TSOL-MS600')."""
    msg = b'\x00\x00\x00\x8b\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no()
    msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
    msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
    msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
    return msg
@pytest.fixture
def MsgInverterInd():  # Data indication from the inverter
    """Large captured inverter measurement frame (typed records: 'S' 16-bit,
    'I' 32-bit, 'F' float values — presumably voltages, currents, power and
    generation registers; verify against the Infos register table)."""
    msg = b'\x00\x00\x05\x02\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no()
    msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
    msg += b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
    msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
    msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
    msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
    msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
    msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
    msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
    msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
    msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
    msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
    msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
    msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
    msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
    msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
    msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
    msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
    msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
    msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
    msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
    msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
    msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
    msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
    msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
    msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
    msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
    msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
    msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
    msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
    msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
    msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
    msg += b'\x53\x00\x00'
    return msg
@pytest.fixture(scope="session")
def ClientConnection():
    """Session-wide TCP connection to the proxy endpoint (port 5005) with a
    1 s receive timeout; closed automatically after the test session.
    NOTE(review): currently points at the public TSUN host — switch to one
    of the commented-out addresses for local testing."""
    #host = '172.16.30.7'
    host = 'logger.talent-monitoring.com'
    #host = '127.0.0.1'
    port = 5005
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.connect((host, port))
        s.settimeout(1)
        yield s
        s.close()
def tempClientConnection():
    """Plain generator variant of ClientConnection for tests that need a
    short-lived, non-session connection (see test_open_close).
    NOTE(review): duplicates ClientConnection's body — consider sharing."""
    #host = '172.16.30.7'
    host = 'logger.talent-monitoring.com'
    #host = '127.0.0.1'
    port = 5005
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.connect((host, port))
        s.settimeout(1)
        yield s
        s.close()
def test_open_close():
    """Opening and closing a connection to the proxy must not raise."""
    try:
        # drive the generator: connects on first iteration, closes on exhaustion
        for s in tempClientConnection():
            pass
    except Exception:  # narrowed from bare 'except:' (no longer hides KeyboardInterrupt/SystemExit)
        assert False, "open/close of the client connection raised"
    assert True
def test_send_contact_info1(ClientConnection, MsgContactInfo, MsgContactResp):
    """A contact-info frame with a valid serial must be answered with the
    matching contact response."""
    s = ClientConnection
    # pre-bind 'data' so a timeout produces a clean assertion failure
    # instead of a NameError on the final assert
    data = None
    try:
        s.sendall(MsgContactInfo)
        data = s.recv(1024)
    except TimeoutError:
        pass
    assert data == MsgContactResp
def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, MsgContactResp):
    """A contact-info frame with an unknown serial must be ignored (recv
    times out); a subsequent valid frame must still get the response."""
    s = ClientConnection
    try:
        s.sendall(MsgContactInfo2)
        data = s.recv(1024)
    except TimeoutError:
        assert True  # expected: no answer for the unknown serial
    else:
        assert False
    # pre-bind 'data' so a timeout on the valid frame fails the assert
    # instead of raising a NameError
    data = None
    try:
        s.sendall(MsgContactInfo)
        data = s.recv(1024)
    except TimeoutError:
        pass
    assert data == MsgContactResp
def test_send_contact_resp(ClientConnection, MsgContactResp):
    """Sending a contact *response* to the proxy must yield no reply:
    either recv times out or the peer sends an empty payload."""
    s = ClientConnection
    try:
        s.sendall(MsgContactResp)
        data = s.recv(1024)
    except TimeoutError:
        assert True
    else:
        # BUGFIX: recv() returns bytes — the original compared against the
        # str '' which can never be equal to a bytes object
        assert data == b''
def test_send_ctrl_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgContollerInd):
    """Smoke test: send a time request and a controller data indication;
    timeouts are tolerated.  The response checks are currently disabled
    (commented out) — presumably because the answer timing is not
    deterministic against the live endpoint; confirm before re-enabling."""
    s = ClientConnection
    try:
        s.sendall(MsgTimeStampReq)
        data = s.recv(1024)
    except TimeoutError:
        pass
    # time.sleep(2.5)
    # assert data == MsgTimeStampResp
    try:
        s.sendall(MsgContollerInd)
        data = s.recv(1024)
    except TimeoutError:
        pass
def test_send_inv_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgInvData, MsgInverterInd):
    """Smoke test: send a time request, the inverter identification frame
    and the large inverter measurement frame; timeouts are tolerated.
    The response check is disabled (commented out) — presumably due to
    non-deterministic timing against the live endpoint; confirm before
    re-enabling."""
    s = ClientConnection
    try:
        s.sendall(MsgTimeStampReq)
        data = s.recv(1024)
    except TimeoutError:
        pass
    # time.sleep(32.5)
    # assert data == MsgTimeStampResp
    try:
        s.sendall(MsgInvData)
        data = s.recv(1024)
        s.sendall(MsgInverterInd)
        data = s.recv(1024)
    except TimeoutError:
        pass