Compare commits


169 Commits

Author SHA1 Message Date
Stefan Allius
6e2f88423d Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2024-04-02 18:52:51 +02:00
Stefan Allius
7fe9dcbe60 Version 0.6.0 2024-04-02 18:52:37 +02:00
Stefan Allius
009746a1e4 fix logging of incoming connections 2024-04-02 18:51:59 +02:00
Stefan Allius
4da8f8f3b2 Update README.md
Compatibility table
2024-04-02 00:15:04 +02:00
Stefan Allius
13b1930599 Update README.md 2024-04-01 23:36:43 +02:00
Stefan Allius
a2364115b3 prepare version 0.6 2024-04-01 23:31:48 +02:00
Stefan Allius
8f390b67cb cleanup 2024-04-01 23:31:15 +02:00
Stefan Allius
fa86dde991 prepare Version 0.6 2024-04-01 23:30:38 +02:00
Stefan Allius
6cfc1792ba add descriptions 2024-04-01 23:29:46 +02:00
Stefan Allius
04ba868b37 build model name for solarman logger 2024-04-01 22:20:46 +02:00
Stefan Allius
f3842d95d8 add testcases for building model names 2024-04-01 21:24:07 +02:00
Stefan Allius
fbbf698666 fix unit tests 2024-04-01 20:06:25 +02:00
Stefan Allius
ef8a461569 build gen 3 inverter model name 2024-04-01 20:05:51 +02:00
Stefan Allius
73c35de3e5 add more values to Home Assistant 2024-04-01 15:00:15 +02:00
Stefan Allius
80f4dd722a remove useless parameter from _key_obj() 2024-04-01 02:08:28 +02:00
Stefan Allius
f38fea3807 move ignore_this_device() into base class Infos 2024-04-01 00:48:33 +02:00
Stefan Allius
db319f6aa3 fix system test, since repeat time may vary 2024-03-31 23:57:04 +02:00
Stefan Allius
695d8a8906 count AT commands in home assistant 2024-03-31 23:56:18 +02:00
Stefan Allius
e4b7ef7a0c add more unit tests 2024-03-31 23:26:14 +02:00
Stefan Allius
884d4c04e6 improve error handling
- for wrong start bytes and stop bytes
- for wrong checksums
2024-03-31 19:10:58 +02:00
Stefan Allius
75bdaedc31 fix error counting on checksum errors 2024-03-31 01:18:01 +01:00
Stefan Allius
dccf0d22e1 Merge pull request #40 from s-allius/refactor-Infos-class
Unit tests for solarman V5
2024-03-31 01:06:13 +01:00
Stefan Allius
c4db53bd1e Merge branch 'main' into refactor-Infos-class 2024-03-31 01:05:54 +01:00
Stefan Allius
f69b02aaeb add unit test for solarman V5 2024-03-31 00:59:57 +01:00
Stefan Allius
cdc3226adf count invalid messages 2024-03-31 00:51:30 +01:00
Stefan Allius
e29c250f39 add INVALID_MSG_FMT 2024-03-31 00:47:58 +01:00
Stefan Allius
643c0026d8 count INVALID_MSG_FMT errors 2024-03-31 00:26:54 +01:00
Stefan Allius
340f7a5127 Merge pull request #39 from s-allius/refactor-Infos-class
Refactor infos class
2024-03-30 22:22:16 +01:00
Stefan Allius
7cbd5f25bb parse data from received messages 2024-03-30 21:50:08 +01:00
Stefan Allius
27ce61adf4 add more registers and set default values 2024-03-30 21:49:03 +01:00
Stefan Allius
3d375d86be add set_db_def_value() 2024-03-30 21:48:25 +01:00
Stefan Allius
71ec0570ac make _info_defs and _info_devs private 2024-03-30 11:58:38 +01:00
Stefan Allius
e3fdeecf82 parse gen3plus inverter data 2024-03-30 01:15:07 +01:00
Stefan Allius
738dd708ac refactor ha_confs() interface 2024-03-29 19:21:59 +01:00
Stefan Allius
5853518afe fix test for Infos class 2024-03-29 10:49:55 +01:00
Stefan Allius
385a984fd2 use ha_proxy_confs for registering proxy at ha 2024-03-29 10:49:00 +01:00
Stefan Allius
37cb7cc1a1 implement register mapping 2024-03-29 10:48:09 +01:00
Stefan Allius
21e46ae456 refactor info class 2024-03-28 20:56:13 +01:00
Stefan Allius
c52fc990f4 Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2024-03-28 15:09:39 +01:00
Stefan Allius
5ddc402e3c add msg_data_ind() handler 2024-03-28 15:09:10 +01:00
Stefan Allius
ac81b20ce7 Update README.md
remove unsupported config values
2024-03-27 01:45:56 +01:00
Stefan Allius
ef1fd4f913 Gen 3 plus support (#38)
* add tsun_v2 default configuration

* Add port 10000 for gen 3 plus inverters

* add monitor_sn for solarman support

* listen on port 10000 for solarman inverters

* initial version for gen 3 plus support

* refactoring split gen3 and gen3plus

* refactoring

* refactoring classes

* refactor proxy statistic counter

* - fix logging levels
- use super() in close() and __del__()

* add config for gen 3 plus

* Add solarman config support

* refactor Message.. classes

* rename class MessageG3 into Talent

* refactor close() handler

* refactor disc() handler

* move loop() into the base class AsyncStream

* move async_read, _write and _forward into base class

* Cleanup

* move server_loop and client_loop into basic class

* add msg forwarding for solarman V5 protocol

* move server_loop() and client_loop to class AsyncStream

* rename AsyncStreamxx to Connectionxx

* fix unit tests

* make more attributes private

* load .env file

* wait after last test

* ignore .env

* add response handler

* Update README.md

* update unreleased changes

* home assistant add more diagnostic values

* fix typo

* Update README.md

Definition of the inverter generations added to the compatibility table

* add ha counter for 'Internal SW Exceptions'

* Update README.md

Fixes an incorrect marking in the display of the configuration file

* Update README.md

Planning documented for MS-2000 support

* S allius/issue33 (#34)

* - fix issue 33

  The TSUN Cloud now responds to contact_info and get_time messages with
  an empty display message and not with a response message as before.
  We tried to parse data from the empty message, which led to an
  exception

* Add test with empty conn_ind from inverter

* version 0.5.5

* add tsun_v2 default configuration

* Add port 10000 for gen 3 plus inverters

* add monitor_sn for solarman support

* listen on port 10000 for solarman inverters

initial version for gen 3 plus support

* refactoring split gen3 and gen3plus

* refactoring

* refactoring classes

* refactor proxy statistic counter

* - fix logging levels
- use super() in close() and __del__()

* add config for gen 3 plus

* Add solarman config support

* refactor Message.. classes

* rename class MessageG3 into Talent

* refactor close() handler

* refactor disc() handler

* move loop() into the base class AsyncStream

* move async_read, _write and _forward into base class

* Cleanup

* move server_loop and client_loop into basic class

* add msg forwarding for solarman V5 protocol

* move server_loop() and client_loop to class AsyncStream

* rename AsyncStreamxx to Connectionxx

* fix unit tests

* make more attributes private

load .env file

* wait after last test

* ignore .env

* add response handler
2024-03-27 01:40:29 +01:00
Stefan Allius
542f422e1e version 0.5.5 2023-12-31 16:28:06 +01:00
Stefan Allius
7225c20b01 S allius/issue33 (#34)
* - fix issue 33

  The TSUN Cloud now responds to contact_info and get_time messages with
  an empty display message and not with a response message as before.
  We tried to parse data from the empty message, which led to an
  exception

* Add test with empty conn_ind from inverter
2023-12-31 16:25:21 +01:00
Stefan Allius
d7b3ab54e8 Update README.md
Planning documented for MS-2000 support
2023-12-31 11:28:11 +01:00
Stefan Allius
d15741949f Update README.md
Fixes an incorrect marking in the display of the configuration file
2023-12-28 14:08:59 +01:00
Stefan Allius
cef28b06cd add ha counter for 'Internal SW Exceptions' 2023-12-24 11:49:26 +01:00
Stefan Allius
ba4a1f058f Update README.md
Definition of the inverter generations added to the compatibility table
2023-12-17 20:00:02 +01:00
Stefan Allius
43f513ecbf fix typo 2023-12-15 23:42:32 +01:00
Stefan Allius
3e217b96d9 home assistant add more diagnostic values 2023-12-15 23:27:06 +01:00
Stefan Allius
dc8fc5e4eb update unreleased changes 2023-12-11 00:42:56 +01:00
Stefan Allius
9acd781fa8 Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2023-12-10 15:15:06 +01:00
Stefan Allius
5d51a0d9f8 - Preparation for overwriting received data 2023-12-10 15:14:51 +01:00
Stefan Allius
670424451d - Fixed detection of the connected inputs/MPPTs
- Add data acquisition interval
- Add number of connections
- Add communication type
2023-12-10 15:14:21 +01:00
Stefan Allius
ea95e540ec - Fixed detection of the connected inputs/MPPTs
- Add data acquisition interval
- Add number of connections
- Add communication type
- Preparation for overwriting received data
2023-12-10 15:13:44 +01:00
Stefan Allius
9a68542c5a Update README.md 2023-12-02 00:17:49 +01:00
Stefan Allius
d9c56fb1ab Hardening (#31)
* merge hardening branch into main
2023-11-29 23:54:04 +01:00
Stefan Allius
4c4628301f Update README.md
Fix typos
2023-11-26 21:45:24 +01:00
Stefan Allius
3dc7730084 Update README.md
Link for sending a trace
2023-11-26 19:50:16 +01:00
Stefan Allius
8401833c0e Update README.md
add compatibility section
2023-11-26 13:55:44 +01:00
Stefan Allius
b142cfbc3c fix typo 2023-11-22 23:56:33 +01:00
Stefan Allius
5996ca2500 add info about Over The Air (OTA) firmware updates 2023-11-22 23:55:36 +01:00
Stefan Allius
bd7c4ae822 Version 0.5.4 2023-11-22 22:26:10 +01:00
Stefan Allius
e2873ffce7 Hardening (#30)
* set build-argument for environment

* hardening remove dangerous commands

* add hardening scripts for base and final image
2023-11-22 21:57:42 +01:00
Stefan Allius
f10207b5ba Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2023-11-22 18:45:16 +01:00
Stefan Allius
aeb2a82df1 ignore bin directory 2023-11-22 18:45:03 +01:00
Stefan Allius
3b75c45344 OTA update (#29)
* add pv module configuration

* add OTA start message counter

* add OTA start message counter

* fix test_statistic_counter
2023-11-22 18:33:56 +01:00
Stefan Allius
9edfa40054 - add unit tests for ota messages 2023-11-21 22:31:46 +01:00
Stefan Allius
0a566a3df2 - add message handler for over the air updates 2023-11-21 22:29:59 +01:00
Stefan Allius
3e7eba9998 improve test coverage 2023-11-17 23:59:34 +01:00
Stefan Allius
00ddcc138f add tests for int64 datatype in controller msg 2023-11-17 23:21:34 +01:00
Stefan Allius
0db2c3945d cleanup msg_get_time handler 2023-11-17 23:20:03 +01:00
Stefan Allius
690c66a13a hardening docker image
remove the python packages setuptools, wheel and pip from the
final image to reduce the attack surface
2023-11-13 20:47:14 +01:00
Stefan Allius
a47ebb1511 fix message unit tests 2023-11-13 00:01:26 +01:00
Stefan Allius
4b7431ede9 Merge pull request #28 from s-allius/s-allius/issue26
Version 0.5.3
2023-11-12 20:25:00 +01:00
Stefan Allius
c3430f509e Version 0.5.3 2023-11-12 15:23:43 +01:00
Stefan Allius
51b046c351 Version 0.5.3 2023-11-12 15:22:41 +01:00
Stefan Allius
32a669d0d1 Merge pull request #27 from s-allius/s-allius/issue26
S allius/issue26
2023-11-12 15:19:48 +01:00
Stefan Allius
4d9f00221c fix the plant offline problem in tsun cloud
- use TSUN timestamp instead of local time,
  as TSUN also expects Central European Summer
  Time in winter
2023-11-12 15:15:30 +01:00
Stefan Allius
27c723b0c8 init contact_mail and contact_name 2023-11-12 01:06:24 +01:00
Stefan Allius
4bd59b91b3 send contact info every time a client connection is established 2023-11-11 23:49:06 +01:00
Stefan Allius
3a3c6142b8 ignore build.sh 2023-11-09 20:43:46 +01:00
Stefan Allius
5d36397f2f remove apk from the final image 2023-11-09 20:17:19 +01:00
Stefan Allius
bb39567d05 Version 0.5.2 2023-11-09 20:05:56 +01:00
Stefan Allius
b6431f8448 improve client conn disconnection
- check for race cond. on closing and establishing
  client connections
- improve connection trace
2023-11-09 20:03:09 +01:00
Stefan Allius
714dd92f35 allow multiple calls to Message.close() 2023-11-08 18:57:56 +01:00
Stefan Allius
02861f70af - add int64 data type to info parser 2023-11-07 00:19:48 +01:00
Stefan Allius
942e17d7c3 Version 0.5.1 2023-11-05 00:57:10 +01:00
Stefan Allius
37f7052811 Merge pull request #23 from limes007/dns_desc
add description for DNS settings
2023-11-05 00:14:30 +01:00
Stefan Allius
05e446dc74 Merge pull request #24 from limes007/main
fix f-string
2023-11-05 00:09:51 +01:00
limes007
647ef157d4 fix f-string 2023-11-04 23:29:53 +01:00
limes007
9ae391b46d add description for DNS settings 2023-11-04 23:28:20 +01:00
Stefan Allius
97dfe5d19e Version 0.5.0 2023-11-04 23:06:55 +01:00
Stefan Allius
4cdaa84c65 move self.server_side to Message class 2023-11-03 18:28:04 +01:00
Stefan Allius
9936ab0411 test server and client side 2023-11-03 18:27:30 +01:00
Stefan Allius
b079318c4b fix syntax error 2023-11-03 18:26:43 +01:00
Stefan Allius
a369e0ae6d Merge pull request #22 from s-allius/s-allius/issue21
S allius/issue21
2023-11-03 18:14:04 +01:00
Stefan Allius
fbd4eb1336 disable flake8 for tests 2023-11-03 01:07:05 +01:00
Stefan Allius
6821734238 fix missing f-string declaration 2023-11-03 00:16:22 +01:00
Stefan Allius
7f91994934 fix multiline bugs 2023-11-02 23:50:49 +01:00
Stefan Allius
a002408a98 fix missing import 2023-11-02 23:50:02 +01:00
Stefan Allius
de50f896dd fix missing f-string declaration 2023-11-02 23:49:34 +01:00
Stefan Allius
b23cae5bea fix exception since super().__del__() doesn't exist 2023-11-02 23:47:20 +01:00
Stefan Allius
2c4af0b7d8 lint code with flake8 2023-11-02 22:31:30 +01:00
Stefan Allius
c772eeeb28 Proxy counters don't work properly after restart
Fixes #21
2023-11-02 22:30:50 +01:00
Stefan Allius
165f94828f adapt unit tests 2023-11-01 21:18:25 +01:00
Stefan Allius
d8bc2dcae1 register proxy dev as soon as the MQTT connection is established 2023-11-01 21:17:43 +01:00
Stefan Allius
af27e95ef7 register proxy dev as soon as the MQTT connection is established 2023-11-01 21:16:54 +01:00
Stefan Allius
bcc901ba4c add class value new_stat_data 2023-11-01 21:14:22 +01:00
Stefan Allius
7a2667767e add callback on mqtt/ha conn establishment 2023-11-01 21:11:54 +01:00
Stefan Allius
85be9072db adapt interface of ha_confs 2023-11-01 21:09:57 +01:00
Stefan Allius
387bab01be Proxy counters don't work properly after restart
Fixes #21
2023-11-01 21:08:51 +01:00
Stefan Allius
bcd37faa4f Update changelog 2023-10-30 23:48:15 +01:00
Stefan Allius
47878adb23 fix definition of the 'Unknown Control Type' counter 2023-10-30 23:43:18 +01:00
Stefan Allius
205a4e38ee increase test coverage 2023-10-28 00:14:34 +02:00
Stefan Allius
36754196c2 add Unknown_Ctrl error counter 2023-10-28 00:11:22 +02:00
Stefan Allius
cfe64b1eae add Unknown_Ctrl error counter 2023-10-28 00:09:19 +02:00
Stefan Allius
bb793a3f13 Version 0.4.3 2023-10-26 20:37:03 +02:00
Stefan Allius
c3da9d6101 - avoid resetting the daily generation counters 2023-10-26 20:32:08 +02:00
Stefan Allius
0c9f953476 don't initialize must_incr values with zero
- when the connection has just been established by the inverter.
  Sometimes the inverters send invalid data with the value zero.
  In this case, we no longer initialize the must_incr values,
  to avoid sending invalid data to the mqtt broker and the
  Home Assistant
2023-10-26 20:23:53 +02:00
Stefan Allius
658f42d4fe restart mqtt broker on errors 2023-10-23 21:38:58 +02:00
Stefan Allius
870a965c22 - fix typo 2023-10-23 21:27:58 +02:00
Stefan Allius
0c645812bd catch mqtt errors 2023-10-23 21:25:47 +02:00
Stefan Allius
7b71f25496 Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2023-10-23 21:18:44 +02:00
Stefan Allius
50977d5afd catch Mqtt errors
- we catch mqtt errors, so we can forward messages to
  the tsun cloud even if the mqtt broker is not running
2023-10-23 21:17:17 +02:00
Stefan Allius
ff0979663e catch broken pipe errors 2023-10-23 21:12:10 +02:00
Stefan Allius
a6ac9864af Merge pull request #20 from LenzGr/patch-1
CHANGELOG.md: fix typos
2023-10-23 19:12:25 +02:00
Lenz Grimmer
2e0331cb88 CHANGELOG.md: fix typos 2023-10-23 09:31:54 +02:00
Stefan Allius
ec54e399fb update default config 2023-10-23 00:14:26 +02:00
Stefan Allius
600362d00b Version 0.4.2 2023-10-21 21:48:46 +02:00
Stefan Allius
341e5c3964 fix typo 2023-10-21 21:40:28 +02:00
Stefan Allius
27a99fccec setup test coverage measurement 2023-10-21 21:39:19 +02:00
Stefan Allius
9264faaf3d avoid resetting daily generation counters 2023-10-21 21:38:36 +02:00
Stefan Allius
342313b76d add more test cases 2023-10-21 18:20:32 +02:00
Stefan Allius
f96091affd Increase test coverage for the Infos class to 100% 2023-10-21 16:46:49 +02:00
Stefan Allius
1df8ae55f0 Version 0.4.1 2023-10-20 21:53:59 +02:00
Stefan Allius
311b36aaf1 avoid resetting total generation counters 2023-10-20 21:51:02 +02:00
Stefan Allius
1642fe5a8a Merge pull request #19 from s-allius/s-allius/issue18
S allius/issue18
2023-10-20 20:09:56 +02:00
Stefan Allius
2b7e671dfc initialize the proxy statistics 2023-10-20 20:08:39 +02:00
Stefan Allius
a1930d32ae initialize the proxy statistics 2023-10-20 19:54:48 +02:00
Stefan Allius
11d7d616fa add static constructor to init proxy statistics 2023-10-20 00:27:21 +02:00
Stefan Allius
5433e18389 Send proxy values when the inverter disconnects 2023-10-19 20:04:41 +02:00
Stefan Allius
9006472264 list supported inverters 2023-10-17 23:40:34 +02:00
Stefan Allius
605a269d84 Add screenshots 2023-10-17 21:18:54 +02:00
Stefan Allius
93392f49c0 Version 0.4.0 2023-10-16 21:56:04 +02:00
Stefan Allius
587ec3d517 Merge pull request #17 from s-allius/s-allius/issue10
S allius/issue10
2023-10-16 21:50:12 +02:00
Stefan Allius
b98313ae23 add test for open close without any msg transfer 2023-10-16 20:33:46 +02:00
Stefan Allius
dc27da2b56 initialize sug_area 2023-10-16 20:30:43 +02:00
Stefan Allius
0c4bd2a03a log forwarded messages on info level 2023-10-16 20:29:44 +02:00
Stefan Allius
ba961fdfd7 Send internal proxy states to Home-Assistant
Fixes #10
2023-10-16 20:29:23 +02:00
Stefan Allius
03aa0c5747 adapt tests 2023-10-15 23:13:43 +02:00
Stefan Allius
10ec949a5b register proxy entities under a unique device (singleton) 2023-10-15 23:05:56 +02:00
Stefan Allius
909d5ca44a add MQTT config for the proxy device 2023-10-15 22:59:18 +02:00
Stefan Allius
335e5d1184 add MQTT config for the proxy device 2023-10-15 22:55:29 +02:00
Stefan Allius
583ec0c2a7 Update changelog 2023-10-15 15:50:20 +02:00
Stefan Allius
3c8e7666d4 add inc and dec methods for proxy statistic counters 2023-10-15 15:46:05 +02:00
Stefan Allius
5f821802a5 Implement HA device for the proxy
- add singleton proxy device
- add static db for proxy counter
- add inc and dec methods
2023-10-15 15:45:25 +02:00
Stefan Allius
fc10912a12 Send internal proxy states to Home-Assistant
Fixes #10
2023-10-15 15:37:59 +02:00
Stefan Allius
4d5da5a91f fix unit tests, which were broken since version 0.3.0 2023-10-12 23:36:46 +02:00
Stefan Allius
a2dfb4c1a6 avoid crash on incomplete config.toml file 2023-10-12 23:04:54 +02:00
Stefan Allius
3adf968a59 add systemtests 2023-10-12 22:39:24 +02:00
Stefan Allius
89d8cecfb5 don't dispatch ignored messages
- move check of inverter serial number from asyn_stream to message class
- add trace for dropped messages
2023-10-12 22:29:51 +02:00
Stefan Allius
00f735d0ce add a proxy device to home assistant 2023-10-12 13:22:28 +02:00
Stefan Allius
5c940bb7a2 Send internal proxy states to Home-Assistant
Fixes #10

Always register ha entities after connection setup
2023-10-12 13:19:56 +02:00
Stefan Allius
8cb8dea47b Merge pull request #15 from s-allius/s-allius/issue8
S allius/issue8
2023-10-11 21:09:03 +02:00
Stefan Allius
8edbd7928f add docstring 2023-10-11 21:01:57 +02:00
Stefan Allius
f0e9c67a06 fix issue #8 2023-10-11 20:22:33 +02:00
Stefan Allius
505beff6de Do not register non-existent inverter inputs in HA
Fixes #8
2023-10-11 20:01:10 +02:00
Stefan Allius
684e591835 Bump to python version 3.12 2023-10-10 21:54:12 +02:00
35 changed files with 4858 additions and 829 deletions

.coveragerc (new file)

@@ -0,0 +1,2 @@
[run]
branch = True

.gitignore

@@ -1,8 +1,11 @@
__pycache__
.pytest_cache
bin/**
mosquitto/**
homeassistant/**
tsun_proxy/**
system_tests/**
Doku/**
.DS_Store
.coverage
.env
coverage.xml

.vscode/settings.json

@@ -1,8 +1,15 @@
{
"python.testing.pytestArgs": [
"-vv",
"app","system_tests"
"-vv",
"app",
"--cov=app/src",
"--cov-report=xml",
"--cov-report=html",
"system_tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
"python.testing.pytestEnabled": true,
"flake8.args": [
"--extend-exclude=app/tests/*.py system_tests/*.py"
]
}

CHANGELOG.md

@@ -7,6 +7,94 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [0.6.0] - 2024-04-02
- Refactoring to support Solarman V5 protocol
- Add unittest for Solarman V5 implementation
- Handle checksum errors
- Handle wrong start or Stop bytes
- Watch for AT commands and signal their occurrence to HA
- Build inverter type names for MS-1600 .. MS-2000
- Build device name for Solarman logger module
## [0.5.5] - 2023-12-31
- Fixed [#33](https://github.com/s-allius/tsun-gen3-proxy/issues/33)
- Fixed detection of the connected inputs/MPPTs
- Preparation for overwriting received data
- home assistant improvements:
- Add unit 'W' to the `Rated Power` value for home assistant
- `Collect_Interval`, `Connect_Count` and `Data_Up_Interval` as diagnostic value and not as graph
- Add data acquisition interval
- Add number of connections
- Add communication type
- Add 'Internal SW Exception' counter
## [0.5.4] - 2023-11-22
- hardening remove dangerous commands from busybox
- add OTA start message counter
- add message handler for over the air updates
- add unit tests for ota messages
- add unit test for int64 data type
- cleanup msg_get_time_handler
- remove python packages setuptools, wheel, pip from the final image to reduce the attack surface
## [0.5.3] - 2023-11-12
- remove apk package manager from the final image
- send contact info every time a client connection is established
- use TSUN timestamp instead of local time, as TSUN also expects Central European Summer Time in winter
## [0.5.2] - 2023-11-09
- add int64 data type to info parser
- allow multiple calls to Message.close()
- check for race cond. on closing and establishing client connections
## [0.5.1] - 2023-11-05
- fixes f-string by limes007
- add description for dns settings by limes007
## [0.5.0] - 2023-11-04
- fix issue [#21](https://github.com/s-allius/tsun-gen3-proxy/issues/21)
- register proxy dev as soon as the MQTT connection is established
- increase test coverage of the Messages class
- add error counter for unknown control bytes
- lint code with flake8
## [0.4.3] - 2023-10-26
- fix typos by Lenz Grimmer
- catch mqtt errors, so we can forward messages to tsun even if the mqtt broker is not reachable
- avoid resetting the daily generation counters even if the inverter sends zero values after reconnection
## [0.4.2] - 2023-10-21
- count unknown data types in received messages
- count definition errors in our internal tables
- increase test coverage of the Infos class to 100%
- avoid resetting the daily generation counters even if the inverter sends zero values at sunset
## [0.4.1] - 2023-10-20
- fix issue [#18](https://github.com/s-allius/tsun-gen3-proxy/issues/18)
- initialize the proxy statistics
- avoid resetting total generation counters
## [0.4.0] - 2023-10-16
- fix issue [#8](https://github.com/s-allius/tsun-gen3-proxy/issues/8)
- implement [#10](https://github.com/s-allius/tsun-gen3-proxy/issues/10)
- fix: don't dispatch ignored messages so that they are not forwarded
- add systemtests
- fix unit tests, which were broken since version 0.3.0
- add proxy device to home assistant
- add statistic counter to proxy device
- support multiple inverter registration at home assistant
## [0.3.0] - 2023-10-10
❗Due to the definition of values for diagnostics, the MQTT devices of controller and inverter should be deleted in the Home Assistant before updating to version '0.3.0'. After the update, these are automatically created again. The measurement data is retained.
@@ -15,13 +103,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- optimize and reduce logging
- switch to python 3.12
- classify some values for diagnostics
## [0.2.0] - 2023-10-07
This version halves the size of the Docker image and reduces the attack surface for security vulnerabilities, by omitting unneeded code. The feature set is exactly the same as the previous release version 0.1.0.
### Changes
### Changes in 0.2.0
- move from slim-bookworm to an alpine base image
- install python requirements with pip wheel
@@ -54,31 +142,31 @@ This version halves the size of the Docker image and reduces the attack surface
❗Due to the change from one device to multiple devices in the Home Assistant, the previous MQTT device should be deleted in the Home Assistant after the update to pre-release '0.0.4'. Afterwards, the proxy must be restarted again to ensure that the sub-devices are created completely.
### Added
### Added in 0.0.4
- Register multiple devices at home-assistant instead of one for all measurements.
Now we register: a Controller, the inverter and up to 4 input devices to home-assistant.
## [0.0.3] - 2023-09-28
### Added
### Added in 0.0.3
- Fixes Running Proxy with host UID and GUID #2
## [0.0.2] - 2023-09-27
### Added
### Added in 0.0.2
- Dockerfile opencontainer labels
- Send voltage and current of inputs to mqtt
## [0.0.1] - 2023-09-25
### Added
### Added in 0.0.1
- Logger for inverter packets
- SIGTERM handler for fast docker restarts
- Proxy as non-root docker application
- Unit- and system tests
- Home assistant auto configuration
- Self-sufficient island operation without internet
@@ -87,4 +175,4 @@ This version halves the size of the Docker image and reduces the attack surface
### Added
- First checkin, the project was born

README.md

@@ -6,7 +6,7 @@
<p align="center">integration</p>
<p align="center">
<a href="https://opensource.org/licenses/BSD-3-Clause"><img alt="License: BSD-3-Clause" src="https://img.shields.io/badge/License-BSD_3--Clause-green.svg"></a>
<a href="https://www.python.org/downloads/release/python-3110/"><img alt="Supported Python versions" src="https://img.shields.io/badge/python-3.11-blue.svg"></a>
<a href="https://www.python.org/downloads/release/python-3120/"><img alt="Supported Python versions" src="https://img.shields.io/badge/python-3.12-blue.svg"></a>
<a href="https://sbtinstruments.github.io/aiomqtt/introduction.html"><img alt="Supported aiomqtt versions" src="https://img.shields.io/badge/aiomqtt-1.2.1-lightblue.svg"></a>
<a href="https://toml.io/en/v1.0.0"><img alt="Supported toml versions" src="https://img.shields.io/badge/toml-1.0.0-lightblue.svg"></a>
@@ -40,11 +40,18 @@ If you use a Pi-hole, you can also store the host entry in the Pi-hole.
## Features
- supports TSUN GEN3 inverters: TSOL MS-300, MS-350, MS-400, MS-600, MS-700 and MS-800
- support for TSUN GEN3 PLUS inverters since proxy version 0.6 (e.g. MS-2000)
- `MQTT` support
- `Home-Assistant` auto-discovery support
- Self-sufficient island operation without internet
- Self-sufficient island operation without internet (for TSUN GEN3 PLUS inverters in preparation)
- non-root Docker Container
## Home Assistant Screenshots
Here are some screenshots of how the inverter is displayed in the Home Assistant:
https://github.com/s-allius/tsun-gen3-proxy/wiki/home-assistant#home-assistant-screenshots
## Requirements
- A running Docker engine to host the container
@@ -60,7 +67,7 @@ docker build https://github.com/s-allius/tsun-gen3-proxy.git#main:app -t tsun-pr
```
after that you can run the image:
```sh
docker run --dns '8.8.8.8' --env 'UID=1000' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
docker run --dns '8.8.8.8' --env 'UID=1000' -p '5005:5005' -p '10000:10000' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
```
You will probably see a message that the configuration file was not found. So that you can create and edit it without admin rights, the `UID` must be adapted. To do this, simply stop the proxy with Ctrl-C and use the `id` command to determine your own user ID:
```sh
@@ -69,7 +76,7 @@ uid=1050(sallius) gid=20(staff) ...
```
With this information we can customize the `docker run` statement:
```sh
docker run --dns '8.8.8.8' --env 'UID=1050' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
docker run --dns '8.8.8.8' --env 'UID=1050' -p '5005:5005' -p '10000:10000' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
```
###
@@ -77,7 +84,7 @@ docker run --dns '8.8.8.8' --env 'UID=1050' -p '5005:5005' -v ./config:/home/t
The Docker container does not require any special configuration.
On the host, two directories (for log files and for config files) must be mapped. If necessary, the UID of the proxy process can be adjusted, which is also the owner of the log and configuration files.
The proxy can be configured via the file 'config.toml'. When the proxy is started, a file 'config.example.toml' is copied into the config directory. This file shows all possible parameters and their default values. Changes in the example file itself are not evaluated. To configure the proxy, the config.example.toml file should be renamed to config.toml. After that the corresponding values can be adjusted. To load the new configuration, the proxy must be restarted.
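For example, a minimal sketch (the paths follow the `docker run` examples above; adjust them to your mapped config directory, and the container name is only an assumption):
```sh
# copy (or rename) the generated example file and adjust the values in config.toml
cp ./config/config.example.toml ./config/config.toml
# restart the proxy so that the new configuration is loaded
docker restart tsun-proxy   # 'tsun-proxy' is a placeholder for your container name
```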
## Proxy Configuration
@@ -87,11 +94,16 @@ You find more details here: https://toml.io/en/v1.0.0
```toml
# configuration to reach tsun cloud
# configuration for tsun cloud for 'GEN3' inverters
tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
tsun.host = 'logger.talent-monitoring.com'
tsun.port = 5005
# configuration for solarman cloud for 'GEN3 PLUS' inverters
solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
solarman.host = 'iot.talent-monitoring.com'
solarman.port = 10000
# mqtt broker configuration
mqtt.host = 'mqtt' # URL or IP address of the mqtt broker
@@ -104,6 +116,8 @@ mqtt.passwd = ''
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
# microinverters
@@ -112,6 +126,7 @@ inverters.allow_all = false # True: allow inverters, even if we have no invert
# inverter mapping, maps a `serial_no` to a `node_id` and defines an optional `suggested_area` for `home-assistant`
#
# for each inverter add a block starting with [inverters."<16-digit serial number>"]
[inverters."R17xxxxxxxxxxxx1"]
node_id = 'inv1' # Optional, MQTT replacement for inverters serial number
suggested_area = 'roof' # Optional, suggested installation area for home-assistant
@@ -120,9 +135,60 @@ suggested_area = 'roof' # Optional, suggested installation area for home-a
node_id = 'inv2' # Optional, MQTT replacement for inverters serial number
suggested_area = 'balcony' # Optional, suggested installation area for home-assistant
[inverters."Y17xxxxxxxxxxxx1"]
monitor_sn = 2000000000 # The "Monitoring SN:" can be found on a sticker enclosed with the inverter
node_id = 'inv_3' # MQTT replacement for inverters serial number
suggested_area = 'garage' # suggested installation place for home-assistant
```
## DNS Settings
### Loop the proxy into the connection
To include the proxy in the connection between the inverter and the TSUN Cloud, you must adapt the DNS record of *logger.talent-monitoring.com* within the network that your inverter uses. You need a mapping from logger.talent-monitoring.com to the IP address of the host running the Docker engine.
The new GEN3 PLUS inverters use a different URL. Here, *iot.talent-monitoring.com* must be redirected.
This can be done, for example, by adding a local DNS record to the Pi-hole if you are using it.
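For example, with a dnsmasq-based resolver such as the Pi-hole, the override could look like the following sketch (the proxy host IP `192.168.178.2` and the drop-in file name are placeholders):
```sh
# map both TSUN hostnames to the host that runs the proxy container
cat <<'EOF' | sudo tee /etc/dnsmasq.d/99-tsun-proxy.conf
address=/logger.talent-monitoring.com/192.168.178.2
address=/iot.talent-monitoring.com/192.168.178.2
EOF
sudo pihole restartdns        # plain dnsmasq: sudo systemctl restart dnsmasq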
### DNS Rebind Protection
If you are using a router as a local DNS server, the router may have DNS rebind protection that needs to be adjusted. For security reasons, DNS rebind protection blocks DNS queries that resolve to an IP address on the local network.
If you are using a FRITZ!Box, you can do this in the Network Settings tab under Home Network / Network. Add logger.talent-monitoring.com as a hostname exception in DNS rebind protection.
### DNS server of proxy
The proxy itself must use a different DNS server to connect to the TSUN Cloud. If you use the DNS server with the adapted record, you will end up in an endless loop as soon as the proxy tries to send data to the TSUN Cloud.
As described above, set a DNS server in the Docker command or Docker compose file.
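A quick plausibility check could look like this (illustrative only): resolved via the public DNS server passed to Docker, the hostname must point to the TSUN cloud and not back to the proxy host.
```sh
# must return a public IP address, not the address of the proxy host
docker run --rm --dns '8.8.8.8' alpine nslookup logger.talent-monitoring.com
```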
### Over The Air (OTA) firmware update
Even if the proxy is connected between the inverter and the TSUN Cloud, an OTA update is supported. To do this, the inverter must be able to reach the website http://www.talent-monitoring.com:9002/ in order to download images from there.
It must be ensured that this address is not mapped to the proxy!
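To check this, something like the following can be used (a sketch; the exact HTTP response of the OTA server is not guaranteed):
```sh
nslookup www.talent-monitoring.com                            # must resolve to a public IP, not the proxy host
curl -sI http://www.talent-monitoring.com:9002/ | head -n 1   # any HTTP status line shows the host is reachable
```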
## Compatibility
In the following table you will find an overview of which inverter model has been tested for compatibility with which firmware version.
A combination with a red question mark should work, but I have not checked it in detail.
Micro Inverter Model | Fw. 1.00.06 | Fw. 1.00.17 | Fw. 1.00.20 | Fw. 1.1.00.0B
:---|:---:|:---:|:---:|:---:
GEN3 micro inverters (single MPPT):<br>MS300, MS350, MS-400 | ❓ | ❓ | ❓ |
GEN3 micro inverters (dual MPPT):<br>MS600, MS700, MS800 | ✔️ | ✔️ | ✔️ |
GEN3 PLUS micro inverters:<br>MS1600, MS1800, MS2000 | | | | ✔️
Balcony micro inverters:<br>MS400-D, MS800-D, MS2000-D | ❓ | ❓ | ❓ | ❓
TITAN micro inverters:<br>TSOL-MP3000, MP2250, MS3000 | ❓ | ❓ | ❓ | ❓
```
Legend
 : Firmware not available for these devices
✔️: proxy support tested
❓: proxy support possible but not tested
🚧: Proxy support in preparation
```
❗The new inverters of the GEN3 Plus generation (e.g. MS-2000) use a completely different protocol for data transmission to the TSUN server. These inverters are supported from proxy version 0.6. The serial numbers of these inverters start with `Y17E` instead of `R17E`.
If you have one of these combinations with a red question mark, it would be very nice if you could send me a proxy trace so that I can carry out the detailed checks and adjust the device and system tests. [Ask here how to send a trace](https://github.com/s-allius/tsun-gen3-proxy/discussions/categories/traces-for-compatibility-check)
## License
This project is licensed under the [BSD 3-clause License](https://opensource.org/licenses/BSD-3-Clause).
@@ -132,7 +198,6 @@ Note the aiomqtt library used is based on the paho-mqtt library, which has a dua
- One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD)
- One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD)
## Versioning
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Breaking changes will only occur in major `X.0.0` releases.


@@ -1,4 +1,5 @@
tests/
**/__pycache__
*.pyc
.DS_Store
.DS_Store
build.sh

Dockerfile

@@ -7,20 +7,21 @@ ARG GID=1000
FROM python:3.12-alpine AS base
USER root
RUN apk update && \
apk upgrade
RUN apk add --no-cache su-exec
COPY --chmod=0700 ./hardening_base.sh .
RUN apk upgrade --no-cache && \
apk add --no-cache su-exec && \
./hardening_base.sh && \
rm ./hardening_base.sh
#
# second stage for building wheels packages
FROM base as builder
RUN apk add --no-cache build-base && \
python -m pip install --no-cache-dir -U pip wheel
# copy the dependencies file to the root dir and install requirements
COPY ./requirements.txt /root/
RUN python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
RUN apk add --no-cache build-base && \
python -m pip install --no-cache-dir -U pip wheel && \
python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
#
@@ -31,26 +32,32 @@ ARG VERSION
ARG UID
ARG GID
ARG LOG_LVL
ARG environment
ENV VERSION=$VERSION
ENV SERVICE_NAME=$SERVICE_NAME
ENV UID=$UID
ENV GID=$GID
ENV LOG_LVL=$LOG_LVL
ENV HOME=/home/$SERVICE_NAME
# set the working directory in the container
WORKDIR /home/$SERVICE_NAME
# update PATH environment variable
ENV HOME=/home/$SERVICE_NAME
VOLUME ["/home/$SERVICE_NAME/log", "/home/$SERVICE_NAME/config"]
# install the requirements from the wheel packages from the builder stage
# and uninstall python packages and the alpine package manager to reduce the attack surface
COPY --from=builder /root/wheels /root/wheels
COPY --chmod=0700 ./hardening_final.sh .
RUN python -m pip install --no-cache --no-index /root/wheels/* && \
rm -rf /root/wheels
rm -rf /root/wheels && \
python -m pip uninstall --yes setuptools wheel pip && \
apk --purge del apk-tools && \
./hardening_final.sh && \
rm ./hardening_final.sh
# copy the content of the local src and config directory to the working directory
COPY --chmod=0700 entrypoint.sh /root/entrypoint.sh

build.sh

@@ -22,11 +22,11 @@ fi
echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE
if [[ $1 == dev ]];then
docker build --build-arg "VERSION=${VERSION}" --build-arg "LOG_LVL=DEBUG" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
docker build --build-arg "VERSION=${VERSION}" --build-arg environment=dev --build-arg "LOG_LVL=DEBUG" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
elif [[ $1 == rc ]];then
docker build --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
docker build --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
elif [[ $1 == rel ]];then
docker build --no-cache --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
docker build --no-cache --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
docker push ghcr.io/s-allius/tsun-gen3-proxy:latest
docker push ghcr.io/s-allius/tsun-gen3-proxy:${MAJOR}
docker push ghcr.io/s-allius/tsun-gen3-proxy:${VERSION}

config.example.toml

@@ -3,6 +3,10 @@ tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids
tsun.host = 'logger.talent-monitoring.com'
tsun.port = 5005
# configuration to reach the new tsun cloud for G3 Plus inverters
solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
solarman.host = 'iot.talent-monitoring.com'
solarman.port = 10000
# mqtt broker configuration
mqtt.host = 'mqtt' # URL or IP address of the mqtt broker
@@ -15,6 +19,8 @@ mqtt.passwd = ''
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
# microinverters
inverters.allow_all = true # allow inverters, even if we have no inverter mapping
@@ -23,12 +29,15 @@ inverters.allow_all = true # allow inverters, even if we have no inverter mapp
#
# for each inverter add a block starting with [inverters."<16-digit serial number>"]
[inverters."R170000000000001"]
#node_id = '' # Optional, MQTT replacement for inverters serial number
#suggested_area = '' # Optional, suggested installation area for home-assistant
#[inverters."R17xxxxxxxxxxxx2"]
#node_id = '' # Optional, MQTT replacement for inverters serial number
#suggested_area = '' # Optional, suggested installation area for home-assistant
[inverters."Y170000000000001"]
#monitor_sn = 2000000000 # The "Monitoring SN:" can be found on a sticker enclosed with the inverter
#node_id = '' # Optional, MQTT replacement for inverters serial number
#suggested_area = '' # Optional, suggested installation place for home-assistant

entrypoint.sh

@@ -11,10 +11,12 @@ if [ "$user" = '0' ]; then
mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
if ! id $SERVICE_NAME &> /dev/null; then
echo "# create user"
addgroup --gid $GID $SERVICE_NAME 2> /dev/null
adduser -G $SERVICE_NAME -s /bin/false -D -H -g "" -u $UID $SERVICE_NAME
chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
rm -fr /usr/sbin/addgroup /usr/sbin/adduser /bin/chown
fi
chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
echo "######################################################"
echo "#"

app/hardening_base.sh (new file)

@@ -0,0 +1,19 @@
#!/bin/sh
rm -fr /var/spool/cron
rm -fr /etc/crontabs
rm -fr /etc/periodic
# Remove every user and group but root
sed -i -r '/^(root)/!d' /etc/group
sed -i -r '/^(root)/!d' /etc/passwd
# Remove init scripts since we do not use them.
rm -fr /etc/inittab
# Remove kernel tunables since we do not need them.
rm -fr /etc/sysctl*
rm -fr /etc/modprobe.d
# Remove fstab since we do not need it.
rm -f /etc/fstab

app/hardening_final.sh (new file)

@@ -0,0 +1,22 @@
#!/bin/sh
# For production images delete all unneeded admin commands and remove dangerous commands.
# addgroup, adduser and chmod will be removed in entrypoint.sh during first start
# su-exec will be needed for every restart of the container
if [ "$environment" = "production" ] ; then \
find /sbin /usr/sbin ! -type d \
-a ! -name addgroup \
-a ! -name adduser \
-a ! -name nologin \
-a ! -name su-exec \
-delete; \
find /bin /usr/bin -xdev \( \
-name chgrp -o \
-name chmod -o \
-name hexdump -o \
-name ln -o \
-name od -o \
-name strings -o \
-name su -o \
-name wget \
\) -delete \
; fi

app/proxy.svg (new file)

@@ -0,0 +1,278 @@
[Graphviz-generated SVG class diagram of the proxy. Relations visible in the file: a Singleton marker attached to Mqtt (static ha_restarts, __client, __cb_MqttIsUp; async publish() and close()), which is used by the Inverter class (cls.db_stat, cls.entity_prfx, cls.discovery_prfx, cls.proxy_node_id, cls.proxy_unique_id, cls.mqtt); IterRegistry as base of Message (server_side, header_valid, header_len, data_len, unique_id, node_id, sug_area, buffers, db, new_data; _read(), close(), inc_counter(), dec_counter()), which is specialized by Talent and SolarmanV5; Talent is used by ConnectionG3 and SolarmanV5 by ConnectionG3P, each holding an optional (0..1) remoteStream of its own type; ConnectionG3 leads to InverterG3 (__ha_restarts; async_create_remote(), close()).]
<text text-anchor="start" x="176.55" y="-101" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InverterG3P</text>
<polygon fill="none" stroke="#000000" points="142.5,-56 142.5,-88 264.5,-88 264.5,-56 142.5,-56"/>
<text text-anchor="start" x="172.9355" y="-69" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__ha_restarts</text>
<polygon fill="none" stroke="#000000" points="142.5,0 142.5,-56 264.5,-56 264.5,0 142.5,0"/>
<text text-anchor="start" x="152.1035" y="-37" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">async_create_remote()</text>
<text text-anchor="start" x="188.5025" y="-13" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
</g>
<!-- A8&#45;&gt;A12 -->
<g id="edge14" class="edge">
<title>A8&#45;&gt;A12</title>
<path fill="none" stroke="#000000" d="M129.9773,-185.0573C142.0896,-164.4551 155.802,-141.1311 168.076,-120.2539"/>
<polygon fill="none" stroke="#000000" points="126.9441,-183.3107 124.8931,-193.7052 132.9785,-186.8585 126.9441,-183.3107"/>
</g>
<!-- A9 -->
<g id="node10" class="node">
<title>A9</title>
<polygon fill="none" stroke="#000000" points="109.5,-572 109.5,-604 225.5,-604 225.5,-572 109.5,-572"/>
<text text-anchor="start" x="137.774" y="-585" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncStream</text>
<polygon fill="none" stroke="#000000" points="109.5,-492 109.5,-572 225.5,-572 225.5,-492 109.5,-492"/>
<text text-anchor="start" x="153.053" y="-553" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">reader</text>
<text text-anchor="start" x="155.283" y="-541" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">writer</text>
<text text-anchor="start" x="157.497" y="-529" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
<text text-anchor="start" x="153.053" y="-517" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">r_addr</text>
<text text-anchor="start" x="153.608" y="-505" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">l_addr</text>
<polygon fill="none" stroke="#000000" points="109.5,-364 109.5,-492 225.5,-492 225.5,-364 109.5,-364"/>
<text text-anchor="start" x="119.1575" y="-473" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">&lt;async&gt;server_loop()</text>
<text text-anchor="start" x="121.378" y="-461" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">&lt;async&gt;client_loop()</text>
<text text-anchor="start" x="139.154" y="-449" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">&lt;async&gt;loop</text>
<text text-anchor="start" x="155.282" y="-437" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">disc()</text>
<text text-anchor="start" x="152.5025" y="-425" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
<text text-anchor="start" x="132.7705" y="-401" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_read()</text>
<text text-anchor="start" x="132.221" y="-389" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_write()</text>
<text text-anchor="start" x="126.107" y="-377" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_forward()</text>
</g>
<!-- A9&#45;&gt;A7 -->
<g id="edge7" class="edge">
<title>A9&#45;&gt;A7</title>
<path fill="none" stroke="#000000" d="M230.0233,-355.7743C241.4579,-332.3236 252.7154,-309.2362 262.0696,-290.0521"/>
<polygon fill="none" stroke="#000000" points="226.8727,-354.25 225.6358,-364.7724 233.1645,-357.318 226.8727,-354.25"/>
</g>
<!-- A9&#45;&gt;A8 -->
<g id="edge8" class="edge">
<title>A9&#45;&gt;A8</title>
<path fill="none" stroke="#000000" d="M129.3389,-353.9299C122.6561,-331.1516 116.0996,-308.8044 110.6242,-290.1415"/>
<polygon fill="none" stroke="#000000" points="126.0343,-355.0988 132.208,-363.709 132.7512,-353.1281 126.0343,-355.0988"/>
</g>
<!-- A10&#45;&gt;A11 -->
<g id="edge9" class="edge">
<title>A10&#45;&gt;A11</title>
<path fill="none" stroke="#000000" d="M413.3164,-160.4648C407.1223,-146.8826 400.7985,-133.016 394.9051,-120.0931"/>
<polygon fill="none" stroke="#000000" points="410.2431,-162.1611 417.577,-169.8074 416.6121,-159.2566 410.2431,-162.1611"/>
</g>
<!-- A10&#45;&gt;A12 -->
<g id="edge10" class="edge">
<title>A10&#45;&gt;A12</title>
<path fill="none" stroke="#000000" d="M388.6131,-170.9072C388.2425,-170.6025 387.8715,-170.3001 387.5,-170 351.9061,-141.2441 336.8037,-143.4318 297.5,-120 286.7739,-113.6054 275.4857,-106.6243 264.5994,-99.7581"/>
<polygon fill="none" stroke="#000000" points="386.4756,-173.6866 396.3191,-177.6053 391.0678,-168.4034 386.4756,-173.6866"/>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 22 KiB

21
app/proxy.yuml Normal file
View File

@@ -0,0 +1,21 @@
// {type:class}
// {direction:topDown}
// {generate:true}
[note: You can stick notes on diagrams too!{bg:cornsilk}]
[Singleton]^[Mqtt|<static>ha_restarts;<static>__client;<static>__cb_MqttIsUp|<async>publish();<async>close()]
[IterRegistry||__iter__]^[Message|server_side:bool;header_valid:bool;header_len:unsigned;data_len:unsigned;unique_id;node_id;sug_area;_recv_buffer:bytearray;_send_buffer:bytearray;_forward_buffer:bytearray;db:Infos;new_data:list|_read():void<abstract>;close():void;inc_counter():void;dec_counter():void]
[Message]^[Talent|await_conn_resp_cnt;id_str;contact_name;contact_mail;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;close()]
[Message]^[SolarmanV5|control;serial;snr;switch|msg_unknown();;close()]
[Talent]^[ConnectionG3|remoteStream:ConnectionG3|close()]
[SolarmanV5]^[ConnectionG3P|remoteStream:ConnectionG3P|close()]
[AsyncStream|reader;writer;addr;r_addr;l_addr|<async>server_loop();<async>client_loop();<async>loop;disc();close();;__async_read();__async_write();__async_forward()]^[ConnectionG3]
[AsyncStream]^[ConnectionG3P]
[Inverter|cls.db_stat;cls.entity_prfx;cls.discovery_prfx;cls.proxy_node_id;cls.proxy_unique_id;cls.mqtt:Mqtt|]^[InverterG3|__ha_restarts|async_create_remote();;close()]
[Inverter]^[InverterG3P|__ha_restarts|async_create_remote();;close()]
[Mqtt]-[Inverter]
[ConnectionG3]^[InverterG3]
[ConnectionG3]has-0..1>[ConnectionG3]
[ConnectionG3P]^[InverterG3P]
[ConnectionG3P]has-0..1>[ConnectionG3P]

View File

@@ -1,95 +1,94 @@
import logging, traceback
from config import Config
#import gc
from messages import Message, hex_dump_memory
import logging
import traceback
from messages import hex_dump_memory
logger = logging.getLogger('conn')
class AsyncStream(Message):
def __init__(self, reader, writer, addr, remote_stream, server_side: bool) -> None:
super().__init__()
class AsyncStream():
def __init__(self, reader, writer, addr) -> None:
logger.debug('AsyncStream.__init__')
self.reader = reader
self.writer = writer
self.remoteStream = remote_stream
self.server_side = server_side
self.addr = addr
self.unique_id = 0
self.node_id = ''
'''
Our public methods
'''
def set_serial_no(self, serial_no : str):
logger.debug(f'SerialNo: {serial_no}')
if self.unique_id != serial_no:
inverters = Config.get('inverters')
#logger.debug(f'Inverters: {inverters}')
if serial_no in inverters:
logger.debug(f'SerialNo {serial_no} allowed!')
inv = inverters[serial_no]
self.node_id = inv['node_id']
self.sug_area = inv['suggested_area']
else:
logger.debug(f'SerialNo {serial_no} not known!')
self.node_id = ''
self.sug_area = ''
if not inverters['allow_all']:
self.unique_id = None
logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})')
return
self.r_addr = ''
self.l_addr = ''
self.unique_id = serial_no
async def server_loop(self, addr):
'''Loop for receiving messages from the inverter (server-side)'''
logging.info(f'Accept connection from {addr}')
self.inc_counter('Inverter_Cnt')
await self.loop()
self.dec_counter('Inverter_Cnt')
logging.info(f'Server loop stopped for r{self.r_addr}')
# if the server connection closes, we also have to disconnect
# the connection to the TSUN cloud
if self.remoteStream:
logging.debug("disconnect client connection")
self.remoteStream.disc()
try:
await self._async_publ_mqtt_proxy_stat('proxy')
except Exception:
pass
async def client_loop(self, addr):
'''Loop for receiving messages from the TSUN cloud (client-side)'''
clientStream = await self.remoteStream.loop()
logging.info(f'Client loop stopped for l{clientStream.l_addr}')
# if the client connection closes, we don't touch the server
# connection. Instead we erase the client connection stream,
# thus on the next received packet from the inverter, we can
# establish a new connection to the TSUN cloud
# erase backlink to inverter
clientStream.remoteStream = None
if self.remoteStream == clientStream:
# logging.debug(f'Client l{clientStream.l_addr} refs:'
# f' {gc.get_referrers(clientStream)}')
# then erase the client connection
self.remoteStream = None
async def loop(self):
self.r_addr = self.writer.get_extra_info('peername')
self.l_addr = self.writer.get_extra_info('sockname')
async def loop(self) -> None:
while True:
try:
await self.__async_read()
if self.id_str:
self.set_serial_no(self.id_str.decode("utf-8"))
if self.unique_id:
await self.__async_write()
await self.__async_read()
if self.unique_id:
await self.__async_write()
await self.__async_forward()
await self.async_publ_mqtt()
except (ConnectionResetError,
ConnectionAbortedError,
BrokenPipeError,
RuntimeError) as error:
logger.warning(f'In loop for {self.addr}: {error}')
logger.warning(f'In loop for l{self.l_addr} | '
f'r{self.r_addr}: {error}')
self.close()
return
return self
except Exception:
self.inc_counter('SW_Exception')
logger.error(
f"Exception for {self.addr}:\n"
f"{traceback.format_exc()}")
self.close()
return
return self
def disc(self) -> None:
logger.debug(f'in AsyncStream.disc() {self.addr}')
logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}')
self.writer.close()
def close(self):
logger.debug(f'in AsyncStream.close() {self.addr}')
logger.debug(f'AsyncStream.close() l{self.l_addr} | r{self.r_addr}')
self.writer.close()
super().close() # call close handler in the parent class
# logger.info (f'AsyncStream refs: {gc.get_referrers(self)}')
'''
Our private methods
'''
@@ -100,33 +99,32 @@ class AsyncStream(Message):
self.read() # call read in parent class
else:
raise RuntimeError("Peer closed.")
async def __async_write(self) -> None:
if self._send_buffer:
hex_dump_memory(logging.INFO, f'Transmit to {self.addr}:', self._send_buffer, len(self._send_buffer))
hex_dump_memory(logging.INFO, f'Transmit to {self.addr}:',
self._send_buffer, len(self._send_buffer))
self.writer.write(self._send_buffer)
await self.writer.drain()
self._send_buffer = bytearray(0) #self._send_buffer[sent:]
self._send_buffer = bytearray(0) # self._send_buffer[sent:]
async def __async_forward(self) -> None:
if self._forward_buffer:
if not self.remoteStream:
await self.async_create_remote() # only implemented for server side => syncServerStream
await self.async_create_remote()
if self.remoteStream:
if self.remoteStream._init_new_client_conn():
await self.remoteStream.__async_write()
if self.remoteStream:
hex_dump_memory(logging.DEBUG, f'Forward to {self.remoteStream.addr}:', self._forward_buffer, len(self._forward_buffer))
self.remoteStream.writer.write (self._forward_buffer)
await self.remoteStream.writer.drain()
hex_dump_memory(logging.INFO,
f'Forward to {self.remoteStream.addr}:',
self._forward_buffer,
len(self._forward_buffer))
self.remoteStream.writer.write(self._forward_buffer)
await self.remoteStream.writer.drain()
self._forward_buffer = bytearray(0)
async def async_create_remote(self) -> None:
pass
async def async_publ_mqtt(self) -> None:
pass
def __del__ (self):
logging.debug (f"AsyncStream.__del__ {self.addr}")
def __del__(self):
logger.debug(
f"AsyncStream.__del__ l{self.l_addr} | r{self.r_addr}")

View File

@@ -1,76 +1,98 @@
'''Config module handles the proxy configuration in the config.toml file'''
import shutil, tomllib, logging
import shutil
import tomllib
import logging
from schema import Schema, And, Use, Optional
class Config():
'''Static class Config reads and sanitizes the config.
Read config.toml file and sanitize it with read().
'''Static class Config reads and sanitizes the config.
Read config.toml file and sanitize it with read().
Get named parts of the config with get()'''
config = {}
conf_schema = Schema({ 'tsun': {
'enabled': Use(bool),
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535)},
conf_schema = Schema({
'tsun': {
'enabled': Use(bool),
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535)
},
'solarman': {
'enabled': Use(bool),
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535)
},
'mqtt': {
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535),
'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)),
'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None))
},
'ha': {
'auto_conf_prefix': Use(str),
'discovery_prefix': Use(str),
'entity_prefix': Use(str),
'proxy_node_id': Use(str),
'proxy_unique_id': Use(str)
},
'inverters': {
'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): {
Optional('monitor_sn', default=0): Use(int),
Optional('node_id', default=""): And(Use(str),
Use(lambda s: s + '/'
if len(s) > 0 and
s[-1] != '/' else s)),
'mqtt': {
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535),
'user': And(Use(str), Use(lambda s: s if len(s) >0 else None)),
'passwd': And(Use(str), Use(lambda s: s if len(s) >0 else None))},
'ha': {
'auto_conf_prefix': Use(str),
'discovery_prefix': Use(str),
'entity_prefix': Use(str)},
'inverters': {
'allow_all' : Use(bool),
And(Use(str), lambda s: len(s) == 16 ): {
Optional('node_id', default=""): And(Use(str),Use(lambda s: s +'/' if len(s)> 0 and s[-1] != '/' else s)),
Optional('suggested_area', default=""): Use(str)
}}
}, ignore_extra_keys=True)
Optional('suggested_area', default=""): Use(str)
}}
}, ignore_extra_keys=True
)
@classmethod
def read(cls) -> None:
'''Read config file, merge it with the default config and sanitize the result'''
'''Read config file, merge it with the default config
and sanitize the result'''
config = {}
logger = logging.getLogger('data')
try:
# make the default config transparent by copying it into the config.example file
# make the default config transparent by copying it
# into the config.example file
shutil.copy2("default_config.toml", "config/config.example.toml")
# read example config file as default configuration
with open("default_config.toml", "rb") as f:
def_config = tomllib.load(f)
# overwrite the default values, with values from the config.toml file
# overwrite the default values, with values from
# the config.toml file
with open("config/config.toml", "rb") as f:
usr_config = tomllib.load(f)
config['tsun'] = def_config['tsun'] | usr_config['tsun']
config['mqtt'] = def_config['mqtt'] | usr_config['mqtt']
config['ha'] = def_config['ha'] | usr_config['ha']
config['inverters'] = def_config['inverters'] | usr_config['inverters']
config['tsun'] = def_config['tsun'] | usr_config['tsun']
config['solarman'] = def_config['solarman'] | \
usr_config['solarman']
config['mqtt'] = def_config['mqtt'] | usr_config['mqtt']
config['ha'] = def_config['ha'] | usr_config['ha']
config['inverters'] = def_config['inverters'] | \
usr_config['inverters']
cls.config = cls.conf_schema.validate(config)
#logging.debug(f'Read config: "{cls.config}" ')
# logging.debug(f'Read config: "{cls.config}" ')
except Exception as error:
logger.error(f'Config.read: {error}')
cls.config = {}
@classmethod
def get(cls, member:str = None):
'''Get a named attribute from the proxy config. If member == None it returns the complete config dict'''
def get(cls, member: str = None):
'''Get a named attribute from the proxy config. If member ==
None it returns the complete config dict'''
if member:
return cls.config.get(member, {})
else:
return cls.config
return cls.config
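
A short usage sketch of the validated config, assuming default_config.toml and config/config.toml exist in the working directory as they do in the proxy image; the accessed fields follow the schema above:

from config import Config

Config.read()                          # merge defaults with config/config.toml
mqtt = Config.get('mqtt')              # host, port, user, passwd
inverters = Config.get('inverters')    # 'allow_all' flag plus 16-char serial entries
for serial, inv in inverters.items():
    if serial == 'allow_all':
        continue
    print(serial, inv['node_id'], inv['suggested_area'], inv.get('monitor_sn'))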

View File

@@ -0,0 +1,36 @@
import logging
# import gc
from async_stream import AsyncStream
from gen3.talent import Talent
logger = logging.getLogger('conn')
class ConnectionG3(AsyncStream, Talent):
def __init__(self, reader, writer, addr, remote_stream, server_side: bool,
id_str=b'') -> None:
AsyncStream.__init__(self, reader, writer, addr)
Talent.__init__(self, server_side, id_str)
self.remoteStream = remote_stream
'''
Our public methods
'''
def close(self):
AsyncStream.close(self)
Talent.close(self)
# logger.info(f'AsyncStream refs: {gc.get_referrers(self)}')
async def async_create_remote(self) -> None:
pass
async def async_publ_mqtt(self) -> None:
pass
'''
Our private methods
'''
def __del__(self):
super().__del__()

167
app/src/gen3/infos_g3.py Normal file
View File

@@ -0,0 +1,167 @@
import struct
import logging
from typing import Generator
if __name__ == "app.src.gen3.infos_g3":
from app.src.infos import Infos, Register
else: # pragma: no cover
from infos import Infos, Register
class RegisterMap:
map = {
0x00092ba8: Register.COLLECTOR_FW_VERSION,
0x000927c0: Register.CHIP_TYPE,
0x00092f90: Register.CHIP_MODEL,
0x00095a88: Register.TRACE_URL,
0x00095aec: Register.LOGGER_URL,
0x0000000a: Register.PRODUCT_NAME,
0x00000014: Register.MANUFACTURER,
0x0000001e: Register.VERSION,
0x00000028: Register.SERIAL_NUMBER,
0x00000032: Register.EQUIPMENT_MODEL,
0x00013880: Register.NO_INPUTS,
0xffffff00: Register.INVERTER_CNT,
0xffffff01: Register.UNKNOWN_SNR,
0xffffff02: Register.UNKNOWN_MSG,
0xffffff03: Register.INVALID_DATA_TYPE,
0xffffff04: Register.INTERNAL_ERROR,
0xffffff05: Register.UNKNOWN_CTRL,
0xffffff06: Register.OTA_START_MSG,
0xffffff07: Register.SW_EXCEPTION,
0xfffffffe: Register.TEST_REG1,
0xffffffff: Register.TEST_REG2,
0x00000640: Register.OUTPUT_POWER,
0x000005dc: Register.RATED_POWER,
0x00000514: Register.INVERTER_TEMP,
0x000006a4: Register.PV1_VOLTAGE,
0x00000708: Register.PV1_CURRENT,
0x0000076c: Register.PV1_POWER,
0x000007d0: Register.PV2_VOLTAGE,
0x00000834: Register.PV2_CURRENT,
0x00000898: Register.PV2_POWER,
0x000008fc: Register.PV3_VOLTAGE,
0x00000960: Register.PV3_CURRENT,
0x000009c4: Register.PV3_POWER,
0x00000a28: Register.PV4_VOLTAGE,
0x00000a8c: Register.PV4_CURRENT,
0x00000af0: Register.PV4_POWER,
0x00000c1c: Register.PV1_DAILY_GENERATION,
0x00000c80: Register.PV1_TOTAL_GENERATION,
0x00000ce4: Register.PV2_DAILY_GENERATION,
0x00000d48: Register.PV2_TOTAL_GENERATION,
0x00000dac: Register.PV3_DAILY_GENERATION,
0x00000e10: Register.PV3_TOTAL_GENERATION,
0x00000e74: Register.PV4_DAILY_GENERATION,
0x00000ed8: Register.PV4_TOTAL_GENERATION,
0x00000b54: Register.DAILY_GENERATION,
0x00000bb8: Register.TOTAL_GENERATION,
0x000003e8: Register.GRID_VOLTAGE,
0x0000044c: Register.GRID_CURRENT,
0x000004b0: Register.GRID_FREQUENCY,
0x000cfc38: Register.CONNECT_COUNT,
0x000c3500: Register.SIGNAL_STRENGTH,
0x000c96a8: Register.POWER_ON_TIME,
0x000d0020: Register.COLLECT_INTERVAL,
0x000cf850: Register.DATA_UP_INTERVAL,
0x000c7f38: Register.COMMUNICATION_TYPE,
0x00000191: Register.EVENT_401,
0x00000192: Register.EVENT_402,
0x00000193: Register.EVENT_403,
0x00000194: Register.EVENT_404,
0x00000195: Register.EVENT_405,
0x00000196: Register.EVENT_406,
0x00000197: Register.EVENT_407,
0x00000198: Register.EVENT_408,
0x00000199: Register.EVENT_409,
0x0000019a: Register.EVENT_410,
0x0000019b: Register.EVENT_411,
0x0000019c: Register.EVENT_412,
0x0000019d: Register.EVENT_413,
0x0000019e: Register.EVENT_414,
0x0000019f: Register.EVENT_415,
0x000001a0: Register.EVENT_416,
}
class InfosG3(Infos):
def ha_confs(self, ha_prfx: str, node_id: str, snr: str,
sug_area: str = '') \
-> Generator[tuple[dict, str], None, None]:
'''Generator function yields a json register struct for home-assistant
auto configuration and a unique entity string
arguments:
prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
snr:str ==> serial number of the inverter, used to build unique
entity strings
sug_area:str ==> suggested area string from the config file'''
# iterate over RegisterMap.map and get the register values
for reg in RegisterMap.map.values():
res = self.ha_conf(reg, ha_prfx, node_id, snr, False, sug_area) # noqa: E501
if res:
yield res
def parse(self, buf, ind=0) -> Generator[tuple[str, bool], None, None]:
'''parse a data sequence received from the inverter and
stores the values in Infos.db
buf: buffer of the sequence to parse'''
result = struct.unpack_from('!l', buf, ind)
elms = result[0]
i = 0
ind += 4
while i < elms:
result = struct.unpack_from('!lB', buf, ind)
addr = result[0]
if addr not in RegisterMap.map:
info_id = -1
else:
info_id = RegisterMap.map[addr]
data_type = result[1]
ind += 5
if data_type == 0x54: # 'T' -> Pascal-String
str_len = buf[ind]
result = struct.unpack_from(f'!{str_len+1}p', buf,
ind)[0].decode(encoding='ascii',
errors='replace')
ind += str_len+1
elif data_type == 0x49: # 'I' -> int32
result = struct.unpack_from('!l', buf, ind)[0]
ind += 4
elif data_type == 0x53: # 'S' -> short
result = struct.unpack_from('!h', buf, ind)[0]
ind += 2
elif data_type == 0x46: # 'F' -> float32
result = round(struct.unpack_from('!f', buf, ind)[0], 2)
ind += 4
elif data_type == 0x4c: # 'L' -> int64
result = struct.unpack_from('!q', buf, ind)[0]
ind += 8
else:
self.inc_counter('Invalid_Data_Type')
logging.error(f"Infos.parse: data_type: {data_type}"
" not supported")
return
keys, level, unit, must_incr = self._key_obj(info_id)
if keys:
name, update = self.update_db(keys, must_incr, result)
yield keys[0], update
else:
update = False
name = str(f'info-id.0x{addr:x}')
self.tracer.log(level, f'{name} : {result}{unit}'
f' update: {update}')
i += 1
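
For illustration, a tiny self-test of the GEN3 value encoding that parse() expects: a big-endian element count, then per element a 32-bit register address, a type byte and the value. It assumes the snippet is run from app/src so the fallback imports resolve; the register address is the OUTPUT_POWER entry from the map above:

import struct
from gen3.infos_g3 import InfosG3

buf = struct.pack('!l', 1)                      # one element follows
buf += struct.pack('!lB', 0x00000640, 0x46)     # OUTPUT_POWER, type 'F' (float32)
buf += struct.pack('!f', 123.4)

db = InfosG3()
for key, updated in db.parse(buf):              # yields the db section that changed
    print(key, updated)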

126
app/src/gen3/inverter_g3.py Normal file
View File

@@ -0,0 +1,126 @@
import asyncio
import logging
import traceback
import json
from config import Config
from inverter import Inverter
from gen3.connection_g3 import ConnectionG3
from aiomqtt import MqttCodeError
from infos import Infos
# import gc
# logger = logging.getLogger('conn')
logger_mqtt = logging.getLogger('mqtt')
class InverterG3(Inverter, ConnectionG3):
'''class InverterG3 is a derivation of the Inverter and ConnectionG3 classes.
The class has some class methods for managing common resources, like the
connection to the MQTT broker or the proxy error counters, which are common
to all inverter connections.
Instances of the class are connections to an inverter and can have an
optional link to a remote connection to the TSUN cloud. A remote
connection dies with the inverter connection.
class methods:
class_init(): initialize the common resources of the proxy (MQTT
broker, Proxy DB, etc). Must be called before the
first inverter instance can be created
class_close(): release the common resources of the proxy. Should not
be called before any instances of the class are
destroyed
methods:
server_loop(addr): Async loop method for receiving messages from the
inverter (server-side)
client_loop(addr): Async loop method for receiving messages from the
TSUN cloud (client-side)
async_create_remote(): Establish a client connection to the TSUN cloud
async_publ_mqtt(): Publish data to MQTT broker
close(): Release method which must be called before an instance can be
destroyed
'''
def __init__(self, reader, writer, addr):
super().__init__(reader, writer, addr, None, True)
self.__ha_restarts = -1
async def async_create_remote(self) -> None:
'''Establish a client connection to the TSUN cloud'''
tsun = Config.get('tsun')
host = tsun['host']
port = tsun['port']
addr = (host, port)
try:
logging.info(f'Connected to {addr}')
connect = asyncio.open_connection(host, port)
reader, writer = await connect
self.remoteStream = ConnectionG3(reader, writer, addr, self,
False, self.id_str)
asyncio.create_task(self.client_loop(addr))
except (ConnectionRefusedError, TimeoutError) as error:
logging.info(f'{error}')
except Exception:
self.inc_counter('SW_Exception')
logging.error(
f"Inverter: Exception for {addr}:\n"
f"{traceback.format_exc()}")
async def async_publ_mqtt(self) -> None:
'''publish data to MQTT broker'''
# check if new inverter or collector infos are available or when the
# home assistant has changed the status back to online
try:
if (('inverter' in self.new_data and self.new_data['inverter'])
or ('collector' in self.new_data and
self.new_data['collector'])
or self.mqtt.ha_restarts != self.__ha_restarts):
await self._register_proxy_stat_home_assistant()
await self.__register_home_assistant()
self.__ha_restarts = self.mqtt.ha_restarts
for key in self.new_data:
await self.__async_publ_mqtt_packet(key)
for key in Infos.new_stat_data:
await self._async_publ_mqtt_proxy_stat(key)
except MqttCodeError as error:
logging.error(f'Mqtt except: {error}')
except Exception:
self.inc_counter('SW_Exception')
logging.error(
f"Inverter: Exception:\n"
f"{traceback.format_exc()}")
async def __async_publ_mqtt_packet(self, key):
db = self.db.db
if key in db and self.new_data[key]:
data_json = json.dumps(db[key])
node_id = self.node_id
logger_mqtt.debug(f'{key}: {data_json}')
await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501
self.new_data[key] = False
async def __register_home_assistant(self) -> None:
'''register all our topics at home assistant'''
for data_json, component, node_id, id in self.db.ha_confs(
self.entity_prfx, self.node_id, self.unique_id,
self.sug_area):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
f" node_id:'{node_id}' {data_json}")
await self.mqtt.publish(f"{self.discovery_prfx}{component}"
f"/{node_id}{id}/config", data_json)
def close(self) -> None:
logging.debug(f'InverterG3.close() l{self.l_addr} | r{self.r_addr}')
super().close() # call close handler in the parent class
# logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
def __del__(self):
logging.debug("InverterG3.__del__")
super().__del__()
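
The publish calls above produce two kinds of topics: data topics built from entity_prfx/node_id/key and Home Assistant discovery topics built from discovery_prfx/component/node_id/unique-id. The prefixes and ids below are purely illustrative, not values from this changeset:

entity_prfx, discovery_prfx = 'tsun/', 'homeassistant/'
node_id, key = 'garage/', 'inverter'
component, uid = 'sensor', 'out_power_123'

print(f'{entity_prfx}{node_id}{key}')                        # tsun/garage/inverter
print(f'{discovery_prfx}{component}/{node_id}{uid}/config')  # homeassistant/sensor/garage/out_power_123/config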

353
app/src/gen3/talent.py Normal file
View File

@@ -0,0 +1,353 @@
import struct
import logging
import time
from datetime import datetime
if __name__ == "app.src.gen3.talent":
from app.src.messages import hex_dump_memory, Message
from app.src.config import Config
from app.src.gen3.infos_g3 import InfosG3
else: # pragma: no cover
from messages import hex_dump_memory, Message
from config import Config
from gen3.infos_g3 import InfosG3
logger = logging.getLogger('msg')
class Control:
def __init__(self, ctrl: int):
self.ctrl = ctrl
def __int__(self) -> int:
return self.ctrl
def is_ind(self) -> bool:
return (self.ctrl == 0x91)
def is_req(self) -> bool:
return (self.ctrl == 0x70)
def is_resp(self) -> bool:
return (self.ctrl == 0x99)
class Talent(Message):
def __init__(self, server_side: bool, id_str=b''):
super().__init__(server_side)
self.await_conn_resp_cnt = 0
self.id_str = id_str
self.contact_name = b''
self.contact_mail = b''
self.db = InfosG3()
self.switch = {
0x00: self.msg_contact_info,
0x13: self.msg_ota_update,
0x22: self.msg_get_time,
0x71: self.msg_collector_data,
0x04: self.msg_inverter_data,
}
'''
Our public methods
'''
def close(self) -> None:
logging.debug('Talent.close()')
# we have references to methods of this class in self.switch
# so we have to erase self.switch, otherwise this instance can't be
# deallocated by the garbage collector ==> we get a memory leak
self.switch.clear()
def set_serial_no(self, serial_no: str):
if self.unique_id == serial_no:
logger.debug(f'SerialNo: {serial_no}')
else:
inverters = Config.get('inverters')
# logger.debug(f'Inverters: {inverters}')
if serial_no in inverters:
inv = inverters[serial_no]
self.node_id = inv['node_id']
self.sug_area = inv['suggested_area']
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501
else:
self.node_id = ''
self.sug_area = ''
if 'allow_all' not in inverters or not inverters['allow_all']:
self.inc_counter('Unknown_SNR')
self.unique_id = None
logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})') # noqa: E501
return
logger.debug(f'SerialNo {serial_no} not known but accepted!')
self.unique_id = serial_no
def read(self) -> None:
self._read()
if not self.header_valid:
self.__parse_header(self._recv_buffer, len(self._recv_buffer))
if self.header_valid and len(self._recv_buffer) >= (self.header_len +
self.data_len):
hex_dump_memory(logging.INFO, f'Received from {self.addr}:',
self._recv_buffer, self.header_len+self.data_len)
self.set_serial_no(self.id_str.decode("utf-8"))
self.__dispatch_msg()
self.__flush_recv_msg()
return
def forward(self, buffer, buflen) -> None:
tsun = Config.get('tsun')
if tsun['enabled']:
self._forward_buffer = buffer[:buflen]
hex_dump_memory(logging.DEBUG, 'Store for forwarding:',
buffer, buflen)
self.__parse_header(self._forward_buffer,
len(self._forward_buffer))
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'forwrd') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
return
def _init_new_client_conn(self) -> bool:
contact_name = self.contact_name
contact_mail = self.contact_mail
logger.info(f'name: {contact_name} mail: {contact_mail}')
self.msg_id = 0
self.await_conn_resp_cnt += 1
self.__build_header(0x91)
self._send_buffer += struct.pack(f'!{len(contact_name)+1}p'
f'{len(contact_mail)+1}p',
contact_name, contact_mail)
self.__finish_send_msg()
return True
'''
Our private methods
'''
def __flow_str(self, server_side: bool, type: str): # noqa: F821
switch = {
'rx': ' <',
'tx': ' >',
'forwrd': '<< ',
'drop': ' xx',
'rxS': '> ',
'txS': '< ',
'forwrdS': ' >>',
'dropS': 'xx ',
}
if server_side:
type += 'S'
return switch.get(type, '???')
def _timestamp(self): # pragma: no cover
if False:
# utc as epoche
ts = time.time()
else:
# convert localtime in epoche
ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
return round(ts*1000)
# check if there is a complete header in the buffer, parse it
# and set
# self.header_len
# self.data_len
# self.id_str
# self.ctrl
# self.msg_id
#
# if the header is incomplete, then self.header_len is still 0
#
def __parse_header(self, buf: bytes, buf_len: int) -> None:
if (buf_len < 5): # enough bytes to read len and id_len?
return
result = struct.unpack_from('!lB', buf, 0)
len = result[0] # len of complete message
id_len = result[1] # len of variable id string
hdr_len = 5+id_len+2
if (buf_len < hdr_len): # enough bytes for complete header?
return
result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4)
# store parsed header values in the class
self.id_str = result[0]
self.ctrl = Control(result[1])
self.msg_id = result[2]
self.data_len = len-id_len-3
self.header_len = hdr_len
self.header_valid = True
return
def __build_header(self, ctrl) -> None:
self.send_msg_ofs = len(self._send_buffer)
self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB',
0, self.id_str, ctrl, self.msg_id)
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'tx') +
f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}')
def __finish_send_msg(self) -> None:
_len = len(self._send_buffer) - self.send_msg_ofs
struct.pack_into('!l', self._send_buffer, self.send_msg_ofs, _len-4)
def __dispatch_msg(self) -> None:
fnc = self.switch.get(self.msg_id, self.msg_unknown)
if self.unique_id:
logger.info(self.__flow_str(self.server_side, 'rx') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
fnc()
else:
logger.info(self.__flow_str(self.server_side, 'drop') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
def __flush_recv_msg(self) -> None:
self._recv_buffer = self._recv_buffer[(self.header_len+self.data_len):]
self.header_valid = False
'''
Message handler methods
'''
def msg_contact_info(self):
if self.ctrl.is_ind():
if self.server_side and self.__process_contact_info():
self.__build_header(0x91)
self._send_buffer += b'\x01'
self.__finish_send_msg()
# don't forward this contact info here, we will build one
# when the remote connection is established
elif self.await_conn_resp_cnt > 0:
self.await_conn_resp_cnt -= 1
else:
self.forward(self._recv_buffer, self.header_len+self.data_len)
return
else:
logger.warning('Unknown Ctrl')
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def __process_contact_info(self) -> bool:
result = struct.unpack_from('!B', self._recv_buffer, self.header_len)
name_len = result[0]
if self.data_len < name_len+2:
return False
result = struct.unpack_from(f'!{name_len+1}pB', self._recv_buffer,
self.header_len)
self.contact_name = result[0]
mail_len = result[1]
logger.info(f'name: {self.contact_name}')
result = struct.unpack_from(f'!{mail_len+1}p', self._recv_buffer,
self.header_len+name_len+1)
self.contact_mail = result[0]
logger.info(f'mail: {self.contact_mail}')
return True
def msg_get_time(self):
tsun = Config.get('tsun')
if tsun['enabled']:
if self.ctrl.is_ind():
if self.data_len >= 8:
ts = self._timestamp()
result = struct.unpack_from('!q', self._recv_buffer,
self.header_len)
logger.debug(f'tsun-time: {result[0]:08x}'
f' proxy-time: {ts:08x}')
else:
logger.warning('Unknown Ctrl')
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
else:
if self.ctrl.is_ind():
if self.data_len == 0:
ts = self._timestamp()
logger.debug(f'time: {ts:08x}')
self.__build_header(0x91)
self._send_buffer += struct.pack('!q', ts)
self.__finish_send_msg()
else:
logger.warning('Unknown Ctrl')
self.inc_counter('Unknown_Ctrl')
def parse_msg_header(self):
result = struct.unpack_from('!lB', self._recv_buffer, self.header_len)
data_id = result[0] # data ID of the message
id_len = result[1] # len of variable id string
logger.debug(f'Data_ID: {data_id} id_len: {id_len}')
msg_hdr_len = 5+id_len+9
result = struct.unpack_from(f'!{id_len+1}pBq', self._recv_buffer,
self.header_len + 4)
logger.debug(f'ID: {result[0]} B: {result[1]}')
logger.debug(f'time: {result[2]:08x}')
# logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime(
# "%Y-%m-%d %H:%M:%S")}')
return msg_hdr_len
def msg_collector_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
self.__process_data()
elif self.ctrl.is_resp():
return # ignore received response
else:
logger.warning('Unknown Ctrl')
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def msg_inverter_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
self.__process_data()
elif self.ctrl.is_resp():
return # ignore received response
else:
logger.warning('Unknown Ctrl')
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def __process_data(self):
msg_hdr_len = self.parse_msg_header()
for key, update in self.db.parse(self._recv_buffer, self.header_len
+ msg_hdr_len):
if update:
self.new_data[key] = True
def msg_ota_update(self):
if self.ctrl.is_req():
self.inc_counter('OTA_Start_Msg')
elif self.ctrl.is_ind():
pass
else:
logger.warning('Unknown Ctrl')
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def msg_unknown(self):
logger.warning(f"Unknow Msg: ID:{self.msg_id}")
self.inc_counter('Unknown_Msg')
self.forward(self._recv_buffer, self.header_len+self.data_len)
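
The GEN3 (Talent) frame that __parse_header() decodes is a 4-byte message length that does not count the length field itself, a Pascal-style id string, a control byte and a message id, followed by the payload. A small sketch that packs such a frame the same way __build_header() and __finish_send_msg() do; the serial number is just an example value:

import struct

id_str = b'R170000000000001'          # example 16-character serial
ctrl, msg_id = 0x91, 0x22             # indication control byte, get-time message
payload = struct.pack('!q', 0)        # 8-byte timestamp field

frame = struct.pack(f'!l{len(id_str)+1}pBB',
                    len(id_str) + 3 + len(payload),   # the value __finish_send_msg() writes
                    id_str, ctrl, msg_id) + payload
header_len = 5 + len(id_str) + 2      # as computed by __parse_header()
data_len = len(frame) - header_len    # == len(payload)
print(header_len, data_len, frame.hex())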

View File

@@ -0,0 +1,36 @@
import logging
# import gc
from async_stream import AsyncStream
from gen3plus.solarman_v5 import SolarmanV5
logger = logging.getLogger('conn')
class ConnectionG3P(AsyncStream, SolarmanV5):
def __init__(self, reader, writer, addr, remote_stream,
server_side: bool) -> None:
AsyncStream.__init__(self, reader, writer, addr)
SolarmanV5.__init__(self, server_side)
self.remoteStream = remote_stream
'''
Our public methods
'''
def close(self):
AsyncStream.close(self)
SolarmanV5.close(self)
# logger.info(f'AsyncStream refs: {gc.get_referrers(self)}')
async def async_create_remote(self) -> None:
pass
async def async_publ_mqtt(self) -> None:
pass
'''
Our private methods
'''
def __del__(self):
super().__del__()

View File

@@ -0,0 +1,121 @@
import struct
from typing import Generator
if __name__ == "app.src.gen3plus.infos_g3p":
from app.src.infos import Infos, Register
else: # pragma: no cover
from infos import Infos, Register
class RegisterMap:
# make the class read/only by using __slots__
__slots__ = ()
map = {
# 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': '<L'}, # noqa: E501
0x41020018: {'reg': Register.DATA_UP_INTERVAL, 'fmt': '!B', 'ratio': 60}, # noqa: E501
0x41020019: {'reg': Register.COLLECT_INTERVAL, 'fmt': '!B', 'ratio': 1}, # noqa: E501
0x4102001a: {'reg': Register.HEARTBEAT_INTERVAL, 'fmt': '!B', 'ratio': 1}, # noqa: E501
0x4102001c: {'reg': Register.SIGNAL_STRENGTH, 'fmt': '!B', 'ratio': 1}, # noqa: E501
0x4102001e: {'reg': Register.COLLECTOR_FW_VERSION, 'fmt': '!40s'}, # noqa: E501
0x4102004c: {'reg': Register.IP_ADRESS, 'fmt': '!16s'}, # noqa: E501
0x41020064: {'reg': Register.VERSION, 'fmt': '!40s'}, # noqa: E501
0x4201001c: {'reg': Register.POWER_ON_TIME, 'fmt': '!H', 'ratio': 1}, # noqa: E501
0x42010020: {'reg': Register.SERIAL_NUMBER, 'fmt': '!16s'}, # noqa: E501
0x420100d2: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100d4: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100d6: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
# 0x420100d8: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'eval': '(result-32)/1.8'}, # noqa: E501
0x420100d8: {'reg': Register.INVERTER_TEMP, 'fmt': '!H'}, # noqa: E501
0x420100dc: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501
0x420100de: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e0: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e2: {'reg': Register.PV1_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100e4: {'reg': Register.PV1_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e6: {'reg': Register.PV2_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100e8: {'reg': Register.PV2_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100ea: {'reg': Register.PV2_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100ec: {'reg': Register.PV3_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100ee: {'reg': Register.PV3_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100f0: {'reg': Register.PV3_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100f2: {'reg': Register.PV4_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100f4: {'reg': Register.PV4_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100f6: {'reg': Register.PV4_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
0x420100f8: {'reg': Register.DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x420100fa: {'reg': Register.TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x420100fe: {'reg': Register.PV1_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x42010100: {'reg': Register.PV1_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x42010104: {'reg': Register.PV2_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x42010106: {'reg': Register.PV2_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x4201010a: {'reg': Register.PV3_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x4201010c: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x42010110: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
0x42010112: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
0x42010126: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501
0x42010170: {'reg': Register.NO_INPUTS, 'fmt': '!B'}, # noqa: E501
}
class InfosG3P(Infos):
def __init__(self):
super().__init__()
self.set_db_def_value(Register.MANUFACTURER, 'TSUN')
self.set_db_def_value(Register.EQUIPMENT_MODEL, 'TSOL-MSxx00')
self.set_db_def_value(Register.CHIP_TYPE, 'IGEN TECH')
def ha_confs(self, ha_prfx: str, node_id: str, snr: str,
sug_area: str = '') \
-> Generator[tuple[dict, str], None, None]:
'''Generator function yields a json register struct for home-assistant
auto configuration and a unique entity string
arguments:
prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
snr:str ==> serial number of the inverter, used to build unique
entity strings
sug_area:str ==> suggested area string from the config file'''
# iterate over RegisterMap.map and get the register values
for row in RegisterMap.map.values():
info_id = row['reg']
res = self.ha_conf(info_id, ha_prfx, node_id, snr, False, sug_area) # noqa: E501
if res:
yield res
def parse(self, buf, msg_type: int, rcv_ftype: int) \
-> Generator[tuple[str, bool], None, None]:
'''parse a data sequence received from the inverter and
stores the values in Infos.db
buf: buffer of the sequence to parse'''
for idx, row in RegisterMap.map.items():
addr = idx & 0xffff
ftype = (idx >> 16) & 0xff
mtype = (idx >> 24) & 0xff
if ftype != rcv_ftype or mtype != msg_type:
continue
if isinstance(row, dict):
info_id = row['reg']
fmt = row['fmt']
res = struct.unpack_from(fmt, buf, addr)
result = res[0]
if isinstance(result, (bytearray, bytes)):
result = result.decode('utf-8')
if 'eval' in row:
result = eval(row['eval'])
if 'ratio' in row:
result = round(result * row['ratio'], 2)
keys, level, unit, must_incr = self._key_obj(info_id)
if keys:
name, update = self.update_db(keys, must_incr, result)
yield keys[0], update
else:
name = str(f'info-id.0x{addr:x}')
update = False
self.tracer.log(level, f'{name} : {result}{unit}'
f' update: {update}')
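
The 32-bit keys of RegisterMap.map pack three things that parse() splits apart: the Solarman message type in the high byte, the frame type and the byte offset of the value inside the frame. Decoding the OUTPUT_POWER key from the map above:

idx = 0x420100de                  # OUTPUT_POWER entry
addr = idx & 0xffff               # 0x00de -> offset for struct.unpack_from()
ftype = (idx >> 16) & 0xff        # 0x01   -> frame type
mtype = (idx >> 24) & 0xff        # 0x42   -> message type (0x4210 >> 8)
print(hex(mtype), hex(ftype), hex(addr))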

View File

@@ -0,0 +1,126 @@
import asyncio
import logging
import traceback
import json
from config import Config
from inverter import Inverter
from gen3plus.connection_g3p import ConnectionG3P
from aiomqtt import MqttCodeError
from infos import Infos
# import gc
# logger = logging.getLogger('conn')
logger_mqtt = logging.getLogger('mqtt')
class InverterG3P(Inverter, ConnectionG3P):
'''class InverterG3P is a derivation of the Inverter and ConnectionG3P classes.
The class has some class methods for managing common resources, like the
connection to the MQTT broker or the proxy error counters, which are common
to all inverter connections.
Instances of the class are connections to an inverter and can have an
optional link to a remote connection to the TSUN cloud. A remote
connection dies with the inverter connection.
class methods:
class_init(): initialize the common resources of the proxy (MQTT
broker, Proxy DB, etc). Must be called before the
first inverter instance can be created
class_close(): release the common resources of the proxy. Should not
be called before any instances of the class are
destroyed
methods:
server_loop(addr): Async loop method for receiving messages from the
inverter (server-side)
client_loop(addr): Async loop method for receiving messages from the
TSUN cloud (client-side)
async_create_remote(): Establish a client connection to the TSUN cloud
async_publ_mqtt(): Publish data to MQTT broker
close(): Release method which must be called before an instance can be
destroyed
'''
def __init__(self, reader, writer, addr):
super().__init__(reader, writer, addr, None, True)
self.__ha_restarts = -1
async def async_create_remote(self) -> None:
'''Establish a client connection to the TSUN cloud'''
tsun = Config.get('solarman')
host = tsun['host']
port = tsun['port']
addr = (host, port)
try:
logging.info(f'Connected to {addr}')
connect = asyncio.open_connection(host, port)
reader, writer = await connect
self.remoteStream = ConnectionG3P(reader, writer, addr, self,
False)
asyncio.create_task(self.client_loop(addr))
except (ConnectionRefusedError, TimeoutError) as error:
logging.info(f'{error}')
except Exception:
self.inc_counter('SW_Exception')
logging.error(
f"Inverter: Exception for {addr}:\n"
f"{traceback.format_exc()}")
async def async_publ_mqtt(self) -> None:
'''publish data to MQTT broker'''
# check if new inverter or collector infos are available or when the
# home assistant has changed the status back to online
try:
if (('inverter' in self.new_data and self.new_data['inverter'])
or ('collector' in self.new_data and
self.new_data['collector'])
or self.mqtt.ha_restarts != self.__ha_restarts):
await self._register_proxy_stat_home_assistant()
await self.__register_home_assistant()
self.__ha_restarts = self.mqtt.ha_restarts
for key in self.new_data:
await self.__async_publ_mqtt_packet(key)
for key in Infos.new_stat_data:
await self._async_publ_mqtt_proxy_stat(key)
except MqttCodeError as error:
logging.error(f'Mqtt except: {error}')
except Exception:
self.inc_counter('SW_Exception')
logging.error(
f"Inverter: Exception:\n"
f"{traceback.format_exc()}")
async def __async_publ_mqtt_packet(self, key):
db = self.db.db
if key in db and self.new_data[key]:
data_json = json.dumps(db[key])
node_id = self.node_id
logger_mqtt.debug(f'{key}: {data_json}')
await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501
self.new_data[key] = False
async def __register_home_assistant(self) -> None:
'''register all our topics at home assistant'''
for data_json, component, node_id, id in self.db.ha_confs(
self.entity_prfx, self.node_id, self.unique_id,
self.sug_area):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
f" node_id:'{node_id}' {data_json}")
await self.mqtt.publish(f"{self.discovery_prfx}{component}"
f"/{node_id}{id}/config", data_json)
def close(self) -> None:
logging.debug(f'InverterG3P.close() l{self.l_addr} | r{self.r_addr}')
super().close() # call close handler in the parent class
# logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
def __del__(self):
logging.debug("InverterG3P.__del__")
super().__del__()

View File

@@ -0,0 +1,365 @@
import struct
# import json
import logging
# import time
from datetime import datetime
if __name__ == "app.src.gen3plus.solarman_v5":
from app.src.messages import hex_dump_memory, Message
from app.src.config import Config
from app.src.gen3plus.infos_g3p import InfosG3P
from app.src.infos import Register
else: # pragma: no cover
from messages import hex_dump_memory, Message
from config import Config
from gen3plus.infos_g3p import InfosG3P
from infos import Register
# import traceback
logger = logging.getLogger('msg')
class SolarmanV5(Message):
def __init__(self, server_side: bool):
super().__init__(server_side)
self.header_len = 11 # overwrite constructor value from class Message
self.control = 0
self.serial = 0
self.snr = 0
self.db = InfosG3P()
self.switch = {
0x4210: self.msg_data_ind, # real time data
0x1210: self.msg_data_rsp, # at least every 5 minutes
0x4710: self.msg_hbeat_ind, # heartbeat
0x1710: self.msg_hbeat_rsp, # every 2 minutes
# every 3 hours comes a sync sequence:
# 00:00:00 0x4110 device data ftype: 0x02
# 00:00:02 0x4210 real time data ftype: 0x01
# 00:00:03 0x4210 real time data ftype: 0x81
# 00:00:05 0x4310 wifi data ftype: 0x81 sub-id 0x0018: 0c # noqa: E501
# 00:00:06 0x4310 wifi data ftype: 0x81 sub-id 0x0018: 1c # noqa: E501
# 00:00:07 0x4310 wifi data ftype: 0x01 sub-id 0x0018: 0c # noqa: E501
# 00:00:08 0x4810 options? ftype: 0x01
0x4110: self.msg_dev_ind, # device data, sync start
0x1110: self.msg_dev_rsp, # every 3 hours
0x4310: self.msg_forward, # regularly after 3-6 hours
0x1310: self.msg_forward,
0x4810: self.msg_forward, # sync end
0x1810: self.msg_forward,
#
# AT cmd
0x4510: self.at_command_ind, # from server
0x1510: self.msg_forward, # from inverter
}
'''
Our public methods
'''
def close(self) -> None:
logging.debug('Solarman.close()')
# we have references to methods of this class in self.switch
# so we have to erase self.switch, otherwise this instance can't be
# deallocated by the garbage collector ==> we get a memory leak
self.switch.clear()
def set_serial_no(self, snr: int):
serial_no = str(snr)
if self.unique_id == serial_no:
logger.debug(f'SerialNo: {serial_no}')
else:
found = False
inverters = Config.get('inverters')
# logger.debug(f'Inverters: {inverters}')
for inv in inverters.values():
# logger.debug(f'key: {key} -> {inv}')
if (type(inv) is dict and 'monitor_sn' in inv
and inv['monitor_sn'] == snr):
found = True
self.node_id = inv['node_id']
self.sug_area = inv['suggested_area']
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501
if not found:
self.node_id = ''
self.sug_area = ''
if 'allow_all' not in inverters or not inverters['allow_all']:
self.inc_counter('Unknown_SNR')
self.unique_id = None
logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})') # noqa: E501
return
logger.debug(f'SerialNo {serial_no} not known but accepted!')
self.unique_id = serial_no
def read(self) -> None:
self._read()
if not self.header_valid:
self.__parse_header(self._recv_buffer, len(self._recv_buffer))
if self.header_valid and len(self._recv_buffer) >= (self.header_len +
self.data_len+2):
hex_dump_memory(logging.INFO, f'Received from {self.addr}:',
self._recv_buffer, self.header_len+self.data_len+2)
if self.__trailer_is_ok(self._recv_buffer, self.header_len
+ self.data_len + 2):
self.set_serial_no(self.snr)
self.__dispatch_msg()
self.__flush_recv_msg()
return
def forward(self, buffer, buflen) -> None:
tsun = Config.get('solarman')
if tsun['enabled']:
self._forward_buffer = buffer[:buflen]
hex_dump_memory(logging.DEBUG, 'Store for forwarding:',
buffer, buflen)
self.__parse_header(self._forward_buffer,
len(self._forward_buffer))
fnc = self.switch.get(self.control, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'forwrd') +
f' Ctl: {int(self.control):#04x}'
f' Msg: {fnc.__name__!r}')
return
def _init_new_client_conn(self) -> bool:
# self.__build_header(0x91)
# self._send_buffer += struct.pack(f'!{len(contact_name)+1}p'
# f'{len(contact_mail)+1}p',
# contact_name, contact_mail)
# self.__finish_send_msg()
return False
'''
Our private methods
'''
def __flow_str(self, server_side: bool, type: str): # noqa: F821
switch = {
'rx': ' <',
'tx': ' >',
'forwrd': '<< ',
'drop': ' xx',
'rxS': '> ',
'txS': '< ',
'forwrdS': ' >>',
'dropS': 'xx ',
}
if server_side:
type += 'S'
return switch.get(type, '???')
def __parse_header(self, buf: bytes, buf_len: int) -> None:
if (buf_len < self.header_len): # enough bytes for complete header?
return
result = struct.unpack_from('<BHHHL', buf, 0)
# store parsed header values in the class
start = result[0] # start byte, must be 0xA5
self.data_len = result[1] # length of the payload
self.control = result[2]
self.serial = result[3]
self.snr = result[4]
if start != 0xA5:
self.inc_counter('Invalid_Msg_Format')
# erase broken recv buffer
self._recv_buffer = bytearray()
return
self.header_valid = True
return
def __trailer_is_ok(self, buf: bytes, buf_len: int) -> bool:
crc = buf[self.data_len+11]
stop = buf[self.data_len+12]
if stop != 0x15:
self.inc_counter('Invalid_Msg_Format')
if len(self._recv_buffer) > (self.data_len+13):
next_start = buf[self.data_len+13]
if next_start != 0xa5:
# erase broken recv buffer
self._recv_buffer = bytearray()
return False
check = sum(buf[1:buf_len-2]) & 0xff
if check != crc:
self.inc_counter('Invalid_Msg_Format')
logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}'
f' Stop:{int(stop):#02x}')
# start & stop byte are valid, discard only this message
return False
return True
def __dispatch_msg(self) -> None:
fnc = self.switch.get(self.control, self.msg_unknown)
if self.unique_id:
logger.info(self.__flow_str(self.server_side, 'rx') +
f' Ctl: {int(self.control):#04x}' +
f' Msg: {fnc.__name__!r}')
fnc()
else:
logger.info(self.__flow_str(self.server_side, 'drop') +
f' Ctl: {int(self.control):#04x}' +
f' Msg: {fnc.__name__!r}')
def __flush_recv_msg(self) -> None:
self._recv_buffer = self._recv_buffer[(self.header_len +
self.data_len+2):]
self.header_valid = False
'''
def modbus(self, data):
POLY = 0xA001
crc = 0xFFFF
for byte in data:
crc ^= byte
for _ in range(8):
crc = ((crc >> 1) ^ POLY
if (crc & 0x0001)
else crc >> 1)
return crc
def validate_modbus_crc(self, frame):
# Calculate crc with all but the last 2 bytes of
# the frame (they contain the crc)
calc_crc = 0xFFFF
for pos in frame[:-2]:
calc_crc ^= pos
for i in range(8):
if (calc_crc & 1) != 0:
calc_crc >>= 1
calc_crc ^= 0xA001 # bitwise 'or' with modbus magic
# number (0xa001 == bitwise
# reverse of 0x8005)
else:
calc_crc >>= 1
# Compare calculated crc with the one supplied in the frame....
frame_crc, = struct.unpack('<H', frame[-2:])
if calc_crc == frame_crc:
return 1
else:
return 0
'''
'''
Message handler methods
'''
def msg_unknown(self):
logger.warning(f"Unknow Msg: ID:{int(self.control):#04x}")
self.inc_counter('Unknown_Msg')
self.msg_forward()
def msg_forward(self):
self.forward(self._recv_buffer, self.header_len+self.data_len+2)
def msg_dev_ind(self):
data = self._recv_buffer[self.header_len:]
result = struct.unpack_from('<BLLL', data, 0)
ftype = result[0] # always 2
total = result[1]
tim = result[2]
res = result[3] # always zero
logger.info(f'frame type:{ftype:02x} total:{total}s'
f' timer:{tim:08x}s null:{res}')
dt = datetime.fromtimestamp(total)
logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
self.__process_data(ftype)
self.forward(self._recv_buffer, self.header_len+self.data_len+2)
def msg_dev_rsp(self):
self.msg_response()
def msg_data_ind(self):
data = self._recv_buffer
result = struct.unpack_from('<BLLLLL', data, self.header_len)
ftype = result[0] # 1 or 0x81
total = result[1]
tim = result[2]
offset = result[3]
unkn = result[4]
cnt = result[5]
logger.info(f'ftype:{ftype:02x} total:{total}s'
f' timer:{tim:08x}s ofs:{offset}'
f' ??: {unkn:08x} cnt:{cnt}')
dt = datetime.fromtimestamp(total)
logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
ftype &= 0x7f # mask bit 7 (0x80)
self.__process_data(ftype)
self.forward(self._recv_buffer, self.header_len+self.data_len+2)
def __process_data(self, ftype):
inv_update = False
ctrl_update = False
msg_type = self.control >> 8
for key, update in self.db.parse(self._recv_buffer, msg_type, ftype):
if update:
if key == 'inverter':
inv_update = True
if key == 'controller':
ctrl_update = True
self.new_data[key] = True
if inv_update:
db = self.db
MaxPow = db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
Rated = db.get_db_value(Register.RATED_POWER, 0)
Model = None
if MaxPow == 2000:
if Rated == 800 or Rated == 600:
Model = f'TSOL-MS{MaxPow}({Rated})'
else:
Model = f'TSOL-MS{MaxPow}'
elif MaxPow == 1800 or MaxPow == 1600:
Model = f'TSOL-MS{MaxPow}'
if Model:
logger.info(f'Model: {Model}')
self.db.set_db_def_value(Register.EQUIPMENT_MODEL, Model)
if ctrl_update:
db = self.db
Version = db.get_db_value(Register.COLLECTOR_FW_VERSION, 0)
if isinstance(Version, str):
Model = Version.split('_')[0]
self.db.set_db_def_value(Register.CHIP_MODEL, Model)
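# Illustrative sketch, not part of the original sources: the model-name
# rules from __process_data() above, extracted into a hypothetical pure
# helper so the mapping is easy to verify in isolation.
def build_model(max_pow: int, rated: int) -> str | None:
    if max_pow == 2000:
        if rated in (600, 800):
            return f'TSOL-MS{max_pow}({rated})'
        return f'TSOL-MS{max_pow}'
    if max_pow in (1600, 1800):
        return f'TSOL-MS{max_pow}'
    return None

assert build_model(2000, 800) == 'TSOL-MS2000(800)'
assert build_model(1800, 600) == 'TSOL-MS1800'
assert build_model(1234, 600) is None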
def msg_data_rsp(self):
self.msg_response()
def msg_hbeat_ind(self):
self.forward(self._recv_buffer, self.header_len+self.data_len+2)
def msg_hbeat_rsp(self):
self.msg_response()
def msg_response(self):
data = self._recv_buffer[self.header_len:]
result = struct.unpack_from('<BBLL', data, 0)
ftype = result[0] # always 2
valid = result[1] == 1 # status
ts = result[2]
repeat = result[3] # always 60
logger.info(f'ftype:{ftype} accepted:{valid}'
f' ts:{ts:08x} repeat:{repeat}s')
dt = datetime.fromtimestamp(ts)
logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
self.forward(self._recv_buffer, self.header_len+self.data_len+2)
def at_command_ind(self):
self.inc_counter('AT_Command')
self.msg_forward()


@@ -1,257 +1,473 @@
import struct, json, logging, os
import logging
import json
import os
from enum import Enum
from typing import Generator
class Register(Enum):
COLLECTOR_FW_VERSION = 1
CHIP_TYPE = 2
CHIP_MODEL = 3
TRACE_URL = 4
LOGGER_URL = 5
PRODUCT_NAME = 20
MANUFACTURER = 21
VERSION = 22
SERIAL_NUMBER = 23
EQUIPMENT_MODEL = 24
NO_INPUTS = 25
MAX_DESIGNED_POWER = 26
INVERTER_CNT = 50
UNKNOWN_SNR = 51
UNKNOWN_MSG = 52
INVALID_DATA_TYPE = 53
INTERNAL_ERROR = 54
UNKNOWN_CTRL = 55
OTA_START_MSG = 56
SW_EXCEPTION = 57
INVALID_MSG_FMT = 58
AT_COMMAND = 59
OUTPUT_POWER = 83
RATED_POWER = 84
INVERTER_TEMP = 85
PV1_VOLTAGE = 100
PV1_CURRENT = 101
PV1_POWER = 102
PV2_VOLTAGE = 110
PV2_CURRENT = 111
PV2_POWER = 112
PV3_VOLTAGE = 120
PV3_CURRENT = 121
PV3_POWER = 122
PV4_VOLTAGE = 130
PV4_CURRENT = 131
PV4_POWER = 132
PV5_VOLTAGE = 140
PV5_CURRENT = 141
PV5_POWER = 142
PV6_VOLTAGE = 150
PV6_CURRENT = 151
PV6_POWER = 152
PV1_DAILY_GENERATION = 200
PV1_TOTAL_GENERATION = 201
PV2_DAILY_GENERATION = 210
PV2_TOTAL_GENERATION = 211
PV3_DAILY_GENERATION = 220
PV3_TOTAL_GENERATION = 221
PV4_DAILY_GENERATION = 230
PV4_TOTAL_GENERATION = 231
PV5_DAILY_GENERATION = 240
PV5_TOTAL_GENERATION = 241
PV6_DAILY_GENERATION = 250
PV6_TOTAL_GENERATION = 251
GRID_VOLTAGE = 300
GRID_CURRENT = 301
GRID_FREQUENCY = 302
DAILY_GENERATION = 303
TOTAL_GENERATION = 304
COMMUNICATION_TYPE = 400
SIGNAL_STRENGTH = 401
POWER_ON_TIME = 402
COLLECT_INTERVAL = 403
DATA_UP_INTERVAL = 404
CONNECT_COUNT = 405
HEARTBEAT_INTERVAL = 406
IP_ADRESS = 407
EVENT_401 = 500
EVENT_402 = 501
EVENT_403 = 502
EVENT_404 = 503
EVENT_405 = 504
EVENT_406 = 505
EVENT_407 = 506
EVENT_408 = 507
EVENT_409 = 508
EVENT_410 = 509
EVENT_411 = 510
EVENT_412 = 511
EVENT_413 = 512
EVENT_414 = 513
EVENT_415 = 514
EVENT_416 = 515
VALUE_1 = 9000
TEST_REG1 = 10000
TEST_REG2 = 10001
class Infos:
stat = {}
app_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
new_stat_data = {}
@classmethod
def static_init(cls):
logging.info('Initialize proxy statistics')
# init proxy counter in the class.stat dictionary
cls.stat['proxy'] = {}
for key in cls.__info_defs:
name = cls.__info_defs[key]['name']
if name[0] == 'proxy':
cls.stat['proxy'][name[1]] = 0
# add values from the environment to the device definition table
prxy = cls.__info_devs['proxy']
prxy['sw'] = cls.version
prxy['mdl'] = cls.app_name
class Infos:
def __init__(self):
self.db = {}
self.app_name = os.getenv('SERVICE_NAME', 'proxy')
self.version = os.getenv('VERSION', 'unknown')
self.tracer = logging.getLogger('data')
__info_devs={
'controller':{ 'name':'Controller', 'mdl':0x00092f90, 'mf':0x000927c0, 'sw':0x00092ba8},
'inverter': {'via':'controller', 'name':'Micro Inverter', 'mdl':0x00000032, 'mf':0x00000014, 'sw':0x0000001e},
'input_pv1': {'via':'inverter', 'name':'Module PV1'},
'input_pv2': {'via':'inverter', 'name':'Module PV2'},
'input_pv3': {'via':'inverter', 'name':'Module PV3'},
'input_pv4': {'via':'inverter', 'name':'Module PV4'},
__info_devs = {
'proxy': {'singleton': True, 'name': 'Proxy', 'mf': 'Stefan Allius'}, # noqa: E501
'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': Register.CHIP_MODEL, 'mf': Register.CHIP_TYPE, 'sw': Register.COLLECTOR_FW_VERSION}, # noqa: E501
'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': Register.EQUIPMENT_MODEL, 'mf': Register.MANUFACTURER, 'sw': Register.VERSION}, # noqa: E501
'input_pv1': {'via': 'inverter', 'name': 'Module PV1'},
'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'dep': {'reg': Register.NO_INPUTS, 'gte': 2}}, # noqa: E501
'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'dep': {'reg': Register.NO_INPUTS, 'gte': 3}}, # noqa: E501
'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'dep': {'reg': Register.NO_INPUTS, 'gte': 4}}, # noqa: E501
}
__info_defs={
# collector values used for device registration:
0x00092ba8: {'name':['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''},
0x000927c0: {'name':['collector', 'Chip_Type'], 'level': logging.DEBUG, 'unit': ''},
0x00092f90: {'name':['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''},
0x00095a88: {'name':['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''},
0x00095aec: {'name':['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''},
# inverter values used for device registration:
0x0000000a: {'name':['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''},
0x00000014: {'name':['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''},
0x0000001e: {'name':['inverter', 'Version'], 'level': logging.INFO, 'unit': ''},
0x00000028: {'name':['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''},
0x00000032: {'name':['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''},
# events
0x00000191: {'name':['events', '401_'], 'level': logging.DEBUG, 'unit': ''},
0x00000192: {'name':['events', '402_'], 'level': logging.DEBUG, 'unit': ''},
0x00000193: {'name':['events', '403_'], 'level': logging.DEBUG, 'unit': ''},
0x00000194: {'name':['events', '404_'], 'level': logging.DEBUG, 'unit': ''},
0x00000195: {'name':['events', '405_'], 'level': logging.DEBUG, 'unit': ''},
0x00000196: {'name':['events', '406_'], 'level': logging.DEBUG, 'unit': ''},
0x00000197: {'name':['events', '407_'], 'level': logging.DEBUG, 'unit': ''},
0x00000198: {'name':['events', '408_'], 'level': logging.DEBUG, 'unit': ''},
0x00000199: {'name':['events', '409_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019a: {'name':['events', '410_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019b: {'name':['events', '411_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019c: {'name':['events', '412_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019d: {'name':['events', '413_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019e: {'name':['events', '414_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019f: {'name':['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''},
0x000001a0: {'name':['events', '416_'], 'level': logging.DEBUG, 'unit': ''},
# grid measures:
0x000003e8: {'name':['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'out_volt_', 'fmt':'| float','name': 'Grid Voltage'}},
0x0000044c: {'name':['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'out_cur_', 'fmt':'| float','name': 'Grid Current'}},
0x000004b0: {'name':['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha':{'dev':'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id':'out_freq_', 'fmt':'| float','name': 'Grid Frequency'}},
0x00000640: {'name':['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'out_power_', 'fmt':'| float','name': 'Power'}},
0x000005dc: {'name':['env', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'rated_power_','fmt':'| int', 'name': 'Rated Power','ent_cat':'diagnostic'}},
0x00000514: {'name':['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha':{'dev':'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id':'temp_', 'fmt':'| int','name': 'Temperature'}},
__comm_type_val_tpl = "{%set com_types = ['n/a','Wi-Fi', 'G4', 'G5', 'GPRS'] %}{{com_types[value_json['Communication_Type']|int(0)]|default(value_json['Communication_Type'])}}" # noqa: E501
# input measures:
0x000006a4: {'name':['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv1_', 'name': 'Voltage', 'val_tpl' :"{{ (value_json['pv1']['Voltage'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x00000708: {'name':['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv1_', 'name': 'Current', 'val_tpl' :"{{ (value_json['pv1']['Current'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x0000076c: {'name':['input', 'pv1', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv1_','name': 'Power', 'val_tpl' :"{{ (value_json['pv1']['Power'] | float)}}", 'icon':'mdi:gauge'}},
0x000007d0: {'name':['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv2_', 'name': 'Voltage', 'val_tpl' :"{{ (value_json['pv2']['Voltage'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x00000834: {'name':['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv2_', 'name': 'Current', 'val_tpl' :"{{ (value_json['pv2']['Current'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x00000898: {'name':['input', 'pv2', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha':{'dev':'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv2_','name': 'Power', 'val_tpl' :"{{ (value_json['pv2']['Power'] | float)}}", 'icon':'mdi:gauge'}},
0x000008fc: {'name':['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv3_', 'name': 'Voltage', 'val_tpl' :"{{ (value_json['pv3']['Voltage'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x00000960: {'name':['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv3_', 'name': 'Current', 'val_tpl' :"{{ (value_json['pv3']['Current'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x000009c4: {'name':['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv3_','name': 'Power', 'val_tpl' :"{{ (value_json['pv3']['Power'] | float)}}", 'icon':'mdi:gauge'}},
0x00000a28: {'name':['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'volt_pv4_', 'name': 'Voltage', 'val_tpl' :"{{ (value_json['pv4']['Voltage'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x00000a8c: {'name':['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha':{'dev':'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id':'cur_pv4_', 'name': 'Current', 'val_tpl' :"{{ (value_json['pv4']['Current'] | float)}}", 'unvisible':1, 'icon':'mdi:gauge'}},
0x00000af0: {'name':['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha':{'dev':'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id':'power_pv4_','name': 'Power', 'val_tpl' :"{{ (value_json['pv4']['Power'] | float)}}", 'icon':'mdi:gauge'}},
0x00000c1c: {'name':['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv1_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant'}},
0x00000c80: {'name':['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv1_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power'}},
0x00000ce4: {'name':['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv2_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant'}},
0x00000d48: {'name':['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv2_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power'}},
0x00000dac: {'name':['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv3_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant'}},
0x00000e10: {'name':['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv3_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power'}},
0x00000e74: {'name':['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_pv4_','name': 'Daily Generation', 'val_tpl' :"{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon':'mdi:solar-power-variant'}},
0x00000ed8: {'name':['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha':{'dev':'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_pv4_','name': 'Total Generation', 'val_tpl' :"{{ (value_json['pv4']['Total_Generation'] | float)}}", 'icon':'mdi:solar-power'}},
# total:
0x00000b54: {'name':['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev':'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id':'daily_gen_', 'fmt':'| float','name': 'Daily Generation', 'icon':'mdi:solar-power-variant'}},
0x00000bb8: {'name':['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha':{'dev':'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id':'total_gen_', 'fmt':'| float','name': 'Total Generation', 'icon':'mdi:solar-power'}},
__info_defs = {
# collector values used for device registration:
Register.COLLECTOR_FW_VERSION: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.CHIP_TYPE: {'name': ['collector', 'Chip_Type'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.CHIP_MODEL: {'name': ['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.TRACE_URL: {'name': ['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.LOGGER_URL: {'name': ['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# inverter values used for device registration:
Register.PRODUCT_NAME: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.MANUFACTURER: {'name': ['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.VERSION: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
Register.SERIAL_NUMBER: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EQUIPMENT_MODEL: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.NO_INPUTS: {'name': ['inverter', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.MAX_DESIGNED_POWER: {'name': ['inverter', 'Max_Designed_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'designed_power_', 'fmt': '| string + " W"', 'name': 'Max Designed Power', 'icon': 'mdi:lightning-bolt', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.RATED_POWER: {'name': ['inverter', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'fmt': '| string + " W"', 'name': 'Rated Power', 'icon': 'mdi:lightning-bolt', 'ent_cat': 'diagnostic'}}, # noqa: E501
# proxy:
Register.INVERTER_CNT: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': '| int', 'name': 'Active Inverter Connections', 'icon': 'mdi:counter'}}, # noqa: E501
Register.UNKNOWN_SNR: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': '| int', 'name': 'Unknown Serial No', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.UNKNOWN_MSG: {'name': ['proxy', 'Unknown_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_msg_', 'fmt': '| int', 'name': 'Unknown Msg Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.INVALID_DATA_TYPE: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': '| int', 'name': 'Invalid Data Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.INTERNAL_ERROR: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': '| int', 'name': 'Internal Error', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic', 'en': False}}, # noqa: E501
Register.UNKNOWN_CTRL: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': '| int', 'name': 'Unknown Control Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.OTA_START_MSG: {'name': ['proxy', 'OTA_Start_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'ota_start_cmd_', 'fmt': '| int', 'name': 'OTA Start Cmd', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.SW_EXCEPTION: {'name': ['proxy', 'SW_Exception'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'sw_exception_', 'fmt': '| int', 'name': 'Internal SW Exception', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.INVALID_MSG_FMT: {'name': ['proxy', 'Invalid_Msg_Format'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_msg_fmt_', 'fmt': '| int', 'name': 'Invalid Message Format', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.AT_COMMAND: {'name': ['proxy', 'AT_Command'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'at_cmd_', 'fmt': '| int', 'name': 'AT Command', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
# 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':'| float','name': 'Grid Voltage'}}, # noqa: E501
# events
Register.EVENT_401: {'name': ['events', '401_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_402: {'name': ['events', '402_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_403: {'name': ['events', '403_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_404: {'name': ['events', '404_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_405: {'name': ['events', '405_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_406: {'name': ['events', '406_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_407: {'name': ['events', '407_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_408: {'name': ['events', '408_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_409: {'name': ['events', '409_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_410: {'name': ['events', '410_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_411: {'name': ['events', '411_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_412: {'name': ['events', '412_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_413: {'name': ['events', '413_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_414: {'name': ['events', '414_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
Register.EVENT_416: {'name': ['events', '416_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# grid measures:
Register.GRID_VOLTAGE: {'name': ['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'out_volt_', 'fmt': '| float', 'name': 'Grid Voltage', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.GRID_CURRENT: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': '| float', 'name': 'Grid Current', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.GRID_FREQUENCY: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': '| float', 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.OUTPUT_POWER: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': '| float', 'name': 'Power'}}, # noqa: E501
Register.INVERTER_TEMP: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': '| int', 'name': 'Temperature'}}, # noqa: E501
Register.VALUE_1: {'name': ['env', 'Value_1'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'value_1_', 'fmt': '| int', 'name': 'Value 1', 'ent_cat': 'diagnostic'}}, # noqa: E501
# input measures:
Register.PV1_VOLTAGE: {'name': ['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv1_', 'val_tpl': "{{ (value_json['pv1']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV1_CURRENT: {'name': ['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv1_', 'val_tpl': "{{ (value_json['pv1']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV1_POWER: {'name': ['input', 'pv1', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv1_', 'val_tpl': "{{ (value_json['pv1']['Power'] | float)}}"}}, # noqa: E501
Register.PV2_VOLTAGE: {'name': ['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv2_', 'val_tpl': "{{ (value_json['pv2']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV2_CURRENT: {'name': ['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv2_', 'val_tpl': "{{ (value_json['pv2']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV2_POWER: {'name': ['input', 'pv2', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv2_', 'val_tpl': "{{ (value_json['pv2']['Power'] | float)}}"}}, # noqa: E501
Register.PV3_VOLTAGE: {'name': ['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv3_', 'val_tpl': "{{ (value_json['pv3']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV3_CURRENT: {'name': ['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv3_', 'val_tpl': "{{ (value_json['pv3']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV3_POWER: {'name': ['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv3_', 'val_tpl': "{{ (value_json['pv3']['Power'] | float)}}"}}, # noqa: E501
Register.PV4_VOLTAGE: {'name': ['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv4_', 'val_tpl': "{{ (value_json['pv4']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV4_CURRENT: {'name': ['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv4_', 'val_tpl': "{{ (value_json['pv4']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.PV4_POWER: {'name': ['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv4_', 'val_tpl': "{{ (value_json['pv4']['Power'] | float)}}"}}, # noqa: E501
Register.PV1_DAILY_GENERATION: {'name': ['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv1_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
Register.PV1_TOTAL_GENERATION: {'name': ['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv1_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
Register.PV2_DAILY_GENERATION: {'name': ['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv2_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
Register.PV2_TOTAL_GENERATION: {'name': ['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv2_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
Register.PV3_DAILY_GENERATION: {'name': ['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv3_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
Register.PV3_TOTAL_GENERATION: {'name': ['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv3_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
Register.PV4_DAILY_GENERATION: {'name': ['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv4_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
Register.PV4_TOTAL_GENERATION: {'name': ['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv4_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv4']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
# total:
Register.DAILY_GENERATION: {'name': ['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_', 'fmt': '| float', 'name': 'Daily Generation', 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
Register.TOTAL_GENERATION: {'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': '| float', 'name': 'Total Generation', 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
# controller:
0x000c3500: {'name':['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%' , 'ha':{'dev':'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id':'signal_', 'fmt':'| int', 'name': 'Signal Strength', 'icon':'mdi:wifi','ent_cat':'diagnostic'}},
0x000c96a8: {'name':['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha':{'dev':'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id':'power_on_time_', 'name': 'Power on Time', 'val_tpl':"{{ (value_json['Power_On_Time'] | float)}}", 'nat_prc':'3','ent_cat':'diagnostic'}},
0x000cf850: {'name':['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha':{'dev':'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id':'data_up_intval_', 'fmt':'| int', 'name': 'Data Up Interval', 'icon':'mdi:update','ent_cat':'diagnostic'}},
Register.SIGNAL_STRENGTH: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': '| int', 'name': 'Signal Strength', 'icon': 'mdi:wifi'}}, # noqa: E501
Register.POWER_ON_TIME: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'fmt': '| float', 'name': 'Power on Time', 'nat_prc': '3', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.COLLECT_INTERVAL: {'name': ['controller', 'Collect_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_collect_intval_', 'fmt': '| string + " s"', 'name': 'Data Collect Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.CONNECT_COUNT: {'name': ['controller', 'Connect_Count'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'connect_count_', 'fmt': '| int', 'name': 'Connect Count', 'icon': 'mdi:counter', 'comp': 'sensor', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.COMMUNICATION_TYPE: {'name': ['controller', 'Communication_Type'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'comm_type_', 'name': 'Communication Type', 'val_tpl': __comm_type_val_tpl, 'comp': 'sensor', 'icon': 'mdi:wifi'}}, # noqa: E501
Register.DATA_UP_INTERVAL: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'data_up_intval_', 'fmt': '| string + " s"', 'name': 'Data Up Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.HEARTBEAT_INTERVAL: {'name': ['controller', 'Heartbeat_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'heartbeat_intval_', 'fmt': '| string + " s"', 'name': 'Heartbeat Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}}, # noqa: E501
Register.IP_ADRESS: {'name': ['controller', 'IP_Adress'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'ip_adress_', 'fmt': '| string', 'name': 'IP Adress', 'icon': 'mdi:wifi', 'ent_cat': 'diagnostic'}}, # noqa: E501
}
}
def dev_value(self, idx:str|int) -> str|int|float|None:
@property
def info_devs(self) -> dict:
return self.__info_devs
@property
def info_defs(self) -> dict:
return self.__info_defs
'''
if __name__ == "app.src.messages":
@info_defs.setter
def info_defs(self, value: dict) -> None:
self.__info_defs = value
@info_devs.setter
def info_devs(self, value: dict) -> None:
self.__info_devs = value
'''
def dev_value(self, idx: str | int) -> str | int | float | None:
'''returns the stored device value from our database
idx:int ==> lookup the value in the database and return it as str, int or float. If the value is not available return 'None'
idx:str ==> returns the string as a fixed value without a database lookup
idx:int ==> lookup the value in the database and return it as str,
int or float. If the value is not available return 'None'
idx:str ==> returns the string as a fixed value without a
database lookup
'''
if type (idx) is str:
return idx # return idx as a fixed value
elif idx in self.__info_defs:
dict = self.db
row = self.__info_defs[idx]
if type(idx) is str:
return idx # return idx as a fixed value
elif idx in self.info_defs:
row = self.info_defs[idx]
if 'singleton' in row and row['singleton']:
dict = self.stat
else:
dict = self.db
keys = row['name']
for key in keys:
if key not in dict:
return None # value not found in the database
dict = dict[key]
return dict # value of the requested entry
return None # unknown idx, not in __info_defs
return dict # value of the requested entry
return None # unknown idx, not in info_defs
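# Illustrative usage sketch, not part of the original sources: strings
# are passed through unchanged, Register keys are looked up in the
# nested db dict. The stored chip model 'LSW-3' is only an assumption
# for this example.
info = Infos()
info.set_db_def_value(Register.CHIP_MODEL, 'LSW-3')
assert info.dev_value('TSUN') == 'TSUN'                 # fixed string
assert info.dev_value(Register.CHIP_MODEL) == 'LSW-3'   # database lookup
assert info.dev_value(Register.SERIAL_NUMBER) is None   # not stored yet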
def ha_confs(self, prfx="tsun/garagendach/", snr='123', sug_area =''):
'''Generator function yields a json register struct for home-assistant auto configuration and a unique entity string
def inc_counter(self, counter: str) -> None:
'''inc proxy statistic counter'''
dict = self.stat['proxy']
dict[counter] += 1
def dec_counter(self, counter: str) -> None:
'''dec proxy statistic counter'''
dict = self.stat['proxy']
dict[counter] -= 1
def ha_proxy_confs(self, ha_prfx: str, node_id: str, snr: str) \
-> Generator[tuple[dict, str], None, None]:
'''Generator function yields json register struct for home-assistant
auto configuration and the unique entity string, for all proxy
registers
arguments:
prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
snr:str ==> serial number of the inverter, used to build unique entity strings
sug_area:str ==> suggested area string from the config file'''
tab = self.__info_defs
for key in tab:
row = tab[key]
ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
node_id:str ==> node id of the inverter, used to build unique entity
snr:str ==> serial number of the inverter, used to build unique
entity strings
'''
# iterate over all info_defs entries and yield the configs for
# entries with singleton=True, which marks a proxy register
for reg in self.info_defs.keys():
res = self.ha_conf(reg, ha_prfx, node_id, snr, True) # noqa: E501
if res:
yield res
#check if we have details for home assistant
if 'ha' in row:
ha = row['ha']
if 'comp' in ha:
component = ha['comp']
else:
component = 'sensor'
attr = {} # dict to collect all the sensor entity details
if 'name' in ha:
attr['name'] = ha['name'] # take the entity name from the ha dict
else:
attr['name'] = row['name'][-1] # otherwise take a name from the name array
def ha_conf(self, key, ha_prfx, node_id, snr, singleton: bool, sug_area: str = '') -> tuple[str, str, str, str]: # noqa: E501
if key not in self.info_defs:
return None
row = self.info_defs[key]
attr['stat_t'] = prfx +row['name'][0] # eg. 'stat_t': "tsun/garagendach/grid"
attr['dev_cla'] = ha['dev_cla'] # eg. 'dev_cla': 'power'
attr['stat_cla'] = ha['stat_cla'] # eg. 'stat_cla': "measurement"
attr['uniq_id'] = ha['id']+snr # build the 'uniq_id' from the id str + the serial no of the inverter
if 'val_tpl' in ha:
attr['val_tpl'] = ha['val_tpl'] # get value template for complex data structures
elif 'fmt' in ha:
attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }}"
if 'singleton' in row:
if singleton != row['singleton']:
return None
elif singleton:
return None
prfx = ha_prfx + node_id
if 'unit' in row:
attr['unit_of_meas'] = row['unit'] # optionally add a 'unit_of_meas', e.g. 'W'
if 'icon' in ha:
attr['ic'] = ha['icon'] # optionally add an icon for the entity
if 'nat_prc' in ha:
attr['sug_dsp_prc'] = ha['nat_prc'] # optionally add the precision of floats
if 'ent_cat' in ha:
attr['ent_cat'] = ha['ent_cat'] # diagnostic, config
# eg. 'dev':{'name':'Microinverter','mdl':'MS-600','ids':["inverter_123"],'mf':'TSUN','sa': 'auf Garagendach'}
# attr['dev'] = {'name':'Microinverter','mdl':'MS-600','ids':[f'inverter_{snr}'],'mf':'TSUN','sa': 'auf Garagendach'}
if 'dev' in ha:
device = self.__info_devs[ha['dev']]
dev = {}
# the same name for 'name' and 'suggested area', so we get dedicated devices in home assistant with short value name and headline
if 'name' in device:
dev['name'] = device['name']
dev['sa'] = device['name']
# fixme: we ignore the suggested area, since one area makes no sense for multiple devices
#else:
# dev['name'] = sug_area
# dev['sa'] = sug_area
if 'via' in device: # add the link to the parent device
dev['via_device'] = f"{device['via']}_{snr}"
for key in ('mdl','mf', 'sw', 'hw'): # add optional values for 'model', 'manufacturer', 'sw version' and 'hw version'
if key in device:
data = self.dev_value(device[key])
if data is not None: dev[key] = data
dev['ids'] = [f"{ha['dev']}_{snr}"]
attr['dev'] = dev
origin = {}
origin['name'] = self.app_name
origin['sw'] = self.version
attr['o'] = origin
yield json.dumps (attr), component, attr['uniq_id']
def __key_obj(self, id) -> list:
d = self.__info_defs.get(id, {'name': None, 'level': logging.DEBUG, 'unit': ''})
return d['name'], d['level'], d['unit']
def parse(self, buf) -> None:
'''parse a data sequence received from the inverter and store the values in Infos.db
buf: buffer of the sequence to parse'''
result = struct.unpack_from('!l', buf, 0)
elms = result[0]
i = 0
ind = 4
while i < elms:
result = struct.unpack_from('!lB', buf, ind)
info_id = result[0]
data_type = result[1]
ind += 5
keys, level, unit = self.__key_obj(info_id)
if data_type==0x54: # 'T' -> Pascal-String
str_len = buf[ind]
result = struct.unpack_from(f'!{str_len+1}p', buf, ind)[0].decode(encoding='ascii', errors='replace')
ind += str_len+1
elif data_type==0x49: # 'I' -> int32
result = struct.unpack_from(f'!l', buf, ind)[0]
ind += 4
elif data_type==0x53: # 'S' -> short
result = struct.unpack_from(f'!h', buf, ind)[0]
ind += 2
elif data_type==0x46: # 'F' -> float32
result = round(struct.unpack_from(f'!f', buf, ind)[0],2)
ind += 4
if keys:
dict = self.db
name = ''
for key in keys[:-1]:
if key not in dict:
dict[key] = {}
dict = dict[key]
name += key + '.'
update = keys[-1] not in dict or dict[keys[-1]] != result
dict[keys[-1]] = result
name += keys[-1]
yield keys[0], update
# check if we have details for home assistant
if 'ha' in row:
ha = row['ha']
if 'comp' in ha:
component = ha['comp']
else:
update = False
name = str(f'info-id.0x{info_id:x}')
self.tracer.log(level, f'{name} : {result}{unit}')
i +=1
component = 'sensor'
attr = {}
if 'name' in ha:
attr['name'] = ha['name']
else:
attr['name'] = row['name'][-1]
attr['stat_t'] = prfx + row['name'][0]
attr['dev_cla'] = ha['dev_cla']
attr['stat_cla'] = ha['stat_cla']
attr['uniq_id'] = ha['id']+snr
if 'val_tpl' in ha:
attr['val_tpl'] = ha['val_tpl']
elif 'fmt' in ha:
attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}' # eg. 'val_tpl': "{{ value_json['Output_Power']|float }} # noqa: E501
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.info_defs: the row for {key} do"
" not have a 'val_tpl' nor a 'fmt' value")
# add unit_of_meas only, if status_class isn't none. If
# status_cla is None we want a number format and not line
# graph in home assistant. A unit will change the number
# format to a line graph
if 'unit' in row and attr['stat_cla'] is not None:
attr['unit_of_meas'] = row['unit'] # 'unit_of_meas'
if 'icon' in ha:
attr['ic'] = ha['icon'] # icon for the entity
if 'nat_prc' in ha:
attr['sug_dsp_prc'] = ha['nat_prc'] # precision of floats
if 'ent_cat' in ha:
attr['ent_cat'] = ha['ent_cat'] # diagnostic, config
# enabled_by_default is deactivated, since it avoids the via
# setup of the devices. It seems that there is a bug in home
# assistant; tested with 'Home Assistant 2023.10.4'
# if 'en' in ha: # enabled_by_default
# attr['en'] = ha['en']
if 'dev' in ha:
device = self.info_devs[ha['dev']]
if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501
return None
dev = {}
# the same name for 'name' and 'suggested area', so we get
# dedicated devices in home assistant with short value
# name and headline
if (sug_area == '' or
('singleton' in device and device['singleton'])):
dev['name'] = device['name']
dev['sa'] = device['name']
else:
dev['name'] = device['name']+' - '+sug_area
dev['sa'] = device['name']+' - '+sug_area
if 'via' in device: # add the link to the parent device
via = device['via']
if via in self.info_devs:
via_dev = self.info_devs[via]
if 'singleton' in via_dev and via_dev['singleton']:
dev['via_device'] = via
else:
dev['via_device'] = f"{via}_{snr}"
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.info_defs: the row for "
f"{key} has an invalid via value: "
f"{via}")
for key in ('mdl', 'mf', 'sw', 'hw'): # add optional
# values for 'model', 'manufacturer', 'sw version' and
# 'hw version'
if key in device:
data = self.dev_value(device[key])
if data is not None:
dev[key] = data
if 'singleton' in device and device['singleton']:
dev['ids'] = [f"{ha['dev']}"]
else:
dev['ids'] = [f"{ha['dev']}_{snr}"]
attr['dev'] = dev
origin = {}
origin['name'] = self.app_name
origin['sw'] = self.version
attr['o'] = origin
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.info_defs: the row for {key} "
"missing 'dev' value for ha register")
return json.dumps(attr), component, node_id, attr['uniq_id']
return None
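# Illustrative usage sketch, not part of the original sources: each
# tuple yielded by ha_proxy_confs() becomes one MQTT discovery publish.
# The prefix, node id and discovery prefix below are example values,
# not the project defaults.
info = Infos()
Infos.static_init()
for data_json, component, node_id, uniq_id in info.ha_proxy_confs(
        ha_prfx='tsun/', node_id='proxy/', snr='proxy'):
    topic = f'homeassistant/{component}/{node_id}{uniq_id}/config'
    print(topic, data_json)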
def _key_obj(self, id) -> list:
d = self.info_defs.get(id, {'name': None, 'level': logging.DEBUG,
'unit': ''})
if 'ha' in d and 'must_incr' in d['ha']:
must_incr = d['ha']['must_incr']
else:
must_incr = False
return d['name'], d['level'], d['unit'], must_incr
def update_db(self, keys, must_incr, result):
name = ''
dict = self.db
for key in keys[:-1]:
if key not in dict:
dict[key] = {}
dict = dict[key]
name += key + '.'
if keys[-1] not in dict:
update = (not must_incr or result > 0)
else:
if must_incr:
update = dict[keys[-1]] < result
else:
update = dict[keys[-1]] != result
if update:
dict[keys[-1]] = result
name += keys[-1]
return name, update
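# Illustrative sketch, not part of the original sources: with
# must_incr=True, update_db() only reports an update when the new value
# is larger, which filters out implausible drops of energy counters.
info = Infos()
keys = ['total', 'Total_Generation']
assert info.update_db(keys, True, 12.5) == ('total.Total_Generation', True)
assert info.update_db(keys, True, 11.0) == ('total.Total_Generation', False)
assert info.update_db(keys, False, 11.0) == ('total.Total_Generation', True)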
def set_db_def_value(self, id, value):
'''set default value'''
row = self.info_defs[id]
if isinstance(row, dict): # pragma: no cover
keys = row['name']
self.update_db(keys, False, value)
def get_db_value(self, id, not_found_result=None):
'''get database value'''
row = self.info_defs[id]
if isinstance(row, dict): # pragma: no cover
keys = row['name']
elm = self.db
for key in keys[:-1]:
if key not in elm:
return not_found_result
elm = elm[key]
if keys[-1] in elm:
return elm[keys[-1]]
return not_found_result
def ignore_this_device(self, dep: dict) -> bool:
'''Checks the equation in the dep dict
returns 'False' only if the equation is valid;
'True' in any other case'''
if 'reg' in dep:
value = self.dev_value(dep['reg'])
if not value:
return True
if 'gte' in dep:
return not value >= dep['gte']
elif 'less_eq' in dep:
return not value <= dep['less_eq']
return True
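# Illustrative sketch, not part of the original sources: how the 'dep'
# entries in __info_devs are evaluated. 'input_pv2' is only registered
# once the inverter reports at least two inputs; the value 4 below is
# an assumed register content.
info = Infos()
dep = {'reg': Register.NO_INPUTS, 'gte': 2}
assert info.ignore_this_device(dep) is True      # NO_INPUTS not known yet
info.set_db_def_value(Register.NO_INPUTS, 4)
assert info.ignore_this_device(dep) is False     # 4 >= 2, keep the device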


@@ -1,104 +1,64 @@
import asyncio, logging, traceback, json
import asyncio
import logging
import json
from config import Config
from async_stream import AsyncStream
from mqtt import Mqtt
#import gc
from infos import Infos
#logger = logging.getLogger('conn')
# logger = logging.getLogger('conn')
logger_mqtt = logging.getLogger('mqtt')
class Inverter():
@classmethod
def class_init(cls) -> None:
logging.debug('Inverter.class_init')
# initialize the proxy statistics
Infos.static_init()
cls.db_stat = Infos()
class Inverter(AsyncStream):
def __init__ (self, reader, writer, addr):
super().__init__(reader, writer, addr, None, True)
self.mqtt = Mqtt()
self.ha_restarts = 0
ha = Config.get('ha')
self.entitiy_prfx = ha['entity_prefix'] + '/'
self.discovery_prfx = ha['discovery_prefix'] + '/'
cls.entity_prfx = ha['entity_prefix'] + '/'
cls.discovery_prfx = ha['discovery_prefix'] + '/'
cls.proxy_node_id = ha['proxy_node_id'] + '/'
cls.proxy_unique_id = ha['proxy_unique_id']
# call Mqtt singleton to establish the connection to the mqtt broker
cls.mqtt = Mqtt(cls._cb_mqtt_is_up)
async def server_loop(self, addr):
'''Loop for receiving messages from the inverter (server-side)'''
logging.info(f'Accept connection from {addr}')
await self.loop()
logging.info(f'Server loop stopped for {addr}')
# if the server connection closes, we also have to disconnect the connection to the TSUN cloud
if self.remoteStream:
logging.debug ("disconnect client connection")
self.remoteStream.disc()
async def client_loop(self, addr):
'''Loop for receiving messages from the TSUN cloud (client-side)'''
await self.remoteStream.loop()
logging.info(f'Client loop stopped for {addr}')
@classmethod
async def _cb_mqtt_is_up(cls) -> None:
logging.info('Initialize proxy device on home assistant')
# register proxy status counters at home assistant
await cls._register_proxy_stat_home_assistant()
# if the client connection closes, we don't touch the server connection. Instead we erase the client
# connection stream, thus on the next received packet from the inverter, we can establish a new connection
# to the TSUN cloud
self.remoteStream.remoteStream = None # erase backlink to inverter instance
self.remoteStream = None # then erase client connection
async def async_create_remote(self) -> None:
'''Establish a client connection to the TSUN cloud'''
tsun = Config.get('tsun')
host = tsun['host']
port = tsun['port']
addr = (host, port)
try:
logging.info(f'Connected to {addr}')
connect = asyncio.open_connection(host, port)
reader, writer = await connect
self.remoteStream = AsyncStream(reader, writer, addr, self, False)
asyncio.create_task(self.client_loop(addr))
except ConnectionRefusedError as error:
logging.info(f'{error}')
except Exception:
logging.error(
f"Inverter: Exception for {addr}:\n"
f"{traceback.format_exc()}")
# send values of the proxy status counters
await asyncio.sleep(0.5) # wait a bit, before sending data
Infos.new_stat_data['proxy'] = True # force sending data to sync ha
await cls._async_publ_mqtt_proxy_stat('proxy')
async def async_publ_mqtt(self) -> None:
'''publish data to the MQTT broker'''
db = self.db.db
# check if new inverter or collector infos are available or when the home assistant has changed the status back to online
if (('inverter' in self.new_data and self.new_data['inverter']) or
('collector' in self.new_data and self.new_data['collector']) or
self.mqtt.ha_restarts != self.ha_restarts):
await self.__register_home_assistant()
self.ha_restarts = self.mqtt.ha_restarts
for key in self.new_data:
if self.new_data[key] and key in db:
data_json = json.dumps(db[key])
logger_mqtt.debug(f'{key}: {data_json}')
await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
self.new_data[key] = False
async def __register_home_assistant(self) -> None:
@classmethod
async def _register_proxy_stat_home_assistant(cls) -> None:
'''register all our topics at home assistant'''
try:
for data_json, component, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, self.sug_area):
logger_mqtt.debug(f'MQTT Register: {data_json}')
await self.mqtt.publish(f"{self.discovery_prfx}{component}/{self.node_id}{id}/config", data_json)
except Exception:
logging.error(
f"Inverter: Exception:\n"
f"{traceback.format_exc()}")
def close(self) -> None:
logging.debug(f'Inverter.close() {self.addr}')
super().close() # call close handler in the parent class
# logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
for data_json, component, node_id, id in cls.db_stat.ha_proxy_confs(
cls.entity_prfx, cls.proxy_node_id, cls.proxy_unique_id):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") # noqa: E501
await cls.mqtt.publish(f'{cls.discovery_prfx}{component}/{node_id}{id}/config', data_json) # noqa: E501
@classmethod
async def _async_publ_mqtt_proxy_stat(cls, key) -> None:
stat = Infos.stat
if key in stat and Infos.new_stat_data[key]:
data_json = json.dumps(stat[key])
node_id = cls.proxy_node_id
logger_mqtt.debug(f'{key}: {data_json}')
await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}",
data_json)
Infos.new_stat_data[key] = False
def __del__ (self):
logging.debug ("Inverter.__del__")
super().__del__()
@classmethod
def class_close(cls, loop) -> None:
logging.debug('Inverter.class_close')
logging.info('Close MQTT Task')
loop.run_until_complete(cls.mqtt.close())
cls.mqtt = None


@@ -1,30 +1,21 @@
import struct, logging, time, datetime
import logging
import weakref
from datetime import datetime
if __name__ == "app.src.messages":
from app.src.infos import Infos
from app.src.config import Config
else:
else: # pragma: no cover
from infos import Infos
from config import Config
logger = logging.getLogger('msg')
def hex_dump_memory(level, info, data, num):
s = ''
n = 0
lines = []
lines.append(info)
tracer = logging.getLogger('tracer')
if not tracer.isEnabledFor(level): return
#data = list((num * ctypes.c_byte).from_address(ptr))
if len(data) == 0:
return '<empty>'
if not tracer.isEnabledFor(level):
return
for i in range(0, num, 16):
line = ' '
@@ -32,272 +23,67 @@ def hex_dump_memory(level, info, data, num):
n += 16
for j in range(n-16, n):
if j >= len(data): break
if j >= len(data):
break
line += '%02x ' % abs(data[j])
line += ' ' * (3 * 16 + 9 - len(line)) + ' | '
for j in range(n-16, n):
if j >= len(data): break
if j >= len(data):
break
c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.'
line += '%c' % c
lines.append(line)
tracer.log(level, '\n'.join(lines))
#return '\n'.join(lines)
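# Illustrative usage sketch, not part of the original sources:
# hex_dump_memory() writes to the 'tracer' logger, so that logger must
# be enabled for the chosen level before anything is emitted. The frame
# bytes are arbitrary example data.
import logging

logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tracer').setLevel(logging.DEBUG)
hex_dump_memory(logging.DEBUG, 'Received from inverter:',
                b'\xa5\x01\x00\x10\x45\x00\x15', 7)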
class Control:
def __init__(self, ctrl:int):
self.ctrl = ctrl
def __int__(self) -> int:
return self.ctrl
def is_ind(self) -> bool:
return not (self.ctrl & 0x08)
#def is_req(self) -> bool:
# return not (self.ctrl & 0x08)
def is_resp(self) -> bool:
return self.ctrl & 0x08
class IterRegistry(type):
def __iter__(cls):
for ref in cls._registry:
obj = ref()
if obj is not None: yield obj
if obj is not None:
yield obj
class Message(metaclass=IterRegistry):
_registry = []
def __init__(self):
def __init__(self, server_side: bool):
self._registry.append(weakref.ref(self))
self.server_side = server_side
self.header_valid = False
self.header_len = 0
self.data_len = 0
self._recv_buffer = b''
self.unique_id = 0
self.node_id = ''
self.sug_area = ''
self._recv_buffer = bytearray(0)
self._send_buffer = bytearray(0)
self._forward_buffer = bytearray(0)
self.db = Infos()
self.new_data = {}
self.switch={
0x00: self.msg_contact_info,
0x22: self.msg_get_time,
0x71: self.msg_collector_data,
0x04: self.msg_inverter_data,
}
'''
Empty methods that have to be implemented in any child class which doesn't use asyncio
Empty methods that have to be implemented in any child class which
doesn't use asyncio
'''
def _read(self) -> None: # read data bytes from socket and copy them to our _recv_buffer
return
def _read(self) -> None: # read data bytes from socket and copy them
# to our _recv_buffer
return # pragma: no cover
'''
Our public methods
'''
def close(self) -> None:
# we have references to methods of this class in self.switch
# so we have to erase self.switch, otherwise this instance can't be
# deallocated by the garbage collector ==> we get a memory leak
del self.switch
def read(self) -> None:
self._read()
if not self.header_valid:
self.__parse_header(self._recv_buffer, len(self._recv_buffer))
if self.header_valid and len(self._recv_buffer) >= (self.header_len+self.data_len):
self.__dispatch_msg()
self.__flush_recv_msg()
return
def forward(self, buffer, buflen) -> None:
tsun = Config.get('tsun')
if tsun['enabled']:
self._forward_buffer = buffer[:buflen]
hex_dump_memory(logging.DEBUG, 'Store for forwarding:', buffer, buflen)
self.__parse_header(self._forward_buffer, len(self._forward_buffer))
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'forwrd') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' )
return
'''
Our private methods
'''
def __flow_str(self, server_side:bool, type:('rx','tx','forwrd', 'drop')):
switch={
'rx': ' <',
'tx': ' >',
'forwrd': '<< ',
'drop': ' xx',
'rxS': '> ',
'txS': '< ',
'forwrdS':' >>',
'dropS': 'xx ',
}
if server_side: type +='S'
return switch.get(type, '???')
def __timestamp(self):
if False:
# utc as epoch
ts = time.time()
else:
# convert localtime to epoch
ts = (datetime.now() - datetime(1970,1,1)).total_seconds()
return round(ts*1000)
# check if there is a complete header in the buffer, parse it
# and set
# self.header_len
# self.data_len
# self.id_str
# self.ctrl
# self.msg_id
#
# if the header is incomplete, than self.header_len is still 0
#
def __parse_header(self, buf:bytes, buf_len:int) -> None:
if (buf_len <5): # enough bytes to read len and id_len?
return
result = struct.unpack_from('!lB', buf, 0)
len = result[0] # len of complete message
id_len = result[1] # len of variable id string
hdr_len = 5+id_len+2
if (buf_len < hdr_len): # enough bytes for complete header?
return
result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4)
# store parsed header values in the class
self.id_str = result[0]
self.ctrl = Control(result[1])
self.msg_id = result[2]
self.data_len = len-id_len-3
self.header_len = hdr_len
self.header_valid = True
return
def __build_header(self, ctrl) -> None:
self.send_msg_ofs = len (self._send_buffer)
self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB', 0, self.id_str, ctrl, self.msg_id)
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'tx') + f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}' )
def __finish_send_msg(self) -> None:
_len = len(self._send_buffer) - self.send_msg_ofs
struct.pack_into('!l',self._send_buffer, self.send_msg_ofs, _len-4)
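# --- Editor's sketch (not part of the diff): how __build_header() and
# __finish_send_msg() cooperate. The 4-byte length field is packed as 0
# first and patched once the payload has been appended. The ctrl/msg values
# below are illustrative (an acknowledge as built by msg_contact_info).
import struct

id_str, ctrl, msg_id = b'R170000000000001', 0x99, 0x00
send_buffer = bytearray(0)
ofs = len(send_buffer)                                          # __build_header
send_buffer += struct.pack(f'!l{len(id_str)+1}pBB', 0, id_str, ctrl, msg_id)
send_buffer += b'\x01'                                          # message payload
msg_len = len(send_buffer) - ofs                                # __finish_send_msg
struct.pack_into('!l', send_buffer, ofs, msg_len - 4)           # patch length field
assert bytes(send_buffer) == b'\x00\x00\x00\x14\x10R170000000000001\x99\x00\x01'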
def __dispatch_msg(self) -> None:
hex_dump_memory(logging.INFO, f'Received from {self.addr}:', self._recv_buffer, self.header_len+self.data_len)
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'rx') + f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}' )
fnc()
def __flush_recv_msg(self) -> None:
self._recv_buffer = self._recv_buffer[(self.header_len+self.data_len):]
self.header_valid = False
'''
Message handler methods
'''
def msg_contact_info(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
elif self.ctrl.is_resp():
return # ignore received response from tsun
self.forward(self._recv_buffer, self.header_len+self.data_len)
def msg_get_time(self):
if self.ctrl.is_ind():
ts = self.__timestamp()
logger.debug(f'time: {ts:08x}')
self.__build_header(0x99)
self._send_buffer += struct.pack('!q', ts)
self.__finish_send_msg()
elif self.ctrl.is_resp():
result = struct.unpack_from(f'!q', self._recv_buffer, self.header_len)
logger.debug(f'tsun-time: {result[0]:08x}')
return # ignore received response from tsun
self.forward(self._recv_buffer, self.header_len+self.data_len)
def parse_msg_header(self):
result = struct.unpack_from('!lB', self._recv_buffer, self.header_len)
data_id = result[0] # ID of the following data record
id_len = result[1] # len of variable id string
logger.debug(f'Data_ID: {data_id} id_len: {id_len}')
msg_hdr_len= 5+id_len+9
result = struct.unpack_from(f'!{id_len+1}pBq', self._recv_buffer, self.header_len+4)
logger.debug(f'ID: {result[0]} B: {result[1]}')
logger.debug(f'time: {result[2]:08x}')
#logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime("%Y-%m-%d %H:%M:%S")}')
return msg_hdr_len
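# --- Editor's sketch (not part of the diff): the inner data-message header
# that parse_msg_header() walks over, taken from the MsgControllerInd test
# fixture (offsets are relative to the end of the 23-byte outer header).
# The meaning of the single byte between id string and timestamp is not
# documented here, so it is unpacked and ignored.
import struct

inner = b'\x0e\x10\x00\x00\x10R170000000000001\x01\x00\x00\x01\x89\xc6\x63\x55\x50'
data_id, id_len = struct.unpack_from('!lB', inner, 0)          # 0x0e100000, 16
id_str, _flag, ts_ms = struct.unpack_from(f'!{id_len+1}pBq', inner, 4)
msg_hdr_len = 5 + id_len + 9                                   # 30 bytes to skip
assert id_str == b'R170000000000001' and msg_hdr_len == 30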
def msg_collector_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
elif self.ctrl.is_resp():
return # ignore received response
self.forward(self._recv_buffer, self.header_len+self.data_len)
self.__process_data()
def msg_inverter_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
elif self.ctrl.is_resp():
return # ignore received response
self.forward(self._recv_buffer, self.header_len+self.data_len)
self.__process_data()
def __process_data(self):
msg_hdr_len = self.parse_msg_header()
for key, update in self.db.parse(self._recv_buffer[self.header_len + msg_hdr_len:]):
if update: self.new_data[key] = True
def msg_unknown(self):
logger.warning(f"Unknown Msg: ID:{self.msg_id}")
self.forward(self._recv_buffer, self.header_len+self.data_len)
pass # pragma: no cover
def inc_counter(self, counter: str) -> None:
self.db.inc_counter(counter)
Infos.new_stat_data['proxy'] = True
def dec_counter(self, counter: str) -> None:
self.db.dec_counter(counter)
Infos.new_stat_data['proxy'] = True

View File

@@ -1,4 +1,5 @@
import asyncio, logging
import asyncio
import logging
import aiomqtt
from config import Config
@@ -7,72 +8,88 @@ logger_mqtt = logging.getLogger('mqtt')
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
logger_mqtt.debug(f'singleton: __call__')
logger_mqtt.debug('singleton: __call__')
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
cls._instances[cls] = super(Singleton,
cls).__call__(*args, **kwargs)
return cls._instances[cls]
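# --- Editor's sketch (not part of the diff): effect of the metaclass. Every
# call of a class that uses Singleton returns the same instance, so Mqtt()
# can be requested from several modules while only one object exists.
class _SingletonDemo(metaclass=Singleton):
    pass

assert _SingletonDemo() is _SingletonDemo()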
class Mqtt(metaclass=Singleton):
client = None
def __init__(self):
logger_mqtt.debug(f'MQTT: __init__')
__client = None
__cb_MqttIsUp = None
def __init__(self, cb_MqttIsUp):
logger_mqtt.debug('MQTT: __init__')
if cb_MqttIsUp:
self.cb_MqttIsUp = cb_MqttIsUp
loop = asyncio.get_event_loop()
self.task = loop.create_task(self.__loop())
self.ha_restarts = 0
self.ha_restarts = 0
@property
def ha_restarts(self):
return self._ha_restarts
@ha_restarts.setter
def ha_restarts(self, value):
self._ha_restarts = value
def __del__(self):
logger_mqtt.debug(f'MQTT: __del__')
logger_mqtt.debug('MQTT: __del__')
async def close(self) -> None:
logger_mqtt.debug(f'MQTT: close')
logger_mqtt.debug('MQTT: close')
self.task.cancel()
try:
await self.task
except Exception as e:
logging.debug(f"Mqtt.close: exception: {e} ...")
async def publish(self, topic: str, payload: str | bytes | bytearray | int | float | None = None) -> None:
if self.client:
await self.client.publish(topic, payload)
async def publish(self, topic: str, payload: str | bytes | bytearray
| int | float | None = None) -> None:
if self.__client:
await self.__client.publish(topic, payload)
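# --- Editor's sketch (not part of the diff): how publish() is meant to be
# used from the rest of the proxy. Topic and payload are illustrative only;
# real topics are derived from the configured prefix and node id.
async def _publish_example() -> None:
    mqtt = Mqtt(None)      # singleton; None skips the MQTT-is-up callback
    await mqtt.publish('tsun/garagendach/grid', '{"Output_Power": 600.0}')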
async def __loop(self) -> None:
mqtt = Config.get('mqtt')
ha = Config.get('ha')
logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:{mqtt["port"]} user:{mqtt["user"]}')
self.client = aiomqtt.Client(hostname=mqtt['host'], port=mqtt['port'], username=mqtt['user'], password=mqtt['passwd'])
logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:'
f'{mqtt["port"]} '
f'user:{mqtt["user"]}')
self.__client = aiomqtt.Client(hostname=mqtt['host'],
port=mqtt['port'],
username=mqtt['user'],
password=mqtt['passwd'])
interval = 5 # Seconds
while True:
try:
async with self.client:
async with self.client.messages() as messages:
await self.client.subscribe(f"{ha['auto_conf_prefix']}/status")
async with self.__client:
logger_mqtt.info('MQTT broker connection established')
if self.cb_MqttIsUp:
await self.cb_MqttIsUp()
async with self.__client.messages() as messages:
await self.__client.subscribe(
f"{ha['auto_conf_prefix']}"
"/status")
async for message in messages:
status = message.payload.decode("UTF-8")
logger_mqtt.info(f'Home-Assistant Status: {status}')
logger_mqtt.info('Home-Assistant Status:'
f' {status}')
if status == 'online':
self.ha_restarts += 1
await self.cb_MqttIsUp()
except aiomqtt.MqttError:
logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...")
logger_mqtt.info(f"Connection lost; Reconnecting in {interval}"
" seconds ...")
await asyncio.sleep(interval)
except asyncio.CancelledError:
logger_mqtt.debug(f"MQTT task cancelled")
self.client = None
logger_mqtt.debug("MQTT task cancelled")
self.__client = None
return
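# --- Editor's sketch (not part of the diff): the reconnect pattern used in
# __loop() above, reduced to its skeleton. aiomqtt.MqttError is replaced by
# a generic ConnectionError here; names are illustrative only.
import asyncio

async def _keep_connected(connect, interval: int = 5):
    while True:
        try:
            await connect()                  # runs until the broker connection drops
        except ConnectionError:
            await asyncio.sleep(interval)    # back off, then try again
        except asyncio.CancelledError:
            return                           # graceful shutdown on task cancel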

View File

@@ -1,16 +1,28 @@
import logging, asyncio, signal, functools, os
from logging import config
from async_stream import AsyncStream
import logging
import asyncio
import signal
import functools
import os
from logging import config # noqa F401
from messages import Message
from inverter import Inverter
from gen3.inverter_g3 import InverterG3
from gen3plus.inverter_g3p import InverterG3P
from config import Config
from mqtt import Mqtt
async def handle_client(reader, writer):
'''Handles a new incoming connection and starts an async loop'''
addr = writer.get_extra_info('peername')
await Inverter(reader, writer, addr).server_loop(addr)
await InverterG3(reader, writer, addr).server_loop(addr)
async def handle_client_v2(reader, writer):
'''Handles a new incoming connection and starts an async loop'''
addr = writer.get_extra_info('peername')
await InverterG3P(reader, writer, addr).server_loop(addr)
def handle_SIGTERM(loop):
@@ -21,7 +33,7 @@ def handle_SIGTERM(loop):
#
# first, close all open TCP connections
#
for stream in AsyncStream:
for stream in Message:
stream.close()
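# --- Editor's sketch (not part of the diff): iterating over the Message
# class relies on the weakref registry filled in Message.__init__ above;
# the iterator itself is not shown in this diff. A plausible shape is:
class _RegistryMeta(type):
    def __iter__(cls):
        for ref in cls._registry:      # cls._registry holds weakref.ref objects
            obj = ref()
            if obj is not None:        # skip streams already garbage collected
                yield obj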
#
@@ -30,14 +42,15 @@ def handle_SIGTERM(loop):
loop.stop()
logging.info('Shutdown complete')
def get_log_level() -> int:
'''checks if LOG_LVL is set in the environment and returns the corresponding logging.LOG_LEVEL'''
'''checks if LOG_LVL is set in the environment and returns the
corresponding logging.LOG_LEVEL'''
log_level = os.getenv('LOG_LVL', 'INFO')
if log_level== 'DEBUG':
if log_level == 'DEBUG':
log_level = logging.DEBUG
elif log_level== 'WARN':
elif log_level == 'WARN':
log_level = logging.WARNING
else:
log_level = logging.INFO
@@ -49,48 +62,47 @@ if __name__ == "__main__":
# Setup our daily, rotating logger
#
serv_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
version = os.getenv('VERSION', 'unknown')
logging.config.fileConfig('logging.ini')
logging.info(f'Server "{serv_name} - {version}" will be started')
# set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
log_level = get_log_level()
logging.getLogger().setLevel(log_level)
logging.getLogger('msg').setLevel(log_level)
logging.getLogger('conn').setLevel(log_level)
logging.getLogger('data').setLevel(log_level)
# read config file
Config.read()
Config.read()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
# call Mqtt singleton to establish the connection to the mqtt broker
mqtt = Mqtt()
Inverter.class_init()
#
# Register some UNIX signal handlers for a graceful server shutdown on Docker restart and stop
#
for signame in ('SIGINT','SIGTERM'):
loop.add_signal_handler(getattr(signal, signame), functools.partial(handle_SIGTERM, loop))
# Register some UNIX signal handlers for a graceful server shutdown
# on Docker restart and stop
#
for signame in ('SIGINT', 'SIGTERM'):
loop.add_signal_handler(getattr(signal, signame),
functools.partial(handle_SIGTERM, loop))
#
# Create a task for our listening server. This must be a task! If we call start_server directly out
# of our main task, the eventloop will be blocked and we can't receive and handle the UNIX signals!
#
# Create tasks for our listening servers. These must be tasks! If we call
# start_server directly out of our main task, the eventloop will be blocked
# and we can't receive and handle the UNIX signals!
#
loop.create_task(asyncio.start_server(handle_client, '0.0.0.0', 5005))
loop.create_task(asyncio.start_server(handle_client_v2, '0.0.0.0', 10000))
try:
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
logging.info ('Close MQTT Task')
loop.run_until_complete(mqtt.close())
mqtt = None # release the last reference to the singleton
logging.info ('Close event loop')
Inverter.class_close(loop)
logging.info('Close event loop')
loop.close()
logging.info (f'Finally, exit Server "{serv_name}"')
logging.info(f'Finally, exit Server "{serv_name}"')

View File

@@ -1,6 +1,7 @@
# test_with_pytest.py
import pytest, json
from app.src.infos import Infos
from app.src.infos import Register
from app.src.gen3.infos_g3 import InfosG3
@pytest.fixture
def ContrDataSeq(): # Get Time Request message
@@ -12,31 +13,195 @@ def ContrDataSeq(): # Get Time Request message
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
return msg
@pytest.fixture
def Contr2DataSeq(): # Data indication from the controller
msg = b'\x00\x00\x00\x39\x00\x09\x2b\xa8\x54\x10\x52'
msg += b'\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x32\x30\x00'
msg += b'\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f\x6e\x00\x09\x2f\x90\x54'
msg += b'\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88'
msg += b'\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f'
msg += b'\x6d\x00\x09\x5a\xec\x54\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61'
msg += b'\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e'
msg += b'\x63\x6f\x6d\x00\x0d\x2f\x00\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x32\xe8\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x36\xd0\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x3a\xb8\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x3e\xa0\x54'
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\x00\x0d\x42\x88\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x46\x70\x54\x10\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x4a'
msg += b'\x58\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\x00\x0d\x4e\x40\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x52\x28\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x56\x10\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x59\xf8\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x5d\xe0\x54'
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\x00\x0d\x61\xc8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x65\xb0\x54\x10\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x69'
msg += b'\x98\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\x00\x0d\x6d\x80\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x71\x68\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x75\x50\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x79\x38\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x7d\x20\x54'
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\x00\x0d\x81\x08\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x84\xf0\x54\x10\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x88'
msg += b'\xd8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\x00\x0d\x8c\xc0\x54\x10\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x90\xa8\x54\x10\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
msg += b'\x0d\x94\x90\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x98\x78\x54\x10\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x9c\x60\x54'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
msg += b'\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00\x00'
msg += b'\x00\x10\x00\x0c\x96\xa8\x49\x00\x00\x01\x4e\x00\x0c\x7f\x38\x49'
msg += b'\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8'
msg += b'\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49\x00\x00\x00\x00\x00'
msg += b'\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00'
msg += b'\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00'
msg += b'\x00\x13\x8d\x00\x09\x5b\x50\x49\x00\x00\x00\x02\x00\x0d\x04\x08'
msg += b'\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c'
msg += b'\x50\x59\x49\x00\x00\x00\x33\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
msg += b'\x00\x0d\x23\x48\x49\xff\xff\xff\xff\x00\x0d\x27\x30\x49\xff\xff'
msg += b'\xff\xff\x00\x0d\x2b\x18\x4c\x00\x00\x00\x00\x00\x00\xff\xff\x00'
msg += b'\x0c\xa2\x60\x49\x00\x00\x00\x00\x00\x0d\xa0\x48\x49\x00\x00\x00'
msg += b'\x00\x00\x0d\xa4\x30\x49\x00\x00\x00\x00\x00\x0d\xa8\x18\x49\x00'
msg += b'\x00\x00\x00'
return msg
@pytest.fixture
def InvDataSeq(): # Data indication from the controller
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def InvalidDataSeq(): # Data indication from the controller
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x64\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def InvDataSeq2(): # Data indication from the controller
msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
msg += b'\x53\x00\x00'
return msg
@pytest.fixture
def InvDataSeq2_Zero(): # Data indication from the controller
msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x00\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x00\x00\x00\x00\x06'
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x00\x00\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x00\x00\x00\x00\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x00\x00\x00\x00'
msg += b'\x00\x00\x0b\xb8\x46\x00\x00\x00\x00\x00\x00\x0c\x1c\x46\x00\x00\x00\x00\x00\x00\x0c\x80\x46\x00\x00\x00\x00\x00\x00\x0c\xe4\x46\x00\x00\x00\x00\x00\x00\x0d\x48\x46'
msg += b'\x00\x00\x00\x00\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
msg += b'\x53\x00\x00'
return msg
def test_parse_control(ContrDataSeq):
i = Infos()
i = InfosG3()
for key, result in i.parse (ContrDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300}})
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 100, "Power_On_Time": 29, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300}})
def test_parse_control2(Contr2DataSeq):
i = InfosG3()
for key, result in i.parse (Contr2DataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.20", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 16, "Power_On_Time": 334, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300}})
def test_parse_inverter(InvDataSeq):
i = Infos()
i = InfosG3()
for key, result in i.parse (InvDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
{"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"}})
def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq):
i = Infos()
i = InfosG3()
for key, result in i.parse (ContrDataSeq):
pass
@@ -45,14 +210,16 @@ def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq):
assert json.dumps(i.db) == json.dumps(
{
"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300},
"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"}, "controller": {"Collect_Interval": 1, "Signal_Strength": 100, "Power_On_Time": 29, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300},
"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"}})
def test_build_ha_conf1(ContrDataSeq):
i = Infos()
i = InfosG3()
i.static_init() # initialize counter
tests = 0
for d_json, comp, id in i.ha_confs(prfx="tsun/garagendach/", snr='123'):
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123'):
if id == 'out_power_123':
assert comp == 'sensor'
@@ -66,53 +233,294 @@ def test_build_ha_conf1(ContrDataSeq):
elif id == 'power_pv1_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "ic": "mdi:gauge", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'power_pv2_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "ic": "mdi:gauge", "dev": {"name": "Module PV2", "sa": "Module PV2", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!
elif id == 'signal_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "via_device": "proxy", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'inv_count_456':
assert False
assert tests==4
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
if id == 'out_power_123':
assert False
elif id == 'daily_gen_123':
assert False
elif id == 'power_pv1_123':
assert False
elif id == 'power_pv2_123':
assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!
elif id == 'signal_123':
assert False
elif id == 'inv_count_456':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
assert tests==5
def test_build_ha_conf2(ContrDataSeq, InvDataSeq):
i = Infos()
def test_build_ha_conf2(ContrDataSeq, InvDataSeq, InvDataSeq2):
i = InfosG3()
for key, result in i.parse (ContrDataSeq):
pass
for key, result in i.parse (InvDataSeq):
pass
for key, result in i.parse (InvDataSeq2):
pass
tests = 0
for d_json, comp, id in i.ha_confs(prfx="tsun/garagendach/", snr='123'):
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'):
if id == 'out_power_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'daily_gen_123':
if id == 'daily_gen_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'power_pv1_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "ic": "mdi:gauge", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1 - roof", "sa": "Module PV1 - roof", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'power_pv2_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "ic": "mdi:gauge", "dev": {"name": "Module PV2", "sa": "Module PV2", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV2 - roof", "sa": "Module PV2 - roof", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'signal_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
assert tests==5
def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero):
i = InfosG3()
tests = 0
for key, update in i.parse (InvDataSeq2):
if key == 'total' or key == 'inverter' or key == 'env':
assert update == True
tests +=1
assert tests==5
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23})
tests = 0
for key, update in i.parse (InvDataSeq2):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == False
tests +=1
assert tests==3
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23})
assert json.dumps(i.db['inverter']) == json.dumps({"Rated_Power": 600, "No_Inputs": 2})
tests = 0
for key, update in i.parse (InvDataSeq2_Zero):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == True
tests +=1
assert tests==3
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0})
def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero):
i = InfosG3()
tests = 0
for key, update in i.parse (InvDataSeq2_Zero):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == True
tests +=1
assert tests==3
assert json.dumps(i.db['total']) == json.dumps({})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0})
tests = 0
for key, update in i.parse (InvDataSeq2_Zero):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == False
tests +=1
assert tests==3
assert json.dumps(i.db['total']) == json.dumps({})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0})
tests = 0
for key, update in i.parse (InvDataSeq2):
if key == 'total':
assert update == True
tests +=1
elif key == 'env':
assert update == True
tests +=1
assert tests==3
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23})
def test_statistic_counter():
i = InfosG3()
val = i.dev_value("Test-String")
assert val == "Test-String"
val = i.dev_value(0xffffffff) # invalid addr
assert val == None
val = i.dev_value(Register.INVERTER_CNT) # valid addr but not initialized
assert val == None or val == 0
i.static_init() # initialize counter
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0}})
val = i.dev_value(Register.INVERTER_CNT) # valid and initialized addr
assert val == 0
i.inc_counter('Inverter_Cnt')
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0}})
val = i.dev_value(Register.INVERTER_CNT)
assert val == 1
i.dec_counter('Inverter_Cnt')
val = i.dev_value(Register.INVERTER_CNT)
assert val == 0
def test_dep_rules():
i = InfosG3()
i.static_init() # initialize counter
res = i.ignore_this_device({})
assert res == True
res = i.ignore_this_device({'reg':0xffffffff})
assert res == True
i.inc_counter('Inverter_Cnt') # is 1
val = i.dev_value(Register.INVERTER_CNT)
assert val == 1
res = i.ignore_this_device({'reg': Register.INVERTER_CNT})
assert res == True
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'less_eq': 2})
assert res == False
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'gte': 2})
assert res == True
i.inc_counter('Inverter_Cnt') # is 2
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'less_eq': 2})
assert res == False
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'gte': 2})
assert res == False
i.inc_counter('Inverter_Cnt') # is 3
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'less_eq': 2})
assert res == True
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'gte': 2})
assert res == False
def test_table_definition():
i = InfosG3()
i.static_init() # initialize counter
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
assert val == 0
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'):
pass
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
pass
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
assert val == 0
# test missing 'fmt' value
i.info_defs[Register.TEST_REG1] = {'name':['proxy', 'Internal_Test1'], 'singleton': True, 'ha':{'dev':'proxy', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test1_'}}
tests = 0
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
if id == 'intern_test1_456':
tests +=1
assert tests == 1
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
assert val == 1
# test missing 'dev' value
i.info_defs[Register.TEST_REG1] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
tests = 0
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
if id == 'intern_test2_456':
tests +=1
assert tests == 1
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
assert val == 2
# test invalid 'via' value
i.info_devs['test_dev'] = {'via':'xyz', 'name':'Module PV1'}
i.info_defs[Register.TEST_REG1] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev':'test_dev', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
tests = 0
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
if id == 'intern_test2_456':
tests +=1
assert tests == 1
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
assert val == 3
def test_invalid_data_type(InvalidDataSeq):
i = InfosG3()
i.static_init() # initialize counter
val = i.dev_value(Register.INVALID_DATA_TYPE) # check invalid data type counter
assert val == 0
for key, result in i.parse (InvalidDataSeq):
pass
assert json.dumps(i.db) == json.dumps({"inverter": {"Product_Name": "Microinv"}})
val = i.dev_value(Register.INVALID_DATA_TYPE) # check invalid data type counter
assert val == 1

View File

@@ -1,21 +1,29 @@
# test_with_pytest.py
import pytest
from app.src.messages import Message, Control
import pytest, logging
from app.src.gen3.talent import Talent, Control
from app.src.config import Config
from app.src.infos import Infos
# initialize the proxy statistics
Infos.static_init()
class MemoryStream(Message):
def __init__(self, msg, chunks = (0,)):
super().__init__()
tracer = logging.getLogger('tracer')
class MemoryStream(Talent):
def __init__(self, msg, chunks = (0,), server_side: bool = True):
super().__init__(server_side)
self.__msg = msg
self.__msg_len = len(msg)
self.__chunks = chunks
self.__offs = 0
self.__chunk_idx = 0
self.msg_count = 0
self.server_side = False
self.addr = 'Test: SrvSide'
def append_msg(self, msg):
self.__msg += msg
self.__msg_len += len(msg)
def _read(self) -> int:
copied_bytes = 0
try:
@@ -34,15 +42,14 @@ class MemoryStream(Message):
pass
return copied_bytes
def _timestamp(self):
return 1700260990000
def _Message__flush_recv_msg(self) -> None:
super()._Message__flush_recv_msg()
def _Talent__flush_recv_msg(self) -> None:
super()._Talent__flush_recv_msg()
self.msg_count += 1
return
def __del__ (self):
super().__del__()
@pytest.fixture
def MsgContactInfo(): # Contact Info message
@@ -56,10 +63,112 @@ def MsgContactInfo_LongId(): # Contact Info message with longer ID
@pytest.fixture
def Msg2ContactInfo(): # two Contact Info messages
Config.config = {'tsun':{'enabled': True}}
return b'\x00\x00\x00\x2c\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub\x40123456\x00\x00\x00\x2c\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp(): # Contact Response message
return b'\x00\x00\x00\x14\x10R170000000000001\x91\x00\x01'
@pytest.fixture
def MsgContactResp2(): # Contact Response message
return b'\x00\x00\x00\x14\x10R170000000000002\x91\x00\x01'
@pytest.fixture
def MsgContactInvalid(): # Contact message with an invalid control byte
return b'\x00\x00\x00\x14\x10R170000000000001\x93\x00\x01'
@pytest.fixture
def MsgGetTime(): # Get Time Request message
return b'\x00\x00\x00\x13\x10R170000000000001\x91\x22'
@pytest.fixture
def MsgTimeResp(): # Get Time Response message
return b'\x00\x00\x00\x1b\x10R170000000000001\x91\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
@pytest.fixture
def MsgTimeInvalid(): # Get Time Request message with an invalid control byte
return b'\x00\x00\x00\x13\x10R170000000000001\x94\x22'
@pytest.fixture
def MsgControllerInd(): # Data indication from the controller
msg = b'\x00\x00\x01\x2f\x10R170000000000001\x91\x71\x0e\x10\x00\x00\x10R170000000000001'
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x55\x50'
msg += b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
return msg
@pytest.fixture
def MsgControllerAck(): # Acknowledge for the controller data indication
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x71\x01'
@pytest.fixture
def MsgControllerInvalid(): # Controller data message with an invalid control byte
return b'\x00\x00\x00\x14\x10R170000000000001\x92\x71\x01'
@pytest.fixture
def MsgInverterInd(): # Data indication from the controller
msg = b'\x00\x00\x00\x8b\x10R170000000000001\x91\x04\x01\x90\x00\x01\x10R170000000000001'
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def MsgInverterAck(): # Acknowledge for the inverter data indication
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x04\x01'
@pytest.fixture
def MsgInverterInvalid(): # Inverter data message with an invalid control byte
return b'\x00\x00\x00\x14\x10R170000000000001\x92\x04\x01'
@pytest.fixture
def MsgUnknown(): # Message with an unknown message ID
return b'\x00\x00\x00\x17\x10R170000000000001\x91\x17\x01\x02\x03\x04'
@pytest.fixture
def ConfigTsunAllowAll():
Config.config = {'tsun':{'enabled': True}, 'inverters':{'allow_all':True}}
@pytest.fixture
def ConfigNoTsunInv1():
Config.config = {'tsun':{'enabled': False},'inverters':{'R170000000000001':{'node_id':'inv1','suggested_area':'roof'}}}
@pytest.fixture
def ConfigTsunInv1():
Config.config = {'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1','suggested_area':'roof'}}}
@pytest.fixture
def MsgOtaReq(): # Over-the-air update request from the TSUN cloud
msg = b'\x00\x00\x01\x16\x10R170000000000001\x70\x13\x01\x02\x76\x35\x70\x68\x74\x74\x70'
msg += b'\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f'
msg += b'\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30\x30'
msg += b'\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f\x75'
msg += b'\x70\x64\x61\x74\x65\x2f\x64\x6f\x77\x6e\x6c\x6f\x61\x64\x3f\x76'
msg += b'\x65\x72\x3d\x56\x31\x2e\x30\x30\x2e\x31\x37\x26\x6e\x61\x6d\x65'
msg += b'\x3d\x47\x33\x2d\x57\x69\x46\x69\x2b\x2d\x56\x31\x2e\x30\x30\x2e'
msg += b'\x31\x37\x2d\x4f\x54\x41\x26\x65\x78\x74\x3d\x30\x60\x68\x74\x74'
msg += b'\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d'
msg += b'\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30'
msg += b'\x30\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f'
msg += b'\x75\x70\x64\x61\x74\x65\x2f\x63\x61\x6c\x6c\x62\x61\x63\x6b\x3f'
msg += b'\x71\x69\x64\x3d\x31\x35\x30\x33\x36\x32\x26\x72\x69\x64\x3d\x32'
msg += b'\x32\x39\x26\x64\x69\x64\x3d\x31\x33\x34\x32\x32\x35\x20\x36\x35'
msg += b'\x66\x30\x64\x37\x34\x34\x62\x66\x33\x39\x61\x62\x38\x32\x34\x64'
msg += b'\x32\x38\x62\x38\x34\x64\x31\x39\x65\x64\x33\x31\x31\x63\x06\x34'
msg += b'\x36\x38\x36\x33\x33\x01\x31\x01\x30\x00'
return msg
@pytest.fixture
def MsgOtaAck(): # Acknowledge for the over-the-air update request
return b'\x00\x00\x00\x14\x10R170000000000001\x91\x13\x01'
@pytest.fixture
def MsgOtaInvalid(): # OTA update message with an unexpected control byte
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x13\x01'
def test_read_message(MsgContactInfo):
@@ -68,11 +177,39 @@ def test_read_message(MsgContactInfo):
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == None
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
assert m._forward_buffer==b''
m.close()
def test_read_message_twice(ConfigNoTsunInv1, MsgInverterInd):
ConfigNoTsunInv1
m = MemoryStream(MsgInverterInd, (0,))
m.append_msg(MsgInverterInd)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==120
assert m._forward_buffer==b''
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==120
assert m._forward_buffer==b''
m.close()
def test_read_message_long_id(MsgContactInfo_LongId):
m = MemoryStream(MsgContactInfo_LongId, (23,24))
@@ -83,6 +220,7 @@ def test_read_message_long_id(MsgContactInfo_LongId):
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
assert m.id_str == b"R1700000000000011"
assert m.unique_id == 0
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==24
@@ -90,6 +228,7 @@ def test_read_message_long_id(MsgContactInfo_LongId):
m.read() # try to read rest of message, but there is no chunk available
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
m.close()
def test_read_message_in_chunks(MsgContactInfo):
@@ -101,6 +240,7 @@ def test_read_message_in_chunks(MsgContactInfo):
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 0
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
@@ -108,6 +248,7 @@ def test_read_message_in_chunks(MsgContactInfo):
m.read() # read rest of message
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
m.close()
def test_read_message_in_chunks2(MsgContactInfo):
m = MemoryStream(MsgContactInfo, (4,10,0))
@@ -122,6 +263,7 @@ def test_read_message_in_chunks2(MsgContactInfo):
assert m.header_len==23
assert m.data_len==25
assert m.id_str == b"R170000000000001"
assert m.unique_id == None
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.msg_count == 1
@@ -129,25 +271,428 @@ def test_read_message_in_chunks2(MsgContactInfo):
pass
assert m.msg_count == 1
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
m.close()
def test_read_two_messages(ConfigTsunAllowAll, Msg2ContactInfo, MsgContactResp, MsgContactResp2):
ConfigTsunAllowAll
m = MemoryStream(Msg2ContactInfo, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
assert m._forward_buffer==b''
assert m._send_buffer==MsgContactResp
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m._send_buffer = bytearray(0) # clear send buffer for next test
m.contact_name = b'solarhub'
m.contact_mail = b'solarhub@123456'
m._init_new_client_conn()
assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub@123456'
m._send_buffer = bytearray(0) # clear send buffer for next test
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.id_str == b"R170000000000002"
assert m.unique_id == 'R170000000000002'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
assert m._forward_buffer==b''
assert m._send_buffer==MsgContactResp2
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m._send_buffer = bytearray(0) # clear send buffer for next test
m.contact_name = b'solarhub'
m.contact_mail = b'solarhub@123456'
m._init_new_client_conn()
assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456'
m.close()
def test_msg_contact_resp(ConfigTsunInv1, MsgContactResp):
ConfigTsunInv1
m = MemoryStream(MsgContactResp, (0,), False)
m.await_conn_resp_cnt = 1
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.await_conn_resp_cnt == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_contact_resp_2(ConfigTsunInv1, MsgContactResp):
ConfigTsunInv1
m = MemoryStream(MsgContactResp, (0,), False)
m.await_conn_resp_cnt = 0
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.await_conn_resp_cnt == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgContactResp
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_contact_resp_3(ConfigTsunInv1, MsgContactResp):
ConfigTsunInv1
m = MemoryStream(MsgContactResp, (0,), True)
m.await_conn_resp_cnt = 0
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.await_conn_resp_cnt == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgContactResp
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_contact_invalid(ConfigTsunInv1, MsgContactInvalid):
ConfigTsunInv1
m = MemoryStream(MsgContactInvalid, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==147
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgContactInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_get_time(ConfigTsunInv1, MsgGetTime):
ConfigTsunInv1
m = MemoryStream(MsgGetTime, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==MsgGetTime
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_get_time_autark(ConfigNoTsunInv1, MsgGetTime):
ConfigNoTsunInv1
m = MemoryStream(MsgGetTime, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==b''
assert m._send_buffer==b'\x00\x00\x00\x1b\x10R170000000000001\x91"\x00\x00\x01\x8b\xdfs\xcc0'
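# the trailing 8 bytes of the expected reply are a big-endian timestamp in
# milliseconds: 0x0000018bdf73cc30 == 1700260990000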
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_time_resp(ConfigTsunInv1, MsgTimeResp):
ConfigTsunInv1
m = MemoryStream(MsgTimeResp, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==8
assert m._forward_buffer==MsgTimeResp
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_time_resp_autark(ConfigNoTsunInv1, MsgTimeResp):
ConfigNoTsunInv1
m = MemoryStream(MsgTimeResp, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==8
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_time_invalid(ConfigTsunInv1, MsgTimeInvalid):
ConfigTsunInv1
m = MemoryStream(MsgTimeInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==148
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==MsgTimeInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_time_invalid_autark(ConfigNoTsunInv1, MsgTimeInvalid):
ConfigNoTsunInv1
m = MemoryStream(MsgTimeInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==148
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_cntrl_ind(ConfigTsunInv1, MsgControllerInd, MsgControllerAck):
ConfigTsunInv1
m = MemoryStream(MsgControllerInd, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==113
assert m.header_len==23
assert m.data_len==284
assert m._forward_buffer==MsgControllerInd
assert m._send_buffer==MsgControllerAck
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_cntrl_ack(ConfigTsunInv1, MsgControllerAck):
ConfigTsunInv1
m = MemoryStream(MsgControllerAck, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==113
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_cntrl_invalid(ConfigTsunInv1, MsgControllerInvalid):
ConfigTsunInv1
m = MemoryStream(MsgControllerInvalid, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==146
assert m.msg_id==113
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgControllerInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_inv_ind(ConfigTsunInv1, MsgInverterInd, MsgInverterAck):
ConfigTsunInv1
tracer.setLevel(logging.DEBUG)
m = MemoryStream(MsgInverterInd, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==120
assert m._forward_buffer==MsgInverterInd
assert m._send_buffer==MsgInverterAck
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_inv_ack(ConfigTsunInv1, MsgInverterAck):
ConfigTsunInv1
tracer.setLevel(logging.ERROR)
m = MemoryStream(MsgInverterAck, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_inv_invalid(ConfigTsunInv1, MsgInverterInvalid):
ConfigTsunInv1
m = MemoryStream(MsgInverterInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==146
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgInverterInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_ota_req(ConfigTsunInv1, MsgOtaReq):
ConfigTsunInv1
m = MemoryStream(MsgOtaReq, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.db.stat['proxy']['OTA_Start_Msg'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==112
assert m.msg_id==19
assert m.header_len==23
assert m.data_len==259
assert m._forward_buffer==MsgOtaReq
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
assert m.db.stat['proxy']['OTA_Start_Msg'] == 1
m.close()
def test_msg_ota_ack(ConfigTsunInv1, MsgOtaAck):
ConfigTsunInv1
tracer.setLevel(logging.ERROR)
m = MemoryStream(MsgOtaAck, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.db.stat['proxy']['OTA_Start_Msg'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==19
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgOtaAck
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
assert m.db.stat['proxy']['OTA_Start_Msg'] == 0
m.close()
def test_msg_ota_invalid(ConfigTsunInv1, MsgOtaInvalid):
ConfigTsunInv1
m = MemoryStream(MsgOtaInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.db.stat['proxy']['OTA_Start_Msg'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==19
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgOtaInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
assert m.db.stat['proxy']['OTA_Start_Msg'] == 0
m.close()
def test_msg_unknown(ConfigTsunInv1, MsgUnknown):
ConfigTsunInv1
m = MemoryStream(MsgUnknown, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==23
assert m.header_len==23
assert m.data_len==4
assert m._forward_buffer==MsgUnknown
assert m._send_buffer==b''
assert 1 == m.db.stat['proxy']['Unknown_Msg']
m.close()
def test_ctrl_byte():
c = Control(0x91)
@@ -158,3 +703,38 @@ def test_ctrl_byte():
assert c.is_resp()
def test_msg_iterator():
m1 = Talent(server_side=True)
m2 = Talent(server_side=True)
m3 = Talent(server_side=True)
m3.close()
del m3
test1 = 0
test2 = 0
for key in Talent:
if key == m1:
test1+=1
elif key == m2:
test2+=1
else:
assert False
assert test1 == 1
assert test2 == 1
def test_proxy_counter():
m = Talent(server_side=True)
assert m.new_data == {}
m.db.stat['proxy']['Unknown_Msg'] = 0
Infos.new_stat_data['proxy'] = False
m.inc_counter('Unknown_Msg')
assert m.new_data == {}
assert Infos.new_stat_data == {'proxy': True}
assert 1 == m.db.stat['proxy']['Unknown_Msg']
Infos.new_stat_data['proxy'] = False
m.dec_counter('Unknown_Msg')
assert m.new_data == {}
assert Infos.new_stat_data == {'proxy': True}
assert 0 == m.db.stat['proxy']['Unknown_Msg']
m.close()

app/tests/test_solarman.py

@@ -0,0 +1,781 @@
import pytest, json
from app.src.gen3plus.solarman_v5 import SolarmanV5
from app.src.config import Config
from app.src.infos import Infos, Register
# initialize the proxy statistics
Infos.static_init()
class MemoryStream(SolarmanV5):
def __init__(self, msg, chunks = (0,), server_side: bool = True):
super().__init__(server_side)
self.__msg = msg
self.__msg_len = len(msg)
self.__chunks = chunks
self.__offs = 0
self.__chunk_idx = 0
self.msg_count = 0
self.addr = 'Test: SrvSide'
self.db.stat['proxy']['Invalid_Msg_Format'] = 0
self.db.stat['proxy']['AT_Command'] = 0
def append_msg(self, msg):
self.__msg += msg
self.__msg_len += len(msg)
def _read(self) -> int:
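# self.__chunks holds absolute end offsets into the test message; an entry
# of 0 means "deliver the remainder". E.g. chunks=(4, 11, 0) feeds 4 bytes,
# then up to offset 11, then the rest, so tests can simulate partial reads.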
copied_bytes = 0
try:
if (self.__offs < self.__msg_len):
len = self.__chunks[self.__chunk_idx]
self.__chunk_idx += 1
if len!=0:
self._recv_buffer += self.__msg[self.__offs:len]
copied_bytes = len - self.__offs
self.__offs = len
else:
self._recv_buffer += self.__msg[self.__offs:]
copied_bytes = self.__msg_len - self.__offs
self.__offs = self.__msg_len
except:
pass
return copied_bytes
def _timestamp(self):
return 1700260990000
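# overriding the name-mangled private method __flush_recv_msg() of
# SolarmanV5 lets the test count how many complete messages the base class
# has handled and flushed (see the msg_count asserts below)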
def _SolarmanV5__flush_recv_msg(self) -> None:
super()._SolarmanV5__flush_recv_msg()
self.msg_count += 1
return
def get_sn() -> bytes:
return b'\x21\x43\x65\x7b'
def get_inv_no() -> bytes:
return b'T170000000000001'
def get_invalid_sn():
return b'R170000000000002'
def correct_checksum(buf):
checksum = sum(buf[1:]) & 0xff
return checksum.to_bytes(length=1)
def incorrect_checksum(buf):
checksum = (sum(buf[1:])+1) & 0xff
return checksum.to_bytes(length=1)
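# --- Illustration only, not part of the proxy code --------------------------
# All fixtures below follow the same Solarman V5 framing, inferred from the
# byte strings and the control/serial/snr/data_len asserts in the tests:
# start byte 0xa5, 16-bit little-endian payload length, 16-bit control code,
# 16-bit serial, 32-bit logger serial number, payload, 8-bit checksum over
# everything after the start byte, stop byte 0x15. build_v5_frame() is a
# hypothetical helper that only documents this layout; it reuses
# correct_checksum() from above.
def build_v5_frame(control: int, serial: int, snr: int, payload: bytes) -> bytes:
    frame = b'\xa5'                            # start byte
    frame += len(payload).to_bytes(2, 'little')
    frame += control.to_bytes(2, 'little')     # e.g. 0x4110 for the device ind
    frame += serial.to_bytes(2, 'little')
    frame += snr.to_bytes(4, 'little')         # e.g. 2070233889 == 0x7b654321
    frame += payload
    frame += correct_checksum(frame)           # sum(frame[1:]) & 0xff
    frame += b'\x15'                           # stop byte
    return frame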
@pytest.fixture
def DeviceIndMsg(): # 0x4110
msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xba\xd2\x00\x00'
msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53'
msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e'
msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54\x31\x39\x32\x2e'
msg += b'\x31\x36\x38\x2e\x38\x30\x2e\x34\x39\x00\x00\x00\x0f\x00\x01\xb0'
msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x41\x6c\x6c\x69\x75\x73\x2d\x48\x6f'
msg += b'\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def DeviceRspMsg(): # 0x1110
msg = b'\xa5\x0a\x00\x10\x11\x10\x84' +get_sn() +b'\x01\x01\x69\x6f\x09'
msg += b'\x66\x78\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def InvalidStartByte(): # 0x4110
msg = b'\xa4\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xba\xd2\x00\x00'
msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53'
msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e'
msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54\x31\x39\x32\x2e'
msg += b'\x31\x36\x38\x2e\x38\x30\x2e\x34\x39\x00\x00\x00\x0f\x00\x01\xb0'
msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x41\x6c\x6c\x69\x75\x73\x2d\x48\x6f'
msg += b'\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def InvalidStopByte(): # 0x4110
msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xba\xd2\x00\x00'
msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53'
msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e'
msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54\x31\x39\x32\x2e'
msg += b'\x31\x36\x38\x2e\x38\x30\x2e\x34\x39\x00\x00\x00\x0f\x00\x01\xb0'
msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x41\x6c\x6c\x69\x75\x73\x2d\x48\x6f'
msg += b'\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x14'
return msg
@pytest.fixture
def InvalidChecksum(): # 0x4110
msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xba\xd2\x00\x00'
msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53'
msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e'
msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54\x31\x39\x32\x2e'
msg += b'\x31\x36\x38\x2e\x38\x30\x2e\x34\x39\x00\x00\x00\x0f\x00\x01\xb0'
msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x41\x6c\x6c\x69\x75\x73\x2d\x48\x6f'
msg += b'\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += incorrect_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def InverterIndMsg(): # 0x4210
msg = b'\xa5\x99\x01\x10\x42\xe6\x9e' +get_sn() +b'\x01\xb0\x02\xbc\xc8'
msg += b'\x24\x32\x6c\x1f\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
msg += b'\x59\x31\x37\x45\x37\x41\x30\x46\x30\x31\x30\x42\x30\x31\x33\x45'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x36\x00\x00\x02\x58\x06\x7a'
msg += b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd'
msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04'
msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75'
msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00'
msg += b'\x00\x00\x00\x00\xff\xff\x07\xd0\x00\x03\x04\x00\x04\x00\x04\x00'
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
msg += b'\x04\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
msg += b'\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
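# Note: the next three fixtures (1600 W / 1800 W / 2000 W) appear to differ
# from InverterIndMsg only in the two 16-bit power fields that the
# test_build_modell_* tests at the end of this file check via
# Register.RATED_POWER and Register.MAX_DESIGNED_POWER
# (0x0258 = 600, 0x0640 = 1600, 0x0708 = 1800, 0x07d0 = 2000).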
@pytest.fixture
def InverterIndMsg1600(): # 0x4210 rated Power 1600W
msg = b'\xa5\x99\x01\x10\x42\xe6\x9e' +get_sn() +b'\x01\xb0\x02\xbc\xc8'
msg += b'\x24\x32\x6c\x1f\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
msg += b'\x59\x31\x37\x45\x37\x41\x30\x46\x30\x31\x30\x42\x30\x31\x33\x45'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x36\x00\x00\x06\x40\x06\x7a'
msg += b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd'
msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04'
msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75'
msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00'
msg += b'\x00\x00\x00\x00\xff\xff\x06\x40\x00\x03\x04\x00\x04\x00\x04\x00'
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
msg += b'\x04\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
msg += b'\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def InverterIndMsg1800(): # 0x4210 rated Power 1800W
msg = b'\xa5\x99\x01\x10\x42\xe6\x9e' +get_sn() +b'\x01\xb0\x02\xbc\xc8'
msg += b'\x24\x32\x6c\x1f\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
msg += b'\x59\x31\x37\x45\x37\x41\x30\x46\x30\x31\x30\x42\x30\x31\x33\x45'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x36\x00\x00\x07\x08\x06\x7a'
msg += b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd'
msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04'
msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75'
msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00'
msg += b'\x00\x00\x00\x00\xff\xff\x07\x08\x00\x03\x04\x00\x04\x00\x04\x00'
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
msg += b'\x04\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
msg += b'\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def InverterIndMsg2000(): # 0x4210 rated Power 2000W
msg = b'\xa5\x99\x01\x10\x42\xe6\x9e' +get_sn() +b'\x01\xb0\x02\xbc\xc8'
msg += b'\x24\x32\x6c\x1f\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
msg += b'\x59\x31\x37\x45\x37\x41\x30\x46\x30\x31\x30\x42\x30\x31\x33\x45'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x36\x00\x00\x07\xd0\x06\x7a'
msg += b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd'
msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04'
msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75'
msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00'
msg += b'\x00\x00\x00\x00\xff\xff\x07\xd0\x00\x03\x04\x00\x04\x00\x04\x00'
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
msg += b'\x04\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
msg += b'\x00\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def InverterRspMsg(): # 0x1210
msg = b'\xa5\x0a\x00\x10\x12\x10\x84' +get_sn() +b'\x01\x01\x69\x6f\x09'
msg += b'\x66\x78\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def UnknownMsg(): # 0x5110
msg = b'\xa5\x0a\x00\x10\x51\x10\x84' +get_sn() +b'\x01\x01\x69\x6f\x09'
msg += b'\x66\x78\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def HeartbeatIndMsg(): # 0x4710
msg = b'\xa5\x01\x00\x10\x47\x10\x84' +get_sn()
msg += b'\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def HeartbeatRspMsg(): # 0x1710
msg = b'\xa5\x0a\x00\x10\x17\x10\x84' +get_sn() +b'\x00\x01\x22\x71\x09'
msg += b'\x66\x78\x00\x00\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def AtCommandIndMsg(): # 0x4510
msg = b'\xa5\x01\x00\x10\x45\x10\x84' +get_sn()
msg += b'\x00'
msg += correct_checksum(msg)
msg += b'\x15'
return msg
@pytest.fixture
def ConfigTsunAllowAll():
Config.config = {'solarman':{'enabled': True}, 'inverters':{'allow_all':True}}
@pytest.fixture
def ConfigNoTsunInv1():
Config.config = {'solarman':{'enabled': False},'inverters':{'Y170000000000001':{'monitor_sn': 2070233889,'node_id':'inv1','suggested_area':'roof'}}}
@pytest.fixture
def ConfigTsunInv1():
Config.config = {'solarman':{'enabled': True},'inverters':{'Y170000000000001':{'monitor_sn': 2070233889,'node_id':'inv1','suggested_area':'roof'}}}
def test_read_message(DeviceIndMsg):
m = MemoryStream(DeviceIndMsg, (0,))
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == None
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_invalid_start_byte(InvalidStartByte, DeviceIndMsg):
# received a message with wrong start byte plus a valid message
# the complete receive buffer must be cleared to
# find the next valid message
m = MemoryStream(InvalidStartByte, (0,))
m.append_msg(DeviceIndMsg)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since start byte is wrong
assert m.msg_count == 0
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == 0
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
def test_invalid_stop_byte(InvalidStopByte):
# received a message with wrong stop byte
# the complete receive buffer must be cleared to
# find the next valid message
m = MemoryStream(InvalidStopByte, (0,))
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since the stop byte is wrong
assert m.msg_count == 1 # msg flush was called
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == 0
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
def test_invalid_stop_byte2(InvalidStopByte, DeviceIndMsg):
# received a message with wrong stop byte plus a valid message
# only the first message must be discarded
m = MemoryStream(InvalidStopByte, (0,))
m.append_msg(DeviceIndMsg)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since the stop byte is wrong
assert m.msg_count == 1 # msg flush was called
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == 0
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==DeviceIndMsg
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == None
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
def test_invalid_stop_start_byte(InvalidStopByte, InvalidStartByte):
# received a message with wrong stop byte plus an invalid message
# with a wrong start byte
# the complete receive buffer must be cleared to
# find the next valid message
m = MemoryStream(InvalidStopByte, (0,))
m.append_msg(InvalidStartByte)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since the stop byte is wrong
assert m.msg_count == 1 # msg flush was called
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == 0
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
def test_invalid_checksum(InvalidChecksum, DeviceIndMsg):
# received a message with wrong checksum plus a valid message
# only the first message must be discarded
m = MemoryStream(InvalidChecksum, (0,))
m.append_msg(DeviceIndMsg)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since the checksum is wrong
assert m.msg_count == 1 # msg flush was called
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == 0
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==DeviceIndMsg
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == None
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 1
m.close()
def test_read_message_twice(ConfigNoTsunInv1, DeviceIndMsg):
ConfigNoTsunInv1
m = MemoryStream(DeviceIndMsg, (0,))
m.append_msg(DeviceIndMsg)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_read_message_in_chunks(DeviceIndMsg):
m = MemoryStream(DeviceIndMsg, (4,11,0))
m.read() # read 4 bytes, header incomplete
assert not m.header_valid # must be invalid, since header not complete
assert m.msg_count == 0
m.read() # read missing bytes for complete header
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == 0 # should be None ?
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.read() # read rest of message
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_read_message_in_chunks2(ConfigTsunInv1, DeviceIndMsg):
ConfigTsunInv1
m = MemoryStream(DeviceIndMsg, (4,10,0))
m.read() # read 4 bytes, header incomplete
assert not m.header_valid
assert m.msg_count == 0
m.read() # read 6 more bytes, header incomplete
assert not m.header_valid
assert m.msg_count == 0
m.read() # read rest of message
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m.msg_count == 1
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
while m.read(): # read rest of message
pass
assert m.msg_count == 1
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_read_two_messages(ConfigTsunAllowAll, DeviceIndMsg, InverterIndMsg):
ConfigTsunAllowAll
m = MemoryStream(DeviceIndMsg, (0,))
m.append_msg(InverterIndMsg)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x4110
assert m.serial == 0x0100
assert m.data_len == 0xd4
assert m.msg_count == 1
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
assert m._forward_buffer==DeviceIndMsg
assert m._send_buffer==b''
# assert m._send_buffer==MsgContactResp
m._send_buffer = bytearray(0) # clear send buffer for next test
m._init_new_client_conn()
assert m._send_buffer==b''
assert m._recv_buffer==InverterIndMsg
m._send_buffer = bytearray(0) # clear send buffer for next test
m._forward_buffer = bytearray(0) # clear forward buffer for next test
m.read() # read complete msg, and dispatch msg
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x4210
assert m.serial == 0x9ee6
assert m.data_len == 0x199
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
assert m._forward_buffer==InverterIndMsg
assert m._send_buffer==b''
m._send_buffer = bytearray(0) # clear send buffer for next test
m._init_new_client_conn()
assert m._send_buffer==b''
m.close()
def test_unknown_message(ConfigTsunInv1, UnknownMsg):
ConfigTsunInv1
m = MemoryStream(UnknownMsg, (0,))
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x5110
assert m.serial == 0x8410
assert m.data_len == 0x0a
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==UnknownMsg
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_device_rsp(ConfigTsunInv1, DeviceRspMsg):
ConfigTsunInv1
m = MemoryStream(DeviceRspMsg, (0,), False)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x1110
assert m.serial == 0x8410
assert m.data_len == 0x0a
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==DeviceRspMsg
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_inverter_rsp(ConfigTsunInv1, InverterRspMsg):
ConfigTsunInv1
m = MemoryStream(InverterRspMsg, (0,), False)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x1210
assert m.serial == 0x8410
assert m.data_len == 0x0a
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==InverterRspMsg
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_heartbeat_ind(ConfigTsunInv1, HeartbeatIndMsg):
ConfigTsunInv1
m = MemoryStream(HeartbeatIndMsg, (0,))
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
# assert m.unique_id == '2070233889'
assert m.control == 0x4710
assert m.serial == 0x8410
assert m.data_len == 0x01
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==HeartbeatIndMsg
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_heartbeat_rsp(ConfigTsunInv1, HeartbeatRspMsg):
ConfigTsunInv1
m = MemoryStream(HeartbeatRspMsg, (0,), False)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
assert m.unique_id == '2070233889'
assert m.control == 0x1710
assert m.serial == 0x8410
assert m.data_len == 0x0a
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==HeartbeatRspMsg
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
m.close()
def test_at_command_ind(ConfigTsunInv1, AtCommandIndMsg):
ConfigTsunInv1
m = MemoryStream(AtCommandIndMsg, (0,))
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.header_len==11
assert m.snr == 2070233889
# assert m.unique_id == '2070233889'
assert m.control == 0x4510
assert m.serial == 0x8410
assert m.data_len == 0x01
assert m._recv_buffer==b''
assert m._send_buffer==b''
assert m._forward_buffer==AtCommandIndMsg
assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0
assert m.db.stat['proxy']['AT_Command'] == 1
m.close()
def test_build_modell_600(ConfigTsunAllowAll, InverterIndMsg):
ConfigTsunAllowAll
m = MemoryStream(InverterIndMsg, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert None == m.db.get_db_value(Register.RATED_POWER, None)
assert None == m.db.get_db_value(Register.INVERTER_TEMP, None)
m.read() # read complete msg, and dispatch msg
assert 2000 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert 600 == m.db.get_db_value(Register.RATED_POWER, 0)
assert 'TSOL-MS2000(600)' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m._send_buffer = bytearray(0) # clear send buffer for next test
m._init_new_client_conn()
assert m._send_buffer==b''
m.close()
def test_build_modell_1600(ConfigTsunAllowAll, InverterIndMsg1600):
ConfigTsunAllowAll
m = MemoryStream(InverterIndMsg1600, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert None == m.db.get_db_value(Register.RATED_POWER, None)
assert None == m.db.get_db_value(Register.INVERTER_TEMP, None)
m.read() # read complete msg, and dispatch msg
assert 1600 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert 1600 == m.db.get_db_value(Register.RATED_POWER, 0)
assert 'TSOL-MS1600' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m.close()
def test_build_modell_1800(ConfigTsunAllowAll, InverterIndMsg1800):
ConfigTsunAllowAll
m = MemoryStream(InverterIndMsg1800, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert None == m.db.get_db_value(Register.RATED_POWER, None)
assert None == m.db.get_db_value(Register.INVERTER_TEMP, None)
m.read() # read complete msg, and dispatch msg
assert 1800 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert 1800 == m.db.get_db_value(Register.RATED_POWER, 0)
assert 'TSOL-MS1800' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m.close()
def test_build_modell_2000(ConfigTsunAllowAll, InverterIndMsg2000):
ConfigTsunAllowAll
m = MemoryStream(InverterIndMsg2000, (0,))
assert 0 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert None == m.db.get_db_value(Register.RATED_POWER, None)
assert None == m.db.get_db_value(Register.INVERTER_TEMP, None)
m.read() # read complete msg, and dispatch msg
assert 2000 == m.db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
assert 2000 == m.db.get_db_value(Register.RATED_POWER, 0)
assert 'TSOL-MS2000' == m.db.get_db_value(Register.EQUIPMENT_MODEL, 0)
m.close()
def test_build_logger_modell(ConfigTsunAllowAll, DeviceIndMsg):
ConfigTsunAllowAll
m = MemoryStream(DeviceIndMsg, (0,))
assert 0 == m.db.get_db_value(Register.COLLECTOR_FW_VERSION, 0)
assert 'IGEN TECH' == m.db.get_db_value(Register.CHIP_TYPE, None)
assert None == m.db.get_db_value(Register.CHIP_MODEL, None)
m.read() # read complete msg, and dispatch msg
assert 'LSW5BLE_17_02B0_1.05' == m.db.get_db_value(Register.COLLECTOR_FW_VERSION, 0).rstrip('\00')
assert 'LSW5BLE' == m.db.get_db_value(Register.CHIP_MODEL, 0)
m.close()


@@ -52,6 +52,7 @@ services:
mqtt:
container_name: mqtt-broker
image: eclipse-mosquitto:2
restart: unless-stopped
expose:
- 1883
volumes:
@@ -79,6 +80,7 @@ services:
- ${DNS2:-4.4.4.4}
ports:
- 5005:5005
- 10000:10000
volumes:
- ${PROJECT_DIR}./tsun-proxy/log:/home/tsun-proxy/log
- ${PROJECT_DIR}./tsun-proxy/config:/home/tsun-proxy/config


@@ -0,0 +1,243 @@
# test_with_pytest.py and scapy
#
import pytest, socket, time
#from scapy.all import *
#from scapy.layers.inet import IP, TCP, TCP_client
def get_sn() -> bytes:
return b'R170000000000001'
def get_inv_no() -> bytes:
return b'T170000000000001'
def get_invalid_sn():
return b'R170000000000002'
@pytest.fixture
def MsgContactInfo(): # Contact Info message
return b'\x00\x00\x00\x2c\x10'+get_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp(): # Contact Response message
return b'\x00\x00\x00\x14\x10'+get_sn()+b'\x91\x00\x01'
@pytest.fixture
def MsgContactInfo2(): # Contact Info message
return b'\x00\x00\x00\x2c\x10'+get_invalid_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp2(): # Contact Response message
return b'\x00\x00\x00\x14\x10'+get_invalid_sn()+b'\x91\x00\x01'
@pytest.fixture
def MsgTimeStampReq(): # Get Time Request message
return b'\x00\x00\x00\x13\x10'+get_sn()+b'\x91\x22'
@pytest.fixture
def MsgTimeStampResp(): # Get Time Response message
return b'\x00\x00\x00\x1b\x10'+get_sn()+b'\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
@pytest.fixture
def MsgContollerInd(): # Data indication from the controller
msg = b'\x00\x00\x01\x2f\x10'+ get_sn() + b'\x91\x71\x0e\x10\x00\x00\x10'+get_sn()
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x55\x50'
msg += b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
return msg
@pytest.fixture
def MsgInvData(): # Data indication from the controller
msg = b'\x00\x00\x00\x8b\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no()
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def MsgInverterInd(): # Data indication from the inverter
msg = b'\x00\x00\x05\x02\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no()
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
msg += b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
msg += b'\x53\x00\x00'
return msg
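# Decoding aid for the register dump above. From the raw bytes, each record
# appears to be: a 4-byte big-endian info id, a one-byte ASCII type tag and a
# value (2 bytes for 'S', 4 bytes for 'I', a 4-byte IEEE-754 float for 'F').
# This is only a sketch inferred from this capture; decode_data_items() is a
# made-up helper and not part of the proxy or its tests.
import struct

def decode_data_items(buf: bytes):  # hypothetical debugging helper
    '''Yield (info_id, type_tag, value) tuples from a GEN3 data-item section.'''
    pos = 0
    while pos + 5 <= len(buf):
        info_id = int.from_bytes(buf[pos:pos + 4], 'big')
        tag = chr(buf[pos + 4])
        pos += 5
        if tag == 'S':    # 16-bit value
            value = int.from_bytes(buf[pos:pos + 2], 'big')
            pos += 2
        elif tag == 'I':  # 32-bit value
            value = int.from_bytes(buf[pos:pos + 4], 'big')
            pos += 4
        elif tag == 'F':  # IEEE-754 float
            value = struct.unpack('>f', buf[pos:pos + 4])[0]
            pos += 4
        else:             # unknown tag: stop instead of guessing
            break
        yield info_id, tag, value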
@pytest.fixture
def MsgOtaUpdateReq():  # Over-the-air (OTA) update request from the Talent cloud
msg = b'\x00\x00\x01\x16\x10'+ get_sn() + b'\x70\x13\x01\x02\x76\x35'
msg += b'\x70\x68\x74\x74\x70'
msg += b'\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f'
msg += b'\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30\x30'
msg += b'\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f\x75'
msg += b'\x70\x64\x61\x74\x65\x2f\x64\x6f\x77\x6e\x6c\x6f\x61\x64\x3f\x76'
msg += b'\x65\x72\x3d\x56\x31\x2e\x30\x30\x2e\x31\x37\x26\x6e\x61\x6d\x65'
msg += b'\x3d\x47\x33\x2d\x57\x69\x46\x69\x2b\x2d\x56\x31\x2e\x30\x30\x2e'
msg += b'\x31\x37\x2d\x4f\x54\x41\x26\x65\x78\x74\x3d\x30\x60\x68\x74\x74'
msg += b'\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d'
msg += b'\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x3a\x39\x30'
msg += b'\x30\x32\x2f\x70\x72\x6f\x64\x2d\x61\x70\x69\x2f\x72\x6f\x6d\x2f'
msg += b'\x75\x70\x64\x61\x74\x65\x2f\x63\x61\x6c\x6c\x62\x61\x63\x6b\x3f'
msg += b'\x71\x69\x64\x3d\x31\x35\x30\x33\x36\x32\x26\x72\x69\x64\x3d\x32'
msg += b'\x32\x39\x26\x64\x69\x64\x3d\x31\x33\x34\x32\x32\x35\x20\x36\x35'
msg += b'\x66\x30\x64\x37\x34\x34\x62\x66\x33\x39\x61\x62\x38\x32\x34\x64'
msg += b'\x32\x38\x62\x38\x34\x64\x31\x39\x65\x64\x33\x31\x31\x63\x06\x34'
msg += b'\x36\x38\x36\x33\x33\x01\x31\x01\x30\x00'
return msg
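# The OTA request above carries its variable part as length-prefixed ASCII
# strings (one length byte, then that many characters): the download URL, the
# callback URL, the firmware MD5 and a few short flags. The walker below is a
# sketch for inspecting such a payload; iter_len_prefixed() is a made-up name
# and the exact header length in front of the first string is not verified here.
def iter_len_prefixed(buf: bytes, pos: int = 0):  # hypothetical debugging helper
    '''Yield the ASCII fields of a <len><bytes> sequence starting at pos.'''
    while pos < len(buf):
        length = buf[pos]
        pos += 1
        yield buf[pos:pos + length].decode('ascii', errors='replace')
        pos += length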
@pytest.fixture(scope="session")
def ClientConnection():
#host = '172.16.30.7'
host = 'logger.talent-monitoring.com'
#host = '127.0.0.1'
port = 5005
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.settimeout(1)
yield s
time.sleep(2.5)
s.close()
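# The commented-out hosts in the fixture above suggest switching the target
# (TSUN cloud, local proxy, localhost) by editing this file. A small sketch of
# selecting the target via the environment instead; GEN3_HOST and GEN3_PORT are
# made-up variable names and are not read by the real tests.
import os

def get_gen3_target():  # illustrative only
    host = os.getenv('GEN3_HOST', 'logger.talent-monitoring.com')
    port = int(os.getenv('GEN3_PORT', '5005'))
    return host, port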
def tempClientConnection():
#host = '172.16.30.7'
host = 'logger.talent-monitoring.com'
#host = '127.0.0.1'
port = 5005
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.settimeout(1)
yield s
s.close()
def test_open_close():
    try:
        for s in tempClientConnection():
            pass
    except Exception:  # any error while connecting or closing fails the test
        pytest.fail('open/close of the client connection raised an exception')
def test_send_contact_info1(ClientConnection, MsgContactInfo, MsgContactResp):
    s = ClientConnection
    data = None  # keep 'data' defined even if recv() times out
    try:
        s.sendall(MsgContactInfo)
        data = s.recv(1024)
    except TimeoutError:
        pass
    assert data == MsgContactResp
def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, MsgContactResp):
    s = ClientConnection
    data = None  # keep 'data' defined even if recv() times out
    try:
        s.sendall(MsgContactInfo2)
        data = s.recv(1024)
    except TimeoutError:
        pass  # expected: no response within the timeout
    else:
        pytest.fail('unexpected response to MsgContactInfo2')
    try:
        s.sendall(MsgContactInfo)
        data = s.recv(1024)
    except TimeoutError:
        pass
    assert data == MsgContactResp
def test_send_contact_info3(ClientConnection, MsgContactInfo, MsgContactResp, MsgTimeStampReq):
    s = ClientConnection
    data = None  # keep 'data' defined even if recv() times out
    try:
        s.sendall(MsgContactInfo)
        data = s.recv(1024)
    except TimeoutError:
        pass
    assert data == MsgContactResp
    try:
        s.sendall(MsgTimeStampReq)
        data = s.recv(1024)
    except TimeoutError:
        pass
def test_send_contact_resp(ClientConnection, MsgContactResp):
    s = ClientConnection
    try:
        s.sendall(MsgContactResp)
        data = s.recv(1024)
    except TimeoutError:
        pass  # no answer to an unsolicited contact response is acceptable
    else:
        assert data == b''  # b'' means the peer closed the connection
def test_send_ctrl_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgContollerInd):
s = ClientConnection
try:
s.sendall(MsgTimeStampReq)
data = s.recv(1024)
except TimeoutError:
pass
# time.sleep(2.5)
# assert data == MsgTimeStampResp
try:
s.sendall(MsgContollerInd)
data = s.recv(1024)
except TimeoutError:
pass
def test_send_inv_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgInvData, MsgInverterInd):
s = ClientConnection
try:
s.sendall(MsgTimeStampReq)
data = s.recv(1024)
except TimeoutError:
pass
# time.sleep(32.5)
# assert data == MsgTimeStampResp
try:
s.sendall(MsgInvData)
data = s.recv(1024)
s.sendall(MsgInverterInd)
data = s.recv(1024)
except TimeoutError:
pass
def test_ota_req(ClientConnection, MsgOtaUpdateReq):
s = ClientConnection
try:
s.sendall(MsgOtaUpdateReq)
data = s.recv(1024)
except TimeoutError:
pass


@@ -0,0 +1,150 @@
# test_with_pytest.py and scapy
#
import pytest, socket, time, os
from dotenv import load_dotenv
#from scapy.all import *
#from scapy.layers.inet import IP, TCP, TCP_client
load_dotenv()
SOLARMAN_SNR = os.getenv('SOLARMAN_SNR', '00000080')
def get_sn() -> bytes:
return bytes.fromhex(SOLARMAN_SNR)
def get_inv_no() -> bytes:
return b'T170000000000001'
def get_invalid_sn():
return b'R170000000000002'
@pytest.fixture
def MsgContactInfo(): # Contact Info message
msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xba\xd2\x00\x00'
msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53'
msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e'
msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54\x31\x39\x32\x2e'
msg += b'\x31\x36\x38\x2e\x38\x30\x2e\x34\x39\x00\x00\x00\x0f\x00\x01\xb0'
msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x41\x6c\x6c\x69\x75\x73\x2d\x48\x6f'
msg += b'\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3c'
msg += b'\x15'
return msg
@pytest.fixture
def MsgContactResp(): # Contact Response message
msg = b'\xa5\x0a\x00\x10\x11\x01\x01' +get_sn() +b'\x02\x01\x6a\xfd\x8f'
msg += b'\x65\x3c\x00\x00\x00\x75\x15'
return msg
@pytest.fixture
def MsgDataInd():
msg = b'\xa5\x99\x01\x10\x42\x59\x84' +get_sn() +b'\x01\xb0\x02\x2c\x87'
msg += b'\x22\x32\xb7\x29\x00\x00\xd6\xcf\xe1\x33\x01\x00\x0c\x05\x00\x00'
msg += b'\x59\x31\x37\x45\x37\x41\x30\x46\x30\x31\x30\x42\x30\x31\x33\x45'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x01\x12\x02\x12\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x40\x10\x08\xd8\x00\x09\x13\x84\x00\x35\x00\x00\x02\x58\x00\xd8'
msg += b'\x01\x3f\x00\x17\x00\x4d\x01\x44\x00\x14\x00\x43\x01\x45\x00\x18'
msg += b'\x00\x52\x00\x12\x00\x01\x00\x00\x00\x7c\x00\x00\x24\xed\x00\x2c'
msg += b'\x00\x00\x0b\x10\x00\x26\x00\x00\x0a\x0f\x00\x30\x00\x00\x0b\x76'
msg += b'\x00\x00\x00\x00\x06\x16\x00\x00\x00\x00\x55\xaa\x00\x01\x00\x00'
msg += b'\x00\x00\x00\x00\xff\xff\x07\xd0\x00\x03\x04\x00\x04\x00\x04\x00'
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
msg += b'\x04\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
msg += b'\x00\x00\x00\x00\x24\x15'
return msg
@pytest.fixture
def MsgDataResp():  # Data Response message
msg = b'\xa5\x0a\x00\x10\x12\x80\x84' +get_sn() +b'\x01\x01\xd1\x96\x04'
msg += b'\x66\x3c\x00\x00\x00\xed\x15'
return msg
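# All Solarman V5 fixtures above share the same framing: 0xa5 start byte, a
# little-endian payload length, a little-endian control code (0x4110 contact
# info and 0x4210 data indication in these captures, 0x1110/0x1210 for the
# matching responses), a sequence field, the 4-byte logger serial, the payload,
# a one-byte checksum and the 0x15 end byte. The splitter below is only a
# sketch of that layout; the field names are mine, not the proxy's parser.
import struct

def split_v5_frame(frame: bytes) -> dict:  # illustrative sketch
    start, length, control, seq = struct.unpack_from('<BHHH', frame, 0)
    serial = frame[7:11]  # raw serial bytes as sent on the wire
    payload = frame[11:11 + length]
    return dict(start=start, control=control, seq=seq, serial=serial,
                payload=payload, checksum=frame[11 + length],
                end=frame[12 + length])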
@pytest.fixture(scope="session")
def ClientConnection():
#host = '172.16.30.7'
host = 'logger.talent-monitoring.com'
#host = 'iot.talent-monitoring.com'
#host = '127.0.0.1'
port = 10000
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.settimeout(1)
yield s
s.close()
def checkResponse(data, Msg):
    check = bytearray(data)
    check[5] = Msg[5]          # ignore seq
    check[13:18] = Msg[13:18]  # ignore timestamp + first byte of repeat time
    check[21] = Msg[21]        # ignore crc
    assert check == Msg
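# checkResponse() masks the sequence, timestamp and checksum bytes because they
# change between runs (and because SOLARMAN_SNR replaces the serial from the
# original capture, which invalidates the recorded checksum). To verify a live
# frame instead of ignoring that byte, the usual Solarman V5 rule (an assumption
# on my side, not taken from the proxy) is the 8-bit sum of everything between
# the start byte and the checksum byte:
def v5_checksum(frame: bytes) -> int:  # sketch, assumes the common V5 convention
    return sum(frame[1:-2]) & 0xff

# usage example: assert v5_checksum(data) == data[-2]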
def tempClientConnection():
#host = '172.16.30.7'
host = 'logger.talent-monitoring.com'
#host = 'iot.talent-monitoring.com'
#host = '127.0.0.1'
port = 10000
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.settimeout(1)
yield s
time.sleep(2.5)
s.close()
def test_open_close():
    try:
        for s in tempClientConnection():
            pass
    except Exception:  # any error while connecting or closing fails the test
        pytest.fail('open/close of the client connection raised an exception')
def test_conn_msg(ClientConnection, MsgContactInfo, MsgContactResp):
    s = ClientConnection
    try:
        s.sendall(MsgContactInfo)
        # time.sleep(2.5)
        data = s.recv(1024)
    except TimeoutError:
        pytest.fail('no response to the contact info message')
    # time.sleep(2.5)
    checkResponse(data, MsgContactResp)
def test_data_ind(ClientConnection, MsgDataInd, MsgDataResp):
    s = ClientConnection
    try:
        s.sendall(MsgDataInd)
        # time.sleep(2.5)
        data = s.recv(1024)
    except TimeoutError:
        pytest.fail('no response to the data indication')
    # time.sleep(2.5)
    checkResponse(data, MsgDataResp)