Compare commits

...

181 Commits

Author SHA1 Message Date
Stefan Allius
c3430f509e Version 0.5.3 2023-11-12 15:23:43 +01:00
Stefan Allius
32a669d0d1 Merge pull request #27 from s-allius/s-allius/issue26
S allius/issue26
2023-11-12 15:19:48 +01:00
Stefan Allius
4d9f00221c fix the plant offline problem in tsun cloud
- use TSUN timestamp instead of local time,
  as TSUN also expects Central European Summer
  Time in winter
2023-11-12 15:15:30 +01:00
Stefan Allius
27c723b0c8 init contact_mail and contact_name 2023-11-12 01:06:24 +01:00
Stefan Allius
4bd59b91b3 send contact info every time a client connection is established 2023-11-11 23:49:06 +01:00
Stefan Allius
3a3c6142b8 ignore build.sh 2023-11-09 20:43:46 +01:00
Stefan Allius
5d36397f2f remove apk from the final image 2023-11-09 20:17:19 +01:00
Stefan Allius
bb39567d05 Version 0.5.2 2023-11-09 20:05:56 +01:00
Stefan Allius
b6431f8448 improve client conn disconnection
- check for race cond. on closing and establishing
  client connections
- improve connection trace
2023-11-09 20:03:09 +01:00
Stefan Allius
714dd92f35 allow multiple calls to Message.close() 2023-11-08 18:57:56 +01:00
Stefan Allius
02861f70af - add int64 data type to info parser 2023-11-07 00:19:48 +01:00
Stefan Allius
942e17d7c3 Version 0.5.1 2023-11-05 00:57:10 +01:00
Stefan Allius
37f7052811 Merge pull request #23 from limes007/dns_desc
add description for DNS settings
2023-11-05 00:14:30 +01:00
Stefan Allius
05e446dc74 Merge pull request #24 from limes007/main
fix f-string
2023-11-05 00:09:51 +01:00
limes007
647ef157d4 fix f-string 2023-11-04 23:29:53 +01:00
limes007
9ae391b46d add description for DNS settings 2023-11-04 23:28:20 +01:00
Stefan Allius
97dfe5d19e Version 0.5.0 2023-11-04 23:06:55 +01:00
Stefan Allius
4cdaa84c65 move self.server_side to Message class 2023-11-03 18:28:04 +01:00
Stefan Allius
9936ab0411 test server and client side 2023-11-03 18:27:30 +01:00
Stefan Allius
b079318c4b fix syntax error 2023-11-03 18:26:43 +01:00
Stefan Allius
a369e0ae6d Merge pull request #22 from s-allius/s-allius/issue21
S allius/issue21
2023-11-03 18:14:04 +01:00
Stefan Allius
fbd4eb1336 disable flake8 for tests 2023-11-03 01:07:05 +01:00
Stefan Allius
6821734238 fix missing f-string declaration 2023-11-03 00:16:22 +01:00
Stefan Allius
7f91994934 fix multiline bugs 2023-11-02 23:50:49 +01:00
Stefan Allius
a002408a98 fix missing import 2023-11-02 23:50:02 +01:00
Stefan Allius
de50f896dd fix missing f-string declaration 2023-11-02 23:49:34 +01:00
Stefan Allius
b23cae5bea fix exception since super().__del__() doesn't exist 2023-11-02 23:47:20 +01:00
Stefan Allius
2c4af0b7d8 lint code with flake8 2023-11-02 22:31:30 +01:00
Stefan Allius
c772eeeb28 Proxy counters don't work properly after restart
Fixes #21
2023-11-02 22:30:50 +01:00
Stefan Allius
165f94828f adapt unit tests 2023-11-01 21:18:25 +01:00
Stefan Allius
d8bc2dcae1 register proxy dev as soon as the MQTT connection is established 2023-11-01 21:17:43 +01:00
Stefan Allius
af27e95ef7 register proxy dev as soon as the MQTT connection is established 2023-11-01 21:16:54 +01:00
Stefan Allius
bcc901ba4c add class value new_stat_data 2023-11-01 21:14:22 +01:00
Stefan Allius
7a2667767e add callback on mqtt/ha conn establishment 2023-11-01 21:11:54 +01:00
Stefan Allius
85be9072db adapt interface of ha_confs 2023-11-01 21:09:57 +01:00
Stefan Allius
387bab01be Proxy counters don't work properly after restart
Fixes #21
2023-11-01 21:08:51 +01:00
Stefan Allius
bcd37faa4f Update changelog 2023-10-30 23:48:15 +01:00
Stefan Allius
47878adb23 fix definition of the 'Unknown Control Type' counter 2023-10-30 23:43:18 +01:00
Stefan Allius
205a4e38ee increase test coverage 2023-10-28 00:14:34 +02:00
Stefan Allius
36754196c2 add Unknown_Ctrl error counter 2023-10-28 00:11:22 +02:00
Stefan Allius
cfe64b1eae add Unknown_Ctrl error counter 2023-10-28 00:09:19 +02:00
Stefan Allius
bb793a3f13 Version 0.4.3 2023-10-26 20:37:03 +02:00
Stefan Allius
c3da9d6101 - avoid resetting the daily generation counters 2023-10-26 20:32:08 +02:00
Stefan Allius
0c9f953476 don't initialize must_incr values with zero
- when the connection has just been established by the inverter.
  Sometimes the inverters send invalid data with the value zero.
  In this case, we no longer initialize the must_incr values,
  to avoid sending invalid data to the MQTT broker and
  Home Assistant
2023-10-26 20:23:53 +02:00
Stefan Allius
658f42d4fe restart mqtt broker on errors 2023-10-23 21:38:58 +02:00
Stefan Allius
870a965c22 - fix typo 2023-10-23 21:27:58 +02:00
Stefan Allius
0c645812bd catch mqtt errors 2023-10-23 21:25:47 +02:00
Stefan Allius
7b71f25496 Merge branch 'main' of https://github.com/s-allius/tsun-gen3-proxy into main 2023-10-23 21:18:44 +02:00
Stefan Allius
50977d5afd catch Mqtt errors
- we catch mqtt errors, so we can forward messages to
  the tsun cloud even if the mqtt broker is not running
2023-10-23 21:17:17 +02:00
Stefan Allius
ff0979663e catch broken pipe errors 2023-10-23 21:12:10 +02:00
Stefan Allius
a6ac9864af Merge pull request #20 from LenzGr/patch-1
CHANGELOG.md: fix typos
2023-10-23 19:12:25 +02:00
Lenz Grimmer
2e0331cb88 CHANGELOG.md: fix typos 2023-10-23 09:31:54 +02:00
Stefan Allius
ec54e399fb update default config 2023-10-23 00:14:26 +02:00
Stefan Allius
600362d00b Version 0.4.2 2023-10-21 21:48:46 +02:00
Stefan Allius
341e5c3964 fix typo 2023-10-21 21:40:28 +02:00
Stefan Allius
27a99fccec setup test coverage measurement 2023-10-21 21:39:19 +02:00
Stefan Allius
9264faaf3d avoid resetting daily generation counters 2023-10-21 21:38:36 +02:00
Stefan Allius
342313b76d add more test cases 2023-10-21 18:20:32 +02:00
Stefan Allius
f96091affd Increase test coverage for the Infos class to 100% 2023-10-21 16:46:49 +02:00
Stefan Allius
1df8ae55f0 Version 0.4.1 2023-10-20 21:53:59 +02:00
Stefan Allius
311b36aaf1 avoid resetting total generation counters 2023-10-20 21:51:02 +02:00
Stefan Allius
1642fe5a8a Merge pull request #19 from s-allius/s-allius/issue18
S allius/issue18
2023-10-20 20:09:56 +02:00
Stefan Allius
2b7e671dfc initialize the proxy statistics 2023-10-20 20:08:39 +02:00
Stefan Allius
a1930d32ae initialize the proxy statistics 2023-10-20 19:54:48 +02:00
Stefan Allius
11d7d616fa add static constructor to init proxy statistics 2023-10-20 00:27:21 +02:00
Stefan Allius
5433e18389 Send proxy values when the inverter disconnects 2023-10-19 20:04:41 +02:00
Stefan Allius
9006472264 list supported inverters 2023-10-17 23:40:34 +02:00
Stefan Allius
605a269d84 Add screenshots 2023-10-17 21:18:54 +02:00
Stefan Allius
93392f49c0 Version 0.4.0 2023-10-16 21:56:04 +02:00
Stefan Allius
587ec3d517 Merge pull request #17 from s-allius/s-allius/issue10
S allius/issue10
2023-10-16 21:50:12 +02:00
Stefan Allius
b98313ae23 add test for open close without any msg transfer 2023-10-16 20:33:46 +02:00
Stefan Allius
dc27da2b56 initialize sug_area 2023-10-16 20:30:43 +02:00
Stefan Allius
0c4bd2a03a log forwarded messages on info level 2023-10-16 20:29:44 +02:00
Stefan Allius
ba961fdfd7 Send internal proxy states to Home-Assistant
Fixes #10
2023-10-16 20:29:23 +02:00
Stefan Allius
03aa0c5747 adapt tests 2023-10-15 23:13:43 +02:00
Stefan Allius
10ec949a5b register proxy entities under a unique device (singleton) 2023-10-15 23:05:56 +02:00
Stefan Allius
909d5ca44a add MQTT config for the proxy device 2023-10-15 22:59:18 +02:00
Stefan Allius
335e5d1184 add MQTT config for the proxy device 2023-10-15 22:55:29 +02:00
Stefan Allius
583ec0c2a7 Update changelog 2023-10-15 15:50:20 +02:00
Stefan Allius
3c8e7666d4 add inc and dec methods for proxy statistic counters 2023-10-15 15:46:05 +02:00
Stefan Allius
5f821802a5 Implement HA device for the proxy
- add singleton proxy device
- add static db for proxy counter
- add inc and dec methods
2023-10-15 15:45:25 +02:00
Stefan Allius
fc10912a12 Send internal proxy states to Home-Assistant
Fixes #10
2023-10-15 15:37:59 +02:00
Stefan Allius
4d5da5a91f fix unit tests, which were broken since version 0.3.0 2023-10-12 23:36:46 +02:00
Stefan Allius
a2dfb4c1a6 avoid crash on incomplete config.toml file 2023-10-12 23:04:54 +02:00
Stefan Allius
3adf968a59 add systemtests 2023-10-12 22:39:24 +02:00
Stefan Allius
89d8cecfb5 don't dispatch ignored messages
- move check of inverter serial number from async_stream to message class
- add trace for dropped messages
2023-10-12 22:29:51 +02:00
Stefan Allius
00f735d0ce add a proxy device to home assistant 2023-10-12 13:22:28 +02:00
Stefan Allius
5c940bb7a2 Send internal proxy states to Home-Assistant
Fixes #10

Always register ha entities after connection setup
2023-10-12 13:19:56 +02:00
Stefan Allius
8cb8dea47b Merge pull request #15 from s-allius/s-allius/issue8
S allius/issue8
2023-10-11 21:09:03 +02:00
Stefan Allius
8edbd7928f add docstring 2023-10-11 21:01:57 +02:00
Stefan Allius
f0e9c67a06 fix issue #8 2023-10-11 20:22:33 +02:00
Stefan Allius
505beff6de Do not register non-existent inverter inputs in HA
Fixes #8
2023-10-11 20:01:10 +02:00
Stefan Allius
684e591835 Bump to python version 3.12 2023-10-10 21:54:12 +02:00
Stefan Allius
8b4a94bfcb Version 0.3.0 2023-10-10 20:45:12 +02:00
Stefan Allius
98dab7db99 Version 0.3.0 2023-10-10 20:17:04 +02:00
Stefan Allius
42ae95fd1c remove --no-cache for release candidates (rc) 2023-10-10 20:15:10 +02:00
Stefan Allius
9ffd105278 classify more value for diagnostics 2023-10-10 20:03:05 +02:00
Stefan Allius
97f426269f switch to python 3.12 2023-10-09 22:21:00 +02:00
Stefan Allius
c7bf3f2e44 formatting 2023-10-09 20:48:46 +02:00
Stefan Allius
2781bf3a14 Independence from TSUN 2023-10-09 20:47:05 +02:00
Stefan Allius
fcd3fddb19 optimize and reduce logging 2023-10-09 20:02:30 +02:00
Stefan Allius
88cdcabd6f use abbreviation 'ic' for icon 2023-10-09 19:58:37 +02:00
Stefan Allius
1f2f359188 optimize and reduce logging 2023-10-09 19:57:49 +02:00
Stefan Allius
2dd09288d5 bump aiomqtt version to 1.2.1 2023-10-08 16:32:24 +02:00
Stefan Allius
5c5c3bc926 Merge pull request #14 from s-allius/reduze-size
Reduze size
2023-10-07 23:10:40 +02:00
Stefan Allius
2cf7a2db36 Version 0.2.0 2023-10-07 23:08:39 +02:00
Stefan Allius
3225566b9b fix formatting of a log message 2023-10-07 21:24:49 +02:00
Stefan Allius
fa567f68c0 - disable DEBUG log for releases
- support building of release candidates
2023-10-07 21:14:57 +02:00
Stefan Allius
e1536cb697 adapt log levels, optimize expensive hex dump logs 2023-10-07 21:03:49 +02:00
Stefan Allius
b06d832504 set log level to DEBUG for dev versions 2023-10-07 20:58:18 +02:00
Stefan Allius
ed14ed484b add build support for release candidates (rc) 2023-10-07 20:55:26 +02:00
Stefan Allius
ddba3f6285 optimize and update some comments 2023-10-07 16:39:39 +02:00
Stefan Allius
8264cc6d00 reduce container size and security attack surface 2023-10-07 16:20:40 +02:00
Stefan Allius
d5561d393a never log the config 2023-10-06 23:39:02 +02:00
Stefan Allius
a8f1a838c1 never print password in logs 2023-10-06 23:30:04 +02:00
Stefan Allius
b530353e54 Merge pull request #13 from s-allius/class-refact
Class refact
2023-10-06 23:15:13 +02:00
Stefan Allius
271b4f876e Version 0.1.0 2023-10-06 23:12:06 +02:00
Stefan Allius
6816a3e027 remove empty method Messages.__del__ 2023-10-06 23:11:12 +02:00
Stefan Allius
bee25a5f13 disable DEBUG logging for optimized images 2023-10-06 22:59:57 +02:00
Stefan Allius
3db643cb87 send ha registrations only if values have changed 2023-10-06 20:02:38 +02:00
Stefan Allius
c791395e0e remove obsolete logging messages 2023-10-06 19:59:47 +02:00
Stefan Allius
0043e4c147 fix typo 2023-10-06 19:35:41 +02:00
Stefan Allius
f38047c931 update changelog 2023-10-05 22:51:52 +02:00
Stefan Allius
19cbd5a041 fix memory leak, improve traces 2023-10-05 22:48:20 +02:00
Stefan Allius
a48394d057 fix crash on container restart 2023-10-05 21:52:26 +02:00
Stefan Allius
1871f6c8d2 change owner id during startup 2023-10-05 21:25:02 +02:00
Stefan Allius
066459f14e rename class Proxy to Inverter 2023-10-05 19:34:10 +02:00
Stefan Allius
3f14f5cb9e make class Proxy to a derivation of class AsyncStream 2023-10-05 18:47:59 +02:00
Stefan Allius
4c51a159af remove data logs from console 2023-10-03 20:32:46 +02:00
Stefan Allius
450012aac5 Version 0.0.6 2023-10-03 20:23:25 +02:00
Stefan Allius
00f800c17a put packet dumps only into tracer.log 2023-10-03 20:21:59 +02:00
Stefan Allius
421f7a533a dealloc async_stream instances on connection termination 2023-10-03 19:47:09 +02:00
Stefan Allius
6d9be75ce3 dealloc async_stream instances on connection termination
- improve close handler
- clarify logging on disconnection
2023-10-03 19:44:24 +02:00
Stefan Allius
0886b30032 fix control byte output in tx trace 2023-10-03 14:01:42 +02:00
Stefan Allius
d308c3a9fa Revert "fix memory leak on connection aborts"
This reverts commit f097b3350b.
2023-10-03 11:45:17 +02:00
Stefan Allius
38dacf2b97 Revert "use weakrefs to solve circular references"
This reverts commit dfe8bcb01e.
2023-10-03 11:43:08 +02:00
Stefan Allius
700b946acf dealloc async_stream instances on connection termination 2023-10-03 01:35:53 +02:00
Stefan Allius
dfe8bcb01e use weakrefs to solve circular references
- cleanup logging
2023-10-03 01:31:23 +02:00
Stefan Allius
a8449e8417 implement disc method 2023-10-03 01:30:06 +02:00
Stefan Allius
f097b3350b fix memory leak on connection aborts
- use weakrefs
- call Message.close() in the parent class
- call Message.__del__()
- cleanup logging
2023-10-03 00:48:22 +02:00
Stefan Allius
056e182f64 implement close() to release circular references 2023-10-03 00:46:45 +02:00
Stefan Allius
00f1fe01bf disable MQTT debug logs 2023-10-03 00:45:56 +02:00
Stefan Allius
108da0a97e Merge pull request #12 from s-allius/s-allius/issue5
S allius/issue5
2023-10-02 19:49:46 +02:00
Stefan Allius
e5d19ce07d Force MQTT registration
- when the home assistant has set the status to online again
2023-10-02 19:42:42 +02:00
Stefan Allius
464e542a47 clarify comment 2023-10-02 19:38:34 +02:00
Stefan Allius
414eb19ffb clarify comment 2023-10-02 19:35:59 +02:00
Stefan Allius
283bc2257b send autoconfig on HA restart
Fixes #5
2023-10-02 19:31:12 +02:00
Stefan Allius
198146b5f4 Bump aiomqtt to version 1.2.1 2023-10-01 22:41:41 +02:00
Stefan Allius
242653da72 check MQTT component and new icons 2023-10-01 21:54:59 +02:00
Stefan Allius
417b57c99a Version 0.0.5 2023-10-01 21:36:33 +02:00
Stefan Allius
ff9360d2a7 add versioning, build 'rel' and 'dev' variants 2023-10-01 21:30:40 +02:00
Stefan Allius
c570fbabfa add MQTT component configuration 2023-10-01 21:29:33 +02:00
Stefan Allius
7b69de8181 add MQTT component configuration; add MQTT origin 2023-10-01 21:27:36 +02:00
Stefan Allius
5377dd81c8 supports version string 2023-10-01 21:26:53 +02:00
Stefan Allius
64f573a369 supports version string 2023-10-01 21:26:22 +02:00
Stefan Allius
c31c0280e7 prints version on start 2023-10-01 21:22:43 +02:00
Stefan Allius
382d887f56 Update entity icons 2023-10-01 13:25:05 +02:00
Stefan Allius
92d44eaa6b update entity icons 2023-10-01 13:23:57 +02:00
Stefan Allius
c773d5a084 add build script and set docker labels with dyn. data 2023-09-30 22:34:52 +02:00
Stefan Allius
997195ea29 Version 0.0.4 2023-09-30 17:42:07 +02:00
Stefan Allius
b25a4619f3 Merge pull request #9 from s-allius/s-allius/issue6
S allius/issue6
2023-09-30 17:32:16 +02:00
Stefan Allius
030b9794bb add more code documentation 2023-09-30 17:27:51 +02:00
Stefan Allius
bf597c10a5 update changelog 2023-09-30 16:13:12 +02:00
Stefan Allius
0f4d41b466 add some code comments 2023-09-30 16:11:32 +02:00
Stefan Allius
a44c03fc98 add arg '-vv' for pytest to get better error msgs 2023-09-30 16:10:36 +02:00
Stefan Allius
6a6a89d6d3 fix unit test for multiple ha devices 2023-09-30 16:09:35 +02:00
Stefan Allius
7d56f47c10 Move data up interval to the controller block 2023-09-30 12:33:00 +02:00
Stefan Allius
aa1376208a ignore .DS_Store 2023-09-30 01:17:48 +02:00
Stefan Allius
4f1c3a53be support for sub devices
Fixes #6
2023-09-30 01:16:40 +02:00
Stefan Allius
d97d4ece43 cleanup 2023-09-29 16:38:52 +02:00
Stefan Allius
476cdf029e Version 0.0.3 2023-09-29 01:00:02 +02:00
Stefan Allius
dfcd5de166 uncomment at least one inverter definition 2023-09-29 00:46:00 +02:00
Stefan Allius
60fc38b1f0 Merge pull request #4 from s-allius/s-allius/issue2
S allius/issue2
2023-09-29 00:32:43 +02:00
Stefan Allius
5b155c7b4c Getting Started added 2023-09-29 00:25:23 +02:00
Stefan Allius
c0a2a705ec Running Proxy with host UID and GID
Fixes #2
2023-09-28 21:46:02 +02:00
Stefan Allius
125f681bec move proxy image to ghcr.io 2023-09-27 22:08:58 +02:00
Stefan Allius
8b4ff6173c Version 0.0.2 2023-09-27 22:06:16 +02:00
Stefan Allius
9273c843d4 add opencontainer labels 2023-09-27 21:43:47 +02:00
Stefan Allius
d48ddcb151 ignore systemtests, since they include private data 2023-09-27 21:42:52 +02:00
Stefan Allius
5bc3ba8727 change default UID to 1000 2023-09-27 19:11:35 +02:00
Stefan Allius
76cb9a19c7 Voltage and current of inputs added 2023-09-27 19:05:28 +02:00
24 changed files with 2339 additions and 602 deletions

2
.coveragerc Normal file

@@ -0,0 +1,2 @@
[run]
branch = True

3
.gitignore vendored

@@ -4,3 +4,6 @@ mosquitto/**
homeassistant/**
tsun_proxy/**
Doku/**
.DS_Store
.coverage
coverage.xml

12
.vscode/settings.json vendored

@@ -1,7 +1,15 @@
{
"python.testing.pytestArgs": [
"app","system_tests"
"-vv",
"app",
"--cov=app/src",
"--cov-report=xml",
"--cov-report=html",
"system_tests"
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true
"python.testing.pytestEnabled": true,
"flake8.args": [
"--extend-exclude=app/tests/*.py system_tests/*.py"
]
}
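The same test run can be reproduced from a shell. A minimal sketch, assuming pytest and pytest-cov are installed, mirroring the arguments above:

```sh
python -m pytest -vv app --cov=app/src --cov-report=xml --cov-report=html system_tests
```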

CHANGELOG.md

@@ -7,14 +7,125 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [0.5.3] - 2023-11-12
### Removed
- remove apk package manager from the final image
- send contact info every time a client connection is established
- change timestamp from local time to utc
## [0.5.2] - 2023-11-09
- add int64 data type to info parser
- allow multiple calls to Message.close()
- check for race cond. on closing and establishing client connections
## [0.5.1] - 2023-11-05
- fixes f-string by limes007
- add description for dns settings by limes007
## [0.5.0] - 2023-11-04
- fix issue [#21](https://github.com/s-allius/tsun-gen3-proxy/issues/21)
- register proxy dev as soon as the MQTT connection is established
- increase test coverage of the Messages class
- add error counter for unknown control bytes
- lint code with flake8
## [0.4.3] - 2023-10-26
- fix typos by Lenz Grimmer
- catch mqtt errors, so we can forward messages to tsun even if the mqtt broker is not reachable
- avoid resetting the daily generation counters even if the inverter sends zero values after reconnection
## [0.4.2] - 2023-10-21
- count unknown data types in received messages
- count definition errors in our internal tables
- increase test coverage of the Infos class to 100%
- avoid resetting the daily generation counters even if the inverter sends zero values at sunset
## [0.4.1] - 2023-10-20
- fix issue [#18](https://github.com/s-allius/tsun-gen3-proxy/issues/18)
- initialize the proxy statistics
- avoid resetting total generation counters
## [0.4.0] - 2023-10-16
- fix issue [#8](https://github.com/s-allius/tsun-gen3-proxy/issues/8)
- implement [#10](https://github.com/s-allius/tsun-gen3-proxy/issues/10)
- fix: don't dispatch ignored messages so that they are not forwarded
- add systemtests
- fix unit tests, which were broken since version 0.3.0
- add proxy device to home assistant
- add statistic counter to proxy device
- support multiple inverter registration at home assistant
## [0.3.0] - 2023-10-10
❗Due to the definition of values for diagnostics, the MQTT devices of controller and inverter should be deleted in the Home Assistant before updating to version '0.3.0'. After the update, these are automatically created again. The measurement data is retained.
### Changes
- optimize and reduce logging
- switch to python 3.12
- classify some values for diagnostics
## [0.2.0] - 2023-10-07
This version halves the size of the Docker image and reduces the attack surface for security vulnerabilities, by omitting unneeded code. The feature set is exactly the same as the previous release version 0.1.0.
### Changes
- move from slim-bookworm to an alpine base image
- install python requirements with pip wheel
- disable DEBUG log for releases
- support building of release candidates
## [0.1.0] - 2023-10-06
- refactoring of the connection classes
- change user id on startup
- register MQTT topics to home assistant, even if we have multiple inverters
## [0.0.6] - 2023-10-03
- Bump aiomqtt to version 1.2.1
- Force MQTT registration when the home assistant has set the status to online again
- fix control byte output in tx trace
- dealloc async_stream instances on connection termination
## [0.0.5] - 2023-10-01
- Entity icons updated
- Prints version on start
- Prepare for MQTT component != sensor
- Add MQTT origin
## [0.0.4] - 2023-09-30
- With this patch we ignore the setting 'suggested_area' in config.toml, because it makes no sense with multiple devices. We are looking for a better solution without combining all values into one area again in a later version.
❗Due to the change from one device to multiple devices in the Home Assistant, the previous MQTT device should be deleted in the Home Assistant after the update to pre-release '0.0.4'. Afterwards, the proxy must be restarted again to ensure that the sub-devices are created completely.
### Added
- Register multiple devices at home-assistant instead of one for all measurements.
Now we register: a Controller, the inverter and up to 4 input devices to home-assistant.
## [0.0.3] - 2023-09-28
### Added
- Fixes Running Proxy with host UID and GID #2
## [0.0.2] - 2023-09-27
### Added
- Dockerfile opencontainer labels
- Send voltage and current of inputs to mqtt
## [0.0.1] - 2023-09-25
@@ -31,4 +142,4 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- First checkin, the project was born

100
README.md

@@ -6,9 +6,9 @@
<p align="center">integration</p>
<p align="center">
<a href="https://opensource.org/licenses/BSD-3-Clause"><img alt="License: BSD-3-Clause" src="https://img.shields.io/badge/License-BSD_3--Clause-green.svg"></a>
<a href="https://www.python.org/downloads/release/python-3110/"><img alt="Supported Python versions" src="https://img.shields.io/badge/python-3.11-blue.svg"></a>
<a href="https://sbtinstruments.github.io/aiomqtt/introduction.html"><img alt="Supported Python versions" src="https://img.shields.io/badge/aiomqtt-1.2.0-lightblue.svg"></a>
<a href="https://toml.io/en/v1.0.0"><img alt="Supported Python versions" src="https://img.shields.io/badge/toml-1.0.0-lightblue.svg"></a>
<a href="https://www.python.org/downloads/release/python-3120/"><img alt="Supported Python versions" src="https://img.shields.io/badge/python-3.12-blue.svg"></a>
<a href="https://sbtinstruments.github.io/aiomqtt/introduction.html"><img alt="Supported aiomqtt versions" src="https://img.shields.io/badge/aiomqtt-1.2.1-lightblue.svg"></a>
<a href="https://toml.io/en/v1.0.0"><img alt="Supported toml versions" src="https://img.shields.io/badge/toml-1.0.0-lightblue.svg"></a>
</p>
@@ -16,13 +16,16 @@
###
# Overview
The "TSUN Gen3 Micro-Inverter" proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker to integrate the inverter into typical home automations.
This proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker. With the proxy, you can easily retrieve real-time values such as power, current and daily energy and integrate the inverter into typical home automations. This works even without an internet connection. The optional connection to the TSUN Cloud can be disabled!
In detail, the inverter establishes a TCP connection to the TSUN cloud to transmit current measured values every 300 seconds. To be able to forward the measurement data to an MQTT broker, the proxy must be looped into this TCP connection.
Through this, the inverter then establishes a connection to the proxy and the proxy establishes another connection to the TSUN Cloud. The transmitted data is interpreted by the proxy and then passed on to both the TSUN Cloud and the MQTT broker. The connection to the TSUN Cloud is optional and can be switched off in the configuration (default is on). Then no more data is sent to the Internet, but no more remote updates of firmware and operating parameters (e.g. rated power, grid parameters) are possible.
By means of `docker` a simple installation and operation is possible. By using `docker-compose`, a complete stack of proxy, `MQTT broker` and `home-assistant` can be started easily.
###
This project is not related to the company TSUN. It is a private initiative that aims to connect TSUN inverters with an MQTT broker. There is no support and no warranty from TSUN.
###
❗An essential requirement is that the proxy can be looped into the connection
@@ -37,37 +40,43 @@ If you use a Pi-hole, you can also store the host entry in the Pi-hole.
## Features
- supports TSOL MS300, MS350, MS400, MS600, MS700 and MS800 inverters from TSUN
- `MQTT` support
- `Home-Assistant` auto-discovery support
- Self-sufficient island operation without internet
- non-root Docker Container
## Home Assistant Screenshots
Here are some screenshots of how the inverter is displayed in the Home Assistant:
https://github.com/s-allius/tsun-gen3-proxy/wiki/home-assistant#home-assistant-screenshots
## Requirements
- A running Docker engine to host the container
- Ability to loop the proxy into the connection between the inverter and the TSUN cloud
###
# Getting Started
To run the proxy, you first need to create the image. You can do this quite simply as follows:
```sh
docker build https://github.com/s-allius/tsun-gen3-proxy.git#main:app -t tsun-proxy
```
After that you can run the image:
```sh
docker run --dns '8.8.8.8' --env 'UID=1000' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
```
You will probably see a message that the configuration file was not found. So that it can be created without admin rights, the `UID` must be adapted. To do this, simply stop the proxy with ctrl-c and use the `id` command to determine your own user ID:
```sh
% id
uid=1050(sallius) gid=20(staff) ...
```
With this information we can customize the `docker run` statement:
```sh
docker run --dns '8.8.8.8' --env 'UID=1050' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
```
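To avoid hard-coding the user ID, both steps can be combined. A minimal sketch, assuming a POSIX shell and the `tsun-proxy` image tag from above:

```sh
# pass the current user's ID straight into the container instead of
# looking it up with `id` and editing the command by hand
docker run --dns '8.8.8.8' --env "UID=$(id -u)" -p '5005:5005' \
  -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
```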
###
# Configuration
@@ -101,6 +110,8 @@ mqtt.passwd = ''
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = 'proxy' # MQTT node id for the proxy device
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
# microinverters
suggested_area = 'balcony' # Optional, suggested installation area for home-assistant
```
## DNS Settings
### Loop the proxy into the connection
To include the proxy in the connection between the inverter and the TSUN Cloud, you must adapt the DNS record of *logger.talent-monitoring.com* within the network that your inverter uses. You need a mapping from logger.talent-monitoring.com to the IP address of the host running the Docker engine.
This can be done, for example, by adding a local DNS record to the Pi-hole if you are using it.
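As a sketch (assuming Pi-hole v5, which stores local DNS records in `/etc/pihole/custom.list`, and a Docker host at the hypothetical address 192.168.1.10 — adjust to your network):

```sh
# map the TSUN hostname to the host running the proxy container
echo "192.168.1.10 logger.talent-monitoring.com" | sudo tee -a /etc/pihole/custom.list
# reload the Pi-hole resolver so the new record takes effect
pihole restartdns
```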
### DNS Rebind Protection
If you are using a router as local DNS server, the router may have DNS rebind protection that needs to be adjusted. For security reasons, DNS rebind protection blocks DNS queries that refer to an IP address on the local network.
If you are using a FRITZ!Box, you can do this in the Network Settings tab under Home Network / Network. Add logger.talent-monitoring.com as a hostname exception in DNS rebind protection.
### DNS server of proxy
The proxy itself must use a different DNS server to connect to the TSUN Cloud. If you use the DNS server with the adapted record, you will end up in an endless loop as soon as the proxy tries to send data to the TSUN Cloud.
As described above, set a DNS server in the Docker command or Docker compose file.
## License
This project is licensed under the [BSD 3-clause License](https://opensource.org/licenses/BSD-3-Clause).
Note the aiomqtt library used is based on the paho-mqtt library, which has a dual license. One of the licenses is the so-called [Eclipse Distribution License v1.0](https://www.eclipse.org/org/documents/edl-v10.php). It is almost word-for-word identical to the BSD 3-clause License. The only differences are:
- One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD)
- One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD)
## Versioning
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Breaking changes will only occur in major `X.0.0` releases.
## Contributing
We're very happy to receive contributions to this project! You can get started by reading [CONTRIBUTING.md](https://github.com/s-allius/tsun-gen3-proxy/blob/main/CONTRIBUTING.md).
## Changelog
The changelog lives in [CHANGELOG.md](https://github.com/s-allius/tsun-gen3-proxy/blob/main/CHANGELOG.md). It follows the principles of [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

app/.dockerignore

@@ -1,3 +1,5 @@
tests/
**/__pycache__
*.pyc
.DS_Store
build.sh

app/Dockerfile

@@ -1,52 +1,73 @@
ARG SERVICE_NAME="tsun-proxy"
ARG UID=1026
ARG UID=1000
ARG GID=1000
# set base image (host OS)
FROM python:3.11-slim-bookworm AS builder
#
# first stage for our base image
FROM python:3.12-alpine AS base
USER root
RUN pip install --upgrade pip
RUN apk update && \
apk upgrade
RUN apk add --no-cache su-exec
#
# second stage for building wheels packages
FROM base as builder
# copy the dependencies file to the working directory
COPY ./requirements.txt .
RUN apk add --no-cache build-base && \
python -m pip install --no-cache-dir -U pip wheel
# install dependencies
RUN pip install --user -r requirements.txt
# copy the dependencies file to the root dir and install requirements
COPY ./requirements.txt /root/
RUN python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
#
# second unnamed stage
FROM python:3.11-slim-bookworm
# third stage for our runtime image
FROM base as runtime
ARG SERVICE_NAME
ARG VERSION
ARG UID
ARG GID
ARG LOG_LVL
ENV VERSION=$VERSION
ENV SERVICE_NAME=$SERVICE_NAME
ENV UID=$UID
ENV GID=$GID
ENV LOG_LVL=$LOG_LVL
RUN addgroup --gid 1000 $SERVICE_NAME && \
adduser --ingroup $SERVICE_NAME --shell /bin/false --disabled-password --uid $UID $SERVICE_NAME && \
mkdir -p /home/$SERVICE_NAME/log && \
chown $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME/log && \
mkdir -p /home/$SERVICE_NAME/config && \
chown $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME/config
# set the working directory in the container
WORKDIR /home/$SERVICE_NAME
USER $SERVICE_NAME
# copy only the dependencies installation from the 1st stage image
COPY --from=builder --chown=$SERVICE_NAME:$SERVICE_NAME /root/.local /home/$SERVICE_NAME/.local
# copy the content of the local src and config directory to the working directory
COPY --chown=$SERVICE_NAME:$SERVICE_NAME config .
COPY --chown=$SERVICE_NAME:$SERVICE_NAME src .
# update PATH environment variable
ENV HOME=/home/$SERVICE_NAME
ENV PATH=/home/$SERVICE_NAME/.local:$PATH
EXPOSE 5005 5005
VOLUME ["/home/$SERVICE_NAME/log", "/home/$SERVICE_NAME/config"]
LABEL de.allius.image.authors="Stefan Allius <stefan.allius@t-online.de>"
# install the requirements from the wheels packages from the builder stage
COPY --from=builder /root/wheels /root/wheels
RUN python -m pip install --no-cache --no-index /root/wheels/* && \
rm -rf /root/wheels
RUN apk --purge del apk-tools
# copy the content of the local src and config directory to the working directory
COPY --chmod=0700 entrypoint.sh /root/entrypoint.sh
COPY config .
COPY src .
EXPOSE 5005
# command to run on container start
CMD [ "python3", "./server.py" ]
ENTRYPOINT ["/root/entrypoint.sh"]
CMD [ "python3", "./server.py" ]
LABEL org.opencontainers.image.authors="Stefan Allius"
LABEL org.opencontainers.image.source https://github.com/s-allius/tsun-gen3-proxy
LABEL org.opencontainers.image.description 'The "TSUN Gen3 Micro-Inverter" proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker to integrate the inverter into typical home automations'
LABEL org.opencontainers.image.licenses="BSD-3-Clause"
LABEL org.opencontainers.image.vendor="Stefan Allius"

33
app/build.sh Executable file

@@ -0,0 +1,33 @@
#!/bin/bash
set -e
BUILD_DATE=$(date -Iminutes)
VERSION=$(git describe --tags --abbrev=0)
VERSION="${VERSION:1}"
arr=(${VERSION//./ })
MAJOR=${arr[0]}
IMAGE=tsun-gen3-proxy
if [[ $1 == dev ]] || [[ $1 == rc ]] ;then
IMAGE=docker.io/sallius/${IMAGE}
VERSION=${VERSION}-$1
elif [[ $1 == rel ]];then
IMAGE=ghcr.io/s-allius/${IMAGE}
else
echo argument missing!
echo try: $0 '[dev|rc|rel]'
exit 1
fi
echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE
if [[ $1 == dev ]];then
docker build --build-arg "VERSION=${VERSION}" --build-arg "LOG_LVL=DEBUG" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
elif [[ $1 == rc ]];then
docker build --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
elif [[ $1 == rel ]];then
docker build --no-cache --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
docker push ghcr.io/s-allius/tsun-gen3-proxy:latest
docker push ghcr.io/s-allius/tsun-gen3-proxy:${MAJOR}
docker push ghcr.io/s-allius/tsun-gen3-proxy:${VERSION}
fi
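Usage is then a single argument. A sketch, assuming the script is invoked from the repository root (it passes `app` as the Docker build context):

```sh
./app/build.sh dev   # dev image with DEBUG logging, tagged docker.io/sallius/tsun-gen3-proxy:latest
./app/build.sh rc    # release candidate, version label suffixed with -rc
./app/build.sh rel   # release: built with --no-cache, tagged and pushed to ghcr.io/s-allius
```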

app/config/default_config.toml

@@ -15,6 +15,8 @@ mqtt.passwd = ''
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
ha.proxy_node_id = 'proxy' # MQTT node id for the proxy device
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
# microinverters
inverters.allow_all = true # allow inverters, even if we have no inverter mapping
@@ -22,7 +24,7 @@ inverters.allow_all = true # allow inverters, even if we have no inverter mapp
# inverter mapping, maps a `serial_no` to a `mqtt_id` and defines an optional `suggested_area` for `home-assistant`
#
# for each inverter add a block starting with [inverters."<16-digit serial number>"]
#[inverters."R17xxxxxxxxxxxx1"]
[inverters."R170000000000001"]
#node_id = '' # Optional, MQTT replacement for inverters serial number
#suggested_area = '' # Optional, suggested installation area for home-assistant

24
app/entrypoint.sh Normal file

@@ -0,0 +1,24 @@
#!/bin/sh
set -e
user="$(id -u)"
echo "######################################################"
echo "# prepare: '$SERVICE_NAME' Version:$VERSION"
echo "# for running with UserID:$UID, GroupID:$GID"
echo "#"
if [ "$user" = '0' ]; then
mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
if ! id $SERVICE_NAME &> /dev/null; then
addgroup --gid $GID $SERVICE_NAME 2> /dev/null
adduser -G $SERVICE_NAME -s /bin/false -D -H -g "" -u $UID $SERVICE_NAME
fi
chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
echo "######################################################"
echo "#"
exec su-exec $SERVICE_NAME "$@"
else
exec "$@"
fi
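In effect (a sketch, reusing the `tsun-proxy` image tag from the README): when the container starts as root, the entrypoint creates the service user with the requested IDs and drops privileges via `su-exec` before running the CMD; when started as a non-root user, it simply executes the CMD directly.

```sh
# starts as root: the entrypoint creates user/group with the given IDs,
# then re-executes "python3 ./server.py" under that account via su-exec
docker run --env 'UID=1050' --env 'GID=20' tsun-proxy

# starts directly as a non-root user: the entrypoint just exec's the CMD
docker run --user 1050:20 tsun-proxy
```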

app/requirements.txt

@@ -1,2 +1,2 @@
aiomqtt==1.2.1
schema==0.7.5
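For local development outside the container, the same pinned dependencies can be installed directly. A sketch, assuming the file lives at `app/requirements.txt` as in the Dockerfile's build context:

```sh
python -m pip install -r app/requirements.txt
```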

app/src/async_stream.py

@@ -1,104 +1,66 @@
import logging, traceback, aiomqtt, json
from config import Config
import logging
import traceback
# from config import Config
# import gc
from messages import Message, hex_dump_memory
from mqtt import Mqtt
logger = logging.getLogger('conn')
logger_mqtt = logging.getLogger('mqtt')
class AsyncStream(Message):
def __init__(self, proxy, reader, writer, addr, stream=None, server_side=True):
super().__init__()
self.proxy = proxy
def __init__(self, reader, writer, addr, remote_stream, server_side: bool,
id_str=b'') -> None:
super().__init__(server_side, id_str)
self.reader = reader
self.writer = writer
self.remoteStream = stream
self.remoteStream = remote_stream
self.addr = addr
self.server_side = server_side
self.mqtt = Mqtt()
self.unique_id = 0
self.node_id = ''
self.r_addr = ''
self.l_addr = ''
'''
Our public methods
'''
async def set_serial_no(self, serial_no : str):
logger_mqtt.info(f'SerialNo: {serial_no}')
if self.unique_id != serial_no:
inverters = Config.get('inverters')
#logger_mqtt.debug(f'Inverters: {inverters}')
if serial_no in inverters:
logger_mqtt.debug(f'SerialNo {serial_no} allowed!')
inv = inverters[serial_no]
self.node_id = inv['node_id']
sug_area = inv['suggested_area']
else:
logger_mqtt.debug(f'SerialNo {serial_no} not known!')
self.node_id = ''
sug_area = ''
if not inverters['allow_all']:
self.unique_id = None
logger_mqtt.error('ignore message from unknown inverter!')
return
async def loop(self):
self.r_addr = self.writer.get_extra_info('peername')
self.l_addr = self.writer.get_extra_info('sockname')
self.unique_id = serial_no
ha = Config.get('ha')
self.entitiy_prfx = ha['entity_prefix'] + '/'
discovery_prfx = ha['discovery_prefix'] + '/'
if self.server_side:
try:
for data_json, id in self.db.ha_confs(self.entitiy_prfx + self.node_id, self.unique_id, sug_area):
logger_mqtt.debug(f'Register: {data_json}')
await self.mqtt.publish(f"{discovery_prfx}sensor/{self.node_id}{id}/config", data_json)
except Exception:
logging.error(
f"Proxy: Exception:\n"
f"{traceback.format_exc()}")
async def loop(self) -> None:
while True:
try:
await self.__async_read()
if self.id_str:
await self.set_serial_no(self.id_str.decode("utf-8"))
if self.unique_id:
await self.__async_write()
await self.__async_read()
if self.unique_id:
await self.__async_write()
await self.__async_forward()
await self.__async_publ_mqtt()
await self.async_publ_mqtt()
except (ConnectionResetError,
ConnectionAbortedError,
BrokenPipeError,
RuntimeError) as error:
logger.error(f'In loop for {self.addr}: {error}')
logger.warning(f'In loop for l{self.l_addr} | '
f'r{self.r_addr}: {error}')
self.close()
return
return self
except Exception:
logger.error(
f"Exception for {self.addr}:\n"
f"{traceback.format_exc()}")
self.close()
return
def close(self):
logger.info(f'in async_stream.close() {self.addr}')
self.writer.close()
self.proxy = None
self.remoteStream = None
return self
def disc(self) -> None:
logger.debug(f'in AsyncStream.disc() l{self.l_addr} | r{self.r_addr}')
self.writer.close()
def close(self):
logger.debug(f'in AsyncStream.close() l{self.l_addr} | r{self.r_addr}')
self.writer.close()
super().close() # call close handler in the parent class
# logger.info(f'AsyncStream refs: {gc.get_referrers(self)}')
'''
Our private methods
'''
@@ -109,37 +71,38 @@ class AsyncStream(Message):
self.read() # call read in parent class
else:
raise RuntimeError("Peer closed.")
async def __async_write(self) -> None:
if self._send_buffer:
hex_dump_memory(logging.INFO, f'Transmit to {self.addr}:', self._send_buffer, len(self._send_buffer))
hex_dump_memory(logging.INFO, f'Transmit to {self.addr}:',
self._send_buffer, len(self._send_buffer))
self.writer.write(self._send_buffer)
await self.writer.drain()
self._send_buffer = bytearray(0) #self._send_buffer[sent:]
self._send_buffer = bytearray(0) # self._send_buffer[sent:]
async def __async_forward(self) -> None:
if self._forward_buffer:
if not self.remoteStream:
tsun = Config.get('tsun')
self.remoteStream = await self.proxy.CreateClientStream (self, tsun['host'], tsun['port'])
await self.async_create_remote()
if self.remoteStream:
self.remoteStream._init_new_client_conn(self.contact_name,
self.contact_mail)
await self.remoteStream.__async_write()
if self.remoteStream:
hex_dump_memory(logging.DEBUG, f'Forward to {self.remoteStream.addr}:', self._forward_buffer, len(self._forward_buffer))
self.remoteStream.writer.write (self._forward_buffer)
await self.remoteStream.writer.drain()
hex_dump_memory(logging.INFO,
f'Forward to {self.remoteStream.addr}:',
self._forward_buffer,
len(self._forward_buffer))
self.remoteStream.writer.write(self._forward_buffer)
await self.remoteStream.writer.drain()
self._forward_buffer = bytearray(0)
async def __async_publ_mqtt(self) -> None:
if self.server_side:
db = self.db.db
for key in self.new_data:
if self.new_data[key] and key in db:
data_json = json.dumps(db[key])
logger_mqtt.info(f'{key}: {data_json}')
await self.mqtt.publish(f"{self.entitiy_prfx}{self.node_id}{key}", data_json)
self.new_data[key] = False
async def async_create_remote(self) -> None:
pass
def __del__ (self):
logger.debug ("AsyncStream __del__")
async def async_publ_mqtt(self) -> None:
pass
def __del__(self):
logging.debug(f"AsyncStream.__del__ l{self.l_addr} | r{self.r_addr}")

app/src/config.py

@@ -1,76 +1,90 @@
'''Config module handles the proxy configuration in the config.toml file'''
import shutil, tomllib, logging
import shutil
import tomllib
import logging
from schema import Schema, And, Use, Optional
class Config():
'''Static class Config reads and sanitizes the config.
Read config.toml file and sanitize it with read().
Get named parts of the config with get()'''
config = {}
conf_schema = Schema({ 'tsun': {
'enabled': Use(bool),
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535)},
conf_schema = Schema({
'tsun': {
'enabled': Use(bool),
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535)
},
'mqtt': {
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535),
'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)),
'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None))
},
'ha': {
'auto_conf_prefix': Use(str),
'discovery_prefix': Use(str),
'entity_prefix': Use(str),
'proxy_node_id': Use(str),
'proxy_unique_id': Use(str)
},
'inverters': {
'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): {
Optional('node_id', default=""): And(Use(str),
Use(lambda s: s + '/'
if len(s) > 0 and
s[-1] != '/' else s)),
'mqtt': {
'host': Use(str),
'port': And(Use(int), lambda n: 1024 <= n <= 65535),
'user': And(Use(str), Use(lambda s: s if len(s) >0 else None)),
'passwd': And(Use(str), Use(lambda s: s if len(s) >0 else None))},
'ha': {
'auto_conf_prefix': Use(str),
'discovery_prefix': Use(str),
'entity_prefix': Use(str)},
'inverters': {
'allow_all' : Use(bool),
And(Use(str), lambda s: len(s) == 16 ): {
Optional('node_id', default=""): And(Use(str),Use(lambda s: s +'/' if len(s)> 0 and s[-1] != '/' else s)),
Optional('suggested_area', default=""): Use(str)
}}
}, ignore_extra_keys=True)
Optional('suggested_area', default=""): Use(str)
}}
}, ignore_extra_keys=True
)
@classmethod
def read(cls) -> None:
'''Read config file, merge it with the default config and sanitize the result'''
'''Read config file, merge it with the default config
and sanitize the result'''
config = {}
logger = logging.getLogger('data')
try:
# make the default config transparent by copying it
# in the config.example file
shutil.copy2("default_config.toml", "config/config.example.toml")
# read example config file as default configuration
with open("default_config.toml", "rb") as f:
def_config = tomllib.load(f)
# overwrite the default values with values from
# the config.toml file
with open("config/config.toml", "rb") as f:
usr_config = tomllib.load(f)
config['tsun'] = def_config['tsun'] | usr_config['tsun']
config['mqtt'] = def_config['mqtt'] | usr_config['mqtt']
config['ha'] = def_config['ha'] | usr_config['ha']
config['inverters'] = def_config['inverters'] | usr_config['inverters']
config['tsun'] = def_config['tsun'] | usr_config['tsun']
config['mqtt'] = def_config['mqtt'] | usr_config['mqtt']
config['ha'] = def_config['ha'] | usr_config['ha']
config['inverters'] = def_config['inverters'] | \
usr_config['inverters']
cls.config = cls.conf_schema.validate(config)
# logging.debug(f'Read config: "{cls.config}" ')
except Exception as error:
logger.error(f'Config.read: {error}')
cls.config = {}
@classmethod
def get(cls, member: str = None):
'''Get a named attribute from the proxy config. If member ==
None it returns the complete config dict'''
if member:
return cls.config.get(member, {})
else:
return cls.config

app/src/infos.py

@@ -1,186 +1,382 @@
import struct, json, logging
import struct
import json
import logging
import os
class Infos:
stat = {}
app_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
@classmethod
def static_init(cls):
logging.info('Initialize proxy statistics')
# init proxy counter in the class.stat dictionary
cls.stat['proxy'] = {}
for key in cls.__info_defs:
name = cls.__info_defs[key]['name']
if name[0] == 'proxy':
cls.stat['proxy'][name[1]] = 0
# add values from the environment to the device definition table
prxy = cls.__info_devs['proxy']
prxy['sw'] = cls.version
prxy['mdl'] = cls.app_name
class Infos:
def __init__(self):
self.db = {}
self.tracer = logging.getLogger('data')
__info_defs={
# collector values:
0x00092ba8: {'name':['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''},
0x000927c0: {'name':['collector', 'Chip_Type'], 'level': logging.DEBUG, 'unit': ''},
0x00092f90: {'name':['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''},
0x00095a88: {'name':['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''},
0x00095aec: {'name':['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''},
0x000cf850: {'name':['collector', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's'},
0x000005dc: {'name':['collector', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W'},
# inverter values:
0x0000000a: {'name':['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''},
0x00000014: {'name':['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''},
0x0000001e: {'name':['inverter', 'Version'], 'level': logging.INFO, 'unit': ''},
0x00000028: {'name':['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''},
0x00000032: {'name':['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''},
# env:
0x00000514: {'name':['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha':{'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id':'temp_', 'fmt':'| float','name': 'Inverter Temperature'}},
0x000c3500: {'name':['env', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%' , 'ha':{'dev_cla': None, 'stat_cla': 'measurement', 'id':'signal_', 'fmt':'| float','name': 'Signal Strength', 'icon':'mdi:wifi'}},
__info_devs = {
'proxy': {'singleton': True, 'name': 'Proxy', 'mf': 'Stefan Allius'}, # noqa: E501
'controller': {'via': 'proxy', 'name': 'Controller', 'mdl': 0x00092f90, 'mf': 0x000927c0, 'sw': 0x00092ba8}, # noqa: E501
'inverter': {'via': 'controller', 'name': 'Micro Inverter', 'mdl': 0x00000032, 'mf': 0x00000014, 'sw': 0x0000001e}, # noqa: E501
'input_pv1': {'via': 'inverter', 'name': 'Module PV1'},
'input_pv2': {'via': 'inverter', 'name': 'Module PV2', 'dep': {'reg': 0x00095b50, 'gte': 2}}, # noqa: E501
'input_pv3': {'via': 'inverter', 'name': 'Module PV3', 'dep': {'reg': 0x00095b50, 'gte': 3}}, # noqa: E501
'input_pv4': {'via': 'inverter', 'name': 'Module PV4', 'dep': {'reg': 0x00095b50, 'gte': 4}}, # noqa: E501
}
__info_defs = {
# collector values used for device registration:
0x00092ba8: {'name': ['collector', 'Collector_Fw_Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
0x000927c0: {'name': ['collector', 'Chip_Type'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00092f90: {'name': ['collector', 'Chip_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00095a88: {'name': ['collector', 'Trace_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00095aec: {'name': ['collector', 'Logger_URL'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00095b50: {'name': ['collector', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# inverter values used for device registration:
0x0000000a: {'name': ['inverter', 'Product_Name'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000014: {'name': ['inverter', 'Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x0000001e: {'name': ['inverter', 'Version'], 'level': logging.INFO, 'unit': ''}, # noqa: E501
0x00000028: {'name': ['inverter', 'Serial_Number'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000032: {'name': ['inverter', 'Equipment_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# proxy:
0xffffff00: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': '| int', 'name': 'Active Inverter Connections', 'icon': 'mdi:counter'}}, # noqa: E501
0xffffff01: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': '| int', 'name': 'Unknown Serial No', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
0xffffff02: {'name': ['proxy', 'Unknown_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_msg_', 'fmt': '| int', 'name': 'Unknown Msg Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
0xffffff03: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': '| int', 'name': 'Invalid Data Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
0xffffff04: {'name': ['proxy', 'Internal_Error'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'intern_err_', 'fmt': '| int', 'name': 'Internal Error', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic', 'en': False}}, # noqa: E501
0xffffff05: {'name': ['proxy', 'Unknown_Ctrl'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_ctrl_', 'fmt': '| int', 'name': 'Unknown Control Type', 'icon': 'mdi:counter', 'ent_cat': 'diagnostic'}}, # noqa: E501
# 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':'| float','name': 'Grid Voltage'}}, # noqa: E501
# events
0x00000191: {'name': ['events', '401_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000192: {'name': ['events', '402_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000193: {'name': ['events', '403_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000194: {'name': ['events', '404_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000195: {'name': ['events', '405_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000196: {'name': ['events', '406_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000197: {'name': ['events', '407_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000198: {'name': ['events', '408_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x00000199: {'name': ['events', '409_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x0000019a: {'name': ['events', '410_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x0000019b: {'name': ['events', '411_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x0000019c: {'name': ['events', '412_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x0000019d: {'name': ['events', '413_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x0000019e: {'name': ['events', '414_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x0000019f: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
0x000001a0: {'name': ['events', '416_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501
# events:
0x00000191: {'name':['events', '401_'], 'level': logging.DEBUG, 'unit': ''},
0x00000192: {'name':['events', '402_'], 'level': logging.DEBUG, 'unit': ''},
0x00000193: {'name':['events', '403_'], 'level': logging.DEBUG, 'unit': ''},
0x00000194: {'name':['events', '404_'], 'level': logging.DEBUG, 'unit': ''},
0x00000195: {'name':['events', '405_'], 'level': logging.DEBUG, 'unit': ''},
0x00000196: {'name':['events', '406_'], 'level': logging.DEBUG, 'unit': ''},
0x00000197: {'name':['events', '407_'], 'level': logging.DEBUG, 'unit': ''},
0x00000198: {'name':['events', '408_'], 'level': logging.DEBUG, 'unit': ''},
0x00000199: {'name':['events', '409_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019a: {'name':['events', '410_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019b: {'name':['events', '411_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019c: {'name':['events', '412_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019d: {'name':['events', '413_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019e: {'name':['events', '414_'], 'level': logging.DEBUG, 'unit': ''},
0x0000019f: {'name':['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''},
0x000001a0: {'name':['events', '416_'], 'level': logging.DEBUG, 'unit': ''},
# grid measures:
0x000003e8: {'name': ['grid', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'inverter', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'out_volt_', 'fmt': '| float', 'name': 'Grid Voltage', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x0000044c: {'name': ['grid', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'inverter', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'out_cur_', 'fmt': '| float', 'name': 'Grid Current', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x000004b0: {'name': ['grid', 'Frequency'], 'level': logging.DEBUG, 'unit': 'Hz', 'ha': {'dev': 'inverter', 'dev_cla': 'frequency', 'stat_cla': 'measurement', 'id': 'out_freq_', 'fmt': '| float', 'name': 'Grid Frequency', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000640: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': '| float', 'name': 'Power'}}, # noqa: E501
0x000005dc: {'name': ['env', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'fmt': '| int', 'name': 'Rated Power', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000514: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': '| int', 'name': 'Temperature'}}, # noqa: E501
# input measures:
0x000006a4: {'name': ['input', 'pv1', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv1', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv1_', 'val_tpl': "{{ (value_json['pv1']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000708: {'name': ['input', 'pv1', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv1', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv1_', 'val_tpl': "{{ (value_json['pv1']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x0000076c: {'name': ['input', 'pv1', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'input_pv1', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv1_', 'val_tpl': "{{ (value_json['pv1']['Power'] | float)}}"}}, # noqa: E501
0x000007d0: {'name': ['input', 'pv2', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv2', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv2_', 'val_tpl': "{{ (value_json['pv2']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000834: {'name': ['input', 'pv2', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv2', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv2_', 'val_tpl': "{{ (value_json['pv2']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000898: {'name': ['input', 'pv2', 'Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'input_pv2', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv2_', 'val_tpl': "{{ (value_json['pv2']['Power'] | float)}}"}}, # noqa: E501
0x000008fc: {'name': ['input', 'pv3', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv3', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv3_', 'val_tpl': "{{ (value_json['pv3']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000960: {'name': ['input', 'pv3', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv3', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv3_', 'val_tpl': "{{ (value_json['pv3']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x000009c4: {'name': ['input', 'pv3', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv3', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv3_', 'val_tpl': "{{ (value_json['pv3']['Power'] | float)}}"}}, # noqa: E501
0x00000a28: {'name': ['input', 'pv4', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha': {'dev': 'input_pv4', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id': 'volt_pv4_', 'val_tpl': "{{ (value_json['pv4']['Voltage'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000a8c: {'name': ['input', 'pv4', 'Current'], 'level': logging.DEBUG, 'unit': 'A', 'ha': {'dev': 'input_pv4', 'dev_cla': 'current', 'stat_cla': 'measurement', 'id': 'cur_pv4_', 'val_tpl': "{{ (value_json['pv4']['Current'] | float)}}", 'icon': 'mdi:gauge', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x00000af0: {'name': ['input', 'pv4', 'Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'input_pv4', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'power_pv4_', 'val_tpl': "{{ (value_json['pv4']['Power'] | float)}}"}}, # noqa: E501
0x00000c1c: {'name': ['input', 'pv1', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv1_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv1']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
0x00000c80: {'name': ['input', 'pv1', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv1', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv1_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv1']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
0x00000ce4: {'name': ['input', 'pv2', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv2_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv2']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
0x00000d48: {'name': ['input', 'pv2', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv2', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv2_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv2']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
0x00000dac: {'name': ['input', 'pv3', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv3_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv3']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
0x00000e10: {'name': ['input', 'pv3', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv3', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv3_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv3']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
0x00000e74: {'name': ['input', 'pv4', 'Daily_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_pv4_', 'name': 'Daily Generation', 'val_tpl': "{{ (value_json['pv4']['Daily_Generation'] | float)}}", 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
0x00000ed8: {'name': ['input', 'pv4', 'Total_Generation'], 'level': logging.DEBUG, 'unit': 'kWh', 'ha': {'dev': 'input_pv4', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_pv4_', 'name': 'Total Generation', 'val_tpl': "{{ (value_json['pv4']['Total_Generation'] | float)}}", 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
# total:
0x00000b54: {'name': ['total', 'Daily_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total_increasing', 'id': 'daily_gen_', 'fmt': '| float', 'name': 'Daily Generation', 'icon': 'mdi:solar-power-variant', 'must_incr': True}}, # noqa: E501
0x00000bb8: {'name': ['total', 'Total_Generation'], 'level': logging.INFO, 'unit': 'kWh', 'ha': {'dev': 'inverter', 'dev_cla': 'energy', 'stat_cla': 'total', 'id': 'total_gen_', 'fmt': '| float', 'name': 'Total Generation', 'icon': 'mdi:solar-power', 'must_incr': True}}, # noqa: E501
# controller:
0x000c3500: {'name': ['controller', 'Signal_Strength'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'signal_', 'fmt': '| int', 'name': 'Signal Strength', 'icon': 'mdi:wifi'}}, # noqa: E501
0x000c96a8: {'name': ['controller', 'Power_On_Time'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': 'duration', 'stat_cla': 'measurement', 'id': 'power_on_time_', 'name': 'Power on Time', 'val_tpl': "{{ (value_json['Power_On_Time'] | float)}}", 'nat_prc': '3', 'ent_cat': 'diagnostic'}}, # noqa: E501
0x000cf850: {'name': ['controller', 'Data_Up_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': 'measurement', 'id': 'data_up_intval_', 'fmt': '| int', 'name': 'Data Up Interval', 'icon': 'mdi:update', 'ent_cat': 'diagnostic'}}, # noqa: E501
}
def dev_value(self, idx: str | int) -> str | int | float | None:
'''returns the stored device value from our database
idx:int ==> look up the value in the database and return it as str,
int or float. If the value is not available return 'None'
idx:str ==> returns the string as a fixed value without a
database lookup
'''
if type(idx) is str:
return idx # return idx as a fixed value
elif idx in self.__info_defs:
row = self.__info_defs[idx]
if 'singleton' in row and row['singleton']:
dict = self.stat
else:
dict = self.db
keys = row['name']
for key in keys:
if key not in dict:
return None # value not found in the database
dict = dict[key]
return dict  # value of the requested entry
return None  # unknown idx, not in __info_defs
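# usage sketch (editor's note): an int idx is resolved against self.db
# via the 'name' path from __info_defs, a str is passed through as a
# fixed value (0x000005dc maps to ['env', 'Rated_Power'] above):
#   info.dev_value('TSUN')      # -> 'TSUN' (fixed value)
#   info.dev_value(0x000005dc)  # -> the stored rated power, else None
#   info.dev_value(0xdeadbeef)  # -> None (unknown idx)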
def ignore_this_device(self, dep: dict) -> bool:
'''Checks the equation in the dep dict;
returns 'False' only if the equation is valid,
'True' in any other case'''
if 'reg' in dep:
value = self.dev_value(dep['reg'])
if not value:
return True
if 'gte' in dep:
return not value >= dep['gte']
elif 'less_eq' in dep:
return not value <= dep['less_eq']
return True
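# dependency sketch (editor's note, hypothetical threshold): a device in
# __info_devs may carry a 'dep' equation that is checked against the live
# database; the device below would only be kept while the rated power
# (register 0x000005dc above) is at least 800:
#   dep = {'reg': 0x000005dc, 'gte': 800}
#   if not info.ignore_this_device(dep):
#       ...  # register the device with home assistant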
def ha_confs(self, ha_prfx, node_id, snr, singleton: bool, sug_area=''):
'''Generator function yields a json register struct for home-assistant
auto configuration and a unique entity string
arguments:
ha_prfx:str ==> MQTT prefix for the home assistant 'stat_t' string
snr:str ==> serial number of the inverter, used to build unique
entity strings
sug_area:str ==> suggested area string from the config file'''
tab = self.__info_defs
for key in tab:
row = tab[key]
if 'singleton' in row:
if singleton != row['singleton']:
continue
elif singleton:
continue
prfx = ha_prfx + node_id
# check if we have details for home assistant
if 'ha' in row:
ha = row['ha']
if 'comp' in ha:
component = ha['comp']
else:
component = 'sensor'
attr = {}
if 'name' in ha:
attr['name'] = ha['name']
else:
attr['name'] = row['name'][-1]
attr['stat_t'] = prfx + row['name'][0]
attr['dev_cla'] = ha['dev_cla']
attr['stat_cla'] = ha['stat_cla']
attr['uniq_id'] = ha['id']+snr
if 'val_tpl' in ha:
attr['val_tpl'] = ha['val_tpl']
elif 'fmt' in ha:
attr['val_tpl'] = '{{value_json' + f"['{row['name'][-1]}'] {ha['fmt']}" + '}}'  # noqa: E501
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.__info_defs: the row for {key} do"
" not have a 'val_tpl' nor a 'fmt' value")
# add unit_of_meas only if stat_cla isn't None. If
# stat_cla is None we want a plain number and not a line
# graph in home assistant; a unit would change the number
# format to a line graph
if 'unit' in row and attr['stat_cla'] is not None:
attr['unit_of_meas'] = row['unit'] # 'unit_of_meas'
if 'icon' in ha:
attr['ic'] = ha['icon'] # icon for the entity
if 'nat_prc' in ha:
attr['sug_dsp_prc'] = ha['nat_prc']  # precision of floats
if 'ent_cat' in ha:
attr['ent_cat'] = ha['ent_cat'] # diagnostic, config
# enabled_by_default is deactivated, since it prevents the via
# setup of the devices. It seems that there is a bug in home
# assistant; tested with 'Home Assistant 2023.10.4'
# if 'en' in ha: # enabled_by_default
# attr['en'] = ha['en']
if 'dev' in ha:
device = self.__info_devs[ha['dev']]
if 'dep' in device and self.ignore_this_device(device['dep']): # noqa: E501
continue
dev = {}
# use the same string for 'name' and 'suggested area', so we
# get dedicated devices in home assistant with a short value
# name and headline
if (sug_area == '' or
('singleton' in device and device['singleton'])):
dev['name'] = device['name']
dev['sa'] = device['name']
else:
dev['name'] = device['name']+' - '+sug_area
dev['sa'] = device['name']+' - '+sug_area
if 'via' in device: # add the link to the parent device
via = device['via']
if via in self.__info_devs:
via_dev = self.__info_devs[via]
if 'singleton' in via_dev and via_dev['singleton']:
dev['via_device'] = via
else:
dev['via_device'] = f"{via}_{snr}"
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.__info_defs: the row for "
f"{key} has an invalid via value: "
f"{via}")
for key in ('mdl', 'mf', 'sw', 'hw'):  # add optional
# values for 'model', 'manufacturer', 'sw version' and
# 'hw version'
if key in device:
data = self.dev_value(device[key])
if data is not None:
dev[key] = data
if 'singleton' in device and device['singleton']:
dev['ids'] = [f"{ha['dev']}"]
else:
dev['ids'] = [f"{ha['dev']}_{snr}"]
attr['dev'] = dev
origin = {}
origin['name'] = self.app_name
origin['sw'] = self.version
attr['o'] = origin
else:
self.inc_counter('Internal_Error')
logging.error(f"Infos.__info_defs: the row for {key} "
"missing 'dev' value for ha register")
yield json.dumps(attr), component, node_id, attr['uniq_id']
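# consumer sketch (editor's note): how this generator is meant to be
# used - publish every yielded config to the home-assistant discovery
# topic, as inverter.py does below; `client` stands for any
# aiomqtt-style publisher and the prefixes are placeholders:
async def publish_discovery(info, client, snr='123'):
    for data_json, component, node_id, uniq_id in info.ha_confs(
            'tsun/', 'inv_1/', snr, False, sug_area='garage'):
        await client.publish(
            f'homeassistant/{component}/{node_id}{uniq_id}/config',
            data_json)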
def inc_counter(self, counter: str) -> None:
'''inc proxy statistic counter'''
dict = self.stat['proxy']
dict[counter] += 1
def dec_counter(self, counter: str) -> None:
'''dec proxy statistic counter'''
dict = self.stat['proxy']
dict[counter] -= 1
def __key_obj(self, id) -> list:
d = self.__info_defs.get(id, {'name': None, 'level': logging.DEBUG,
'unit': ''})
if 'ha' in d and 'must_incr' in d['ha']:
must_incr = d['ha']['must_incr']
else:
must_incr = False
return d['name'], d['level'], d['unit'], must_incr
def parse(self, buf):
'''parses a data sequence received from the inverter and
stores the values in Infos.db
buf: buffer of the sequence to parse'''
result = struct.unpack_from('!l', buf, 0)
elms = result[0]
i = 0
ind = 4
while i < elms:
result = struct.unpack_from('!lB', buf, ind)
info_id = result[0]
data_type = result[1]
ind += 5
keys, level, unit, must_incr = self.__key_obj(info_id)
if data_type == 0x54: # 'T' -> Pascal-String
str_len = buf[ind]
result = struct.unpack_from(f'!{str_len+1}p', buf,
ind)[0].decode(encoding='ascii',
errors='replace')
ind += str_len+1
elif data_type == 0x49: # 'I' -> int32
result = struct.unpack_from('!l', buf, ind)[0]
ind += 4
elif data_type == 0x53: # 'S' -> short
result = struct.unpack_from('!h', buf, ind)[0]
ind += 2
elif data_type == 0x46: # 'F' -> float32
result = round(struct.unpack_from('!f', buf, ind)[0], 2)
ind += 4
elif data_type == 0x4c: # 'L' -> int64
result = struct.unpack_from('!q', buf, ind)[0]
ind += 8
else:
self.inc_counter('Invalid_Data_Type')
logging.error(f"Infos.parse: data_type: {data_type}"
" not supported")
return
if keys:
dict = self.db
name = ''
for key in keys[:-1]:
if key not in dict:
dict[key] = {}
dict = dict[key]
name += key + '.'
if keys[-1] not in dict:
update = (not must_incr or result > 0)
else:
if must_incr:
update = dict[keys[-1]] < result
else:
update = dict[keys[-1]] != result
if update:
dict[keys[-1]] = result
name += keys[-1]
yield keys[0], update
else:
update = False
name = str(f'info-id.0x{info_id:x}')
self.tracer.log(level, f'{name} : {result}{unit}'
f' update: {update}')
i += 1
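# round-trip sketch (editor's note): build a two-element sequence in the
# wire format parse() expects - an element count ('!l'), then per element
# an info id ('!l'), a type byte and the raw value - and feed it to an
# Infos instance; the ids are real ones from __info_defs above, the
# values are made up, and the class is assumed to be initialized the way
# inverter.py does below:
buf = struct.pack('!l', 2)
buf += struct.pack('!lB', 0x00000640, 0x46) + struct.pack('!f', 123.5)  # grid
buf += struct.pack('!lB', 0x000005dc, 0x53) + struct.pack('!h', 600)    # env
info = Infos()
for group, updated in info.parse(buf):
    print(group, updated)  # -> grid True, then env True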

214
app/src/inverter.py Normal file
View File

@@ -0,0 +1,214 @@
import asyncio
import logging
import traceback
import json
from config import Config
from async_stream import AsyncStream
from mqtt import Mqtt
from aiomqtt import MqttCodeError
from infos import Infos
# import gc
# logger = logging.getLogger('conn')
logger_mqtt = logging.getLogger('mqtt')
class Inverter(AsyncStream):
'''class Inverter is a derivation of AsyncStream
The class has some class methods for managing common resources like the
connection to the MQTT broker or the proxy error counters, which are
shared by all inverter connections
Instances of the class are connections to an inverter and can have an
optional link to a remote connection to the TSUN cloud. A remote
connection dies with the inverter connection.
class methods:
class_init(): initialize the common resources of the proxy (MQTT
broker, Proxy DB, etc). Must be called before the
first Inverter instance can be created
class_close(): release the common resources of the proxy. Should not
be called before all instances of the class are
destroyed
methods:
server_loop(addr): Async loop method for receiving messages from the
inverter (server-side)
client_loop(addr): Async loop method for receiving messages from the
TSUN cloud (client-side)
async_create_remote(): Establish a client connection to the TSUN cloud
async_publ_mqtt(): Publish data to MQTT broker
close(): Release method which must be called before an instance can be
destroyed
'''
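# lifecycle sketch (editor's note): the intended call order, as wired up
# in server.py further below:
#   Inverter.class_init()                                   # once at boot
#   await Inverter(reader, writer, addr).server_loop(addr)  # per connection
#   Inverter.class_close(loop)                              # at shutdown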
@classmethod
def class_init(cls) -> None:
logging.debug('Inverter.class_init')
# initialize the proxy statistics
Infos.static_init()
cls.db_stat = Infos()
ha = Config.get('ha')
cls.entity_prfx = ha['entity_prefix'] + '/'
cls.discovery_prfx = ha['discovery_prefix'] + '/'
cls.proxy_node_id = ha['proxy_node_id'] + '/'
cls.proxy_unique_id = ha['proxy_unique_id']
# call Mqtt singleton to establish the connection to the mqtt broker
cls.mqtt = Mqtt(cls.__cb_mqtt_is_up)
@classmethod
async def __cb_mqtt_is_up(cls) -> None:
logging.info('Initialize proxy device on home assistant')
# register proxy status counters at home assistant
await cls.__register_proxy_stat_home_assistant()
# send values of the proxy status counters
await asyncio.sleep(0.5)  # wait a bit before sending data
cls.new_stat_data['proxy'] = True # force sending data to sync ha
await cls.__async_publ_mqtt_proxy_stat('proxy')
@classmethod
async def __register_proxy_stat_home_assistant(cls) -> None:
'''register all our topics at home assistant'''
for data_json, component, node_id, id in cls.db_stat.ha_confs(
cls.entity_prfx, cls.proxy_node_id,
cls.proxy_unique_id, True):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") # noqa: E501
await cls.mqtt.publish(f'{cls.discovery_prfx}{component}/{node_id}{id}/config', data_json) # noqa: E501
@classmethod
async def __async_publ_mqtt_proxy_stat(cls, key) -> None:
stat = Infos.stat
if key in stat and cls.new_stat_data[key]:
data_json = json.dumps(stat[key])
node_id = cls.proxy_node_id
logger_mqtt.debug(f'{key}: {data_json}')
await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}",
data_json)
cls.new_stat_data[key] = False
@classmethod
def class_close(cls, loop) -> None:
logging.debug('Inverter.class_close')
logging.info('Close MQTT Task')
loop.run_until_complete(cls.mqtt.close())
cls.mqtt = None
def __init__(self, reader, writer, addr):
super().__init__(reader, writer, addr, None, True)
self.ha_restarts = -1
async def server_loop(self, addr):
'''Loop for receiving messages from the inverter (server-side)'''
logging.info(f'Accept connection from {addr}')
self.inc_counter('Inverter_Cnt')
await self.loop()
self.dec_counter('Inverter_Cnt')
logging.info(f'Server loop stopped for r{self.r_addr}')
# if the server connection closes, we also have to disconnect
# the connection to the TSUN cloud
if self.remoteStream:
logging.debug("disconnect client connection")
self.remoteStream.disc()
try:
await self.__async_publ_mqtt_proxy_stat('proxy')
except Exception:
pass
async def client_loop(self, addr):
'''Loop for receiving messages from the TSUN cloud (client-side)'''
clientStream = await self.remoteStream.loop()
logging.info(f'Client loop stopped for l{clientStream.l_addr}')
# if the client connection closes, we don't touch the server
# connection. Instead we erase the client connection stream,
# thus on the next received packet from the inverter, we can
# establish a new connection to the TSUN cloud
# erase backlink to inverter
clientStream.remoteStream = None
if self.remoteStream == clientStream:
# logging.debug(f'Client l{clientStream.l_addr} refs:'
# f' {gc.get_referrers(clientStream)}')
# then erase the client connection
self.remoteStream = None
async def async_create_remote(self) -> None:
'''Establish a client connection to the TSUN cloud'''
tsun = Config.get('tsun')
host = tsun['host']
port = tsun['port']
addr = (host, port)
try:
logging.info(f'Connected to {addr}')
connect = asyncio.open_connection(host, port)
reader, writer = await connect
self.remoteStream = AsyncStream(reader, writer, addr, self,
False, self.id_str)
asyncio.create_task(self.client_loop(addr))
except ConnectionRefusedError as error:
logging.info(f'{error}')
except Exception:
logging.error(
f"Inverter: Exception for {addr}:\n"
f"{traceback.format_exc()}")
async def async_publ_mqtt(self) -> None:
'''publish data to MQTT broker'''
# check if new inverter or collector infos are available, or if
# home assistant has changed its status back to online
try:
if (('inverter' in self.new_data and self.new_data['inverter'])
or ('collector' in self.new_data and
self.new_data['collector'])
or self.mqtt.ha_restarts != self.ha_restarts):
await self.__register_proxy_stat_home_assistant()
await self.__register_home_assistant()
self.ha_restarts = self.mqtt.ha_restarts
for key in self.new_data:
await self.__async_publ_mqtt_packet(key)
for key in self.new_stat_data:
await self.__async_publ_mqtt_proxy_stat(key)
except MqttCodeError as error:
logging.error(f'Mqtt except: {error}')
except Exception:
logging.error(
f"Inverter: Exception:\n"
f"{traceback.format_exc()}")
async def __async_publ_mqtt_packet(self, key):
db = self.db.db
if key in db and self.new_data[key]:
data_json = json.dumps(db[key])
node_id = self.node_id
logger_mqtt.debug(f'{key}: {data_json}')
await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501
self.new_data[key] = False
async def __register_home_assistant(self) -> None:
'''register all our topics at home assistant'''
for data_json, component, node_id, id in self.db.ha_confs(
self.entity_prfx, self.node_id, self.unique_id,
False, self.sug_area):
logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
f" node_id:'{node_id}' {data_json}")
await self.mqtt.publish(f"{self.discovery_prfx}{component}"
f"/{node_id}{id}/config", data_json)
def close(self) -> None:
logging.debug(f'Inverter.close() l{self.l_addr} | r{self.r_addr}')
super().close() # call close handler in the parent class
# logger.debug (f'Inverter refs: {gc.get_referrers(self)}')
def __del__(self):
logging.debug("Inverter.__del__")
super().__del__()

View File

@@ -11,29 +11,31 @@ keys=console_formatter,file_formatter
level=DEBUG
handlers=console_handler,file_handler_name1
[logger_conn]
level=DEBUG
handlers=console_handler,file_handler_name1
propagate=0
qualname=conn
[logger_mqtt]
level=INFO
handlers=console_handler,file_handler_name1
propagate=0
qualname=mqtt
[logger_data]
level=DEBUG
handlers=file_handler_name1
propagate=0
qualname=data
[logger_mesg]
level=DEBUG
handlers=file_handler_name2
propagate=0
qualname=msg
[logger_tracer]
level=INFO
@@ -43,12 +45,12 @@ qualname=tracer
[handler_console_handler]
class=StreamHandler
level=DEBUG
formatter=console_formatter
[handler_file_handler_name1]
class=handlers.TimedRotatingFileHandler
level=INFO
formatter=file_formatter
args=('log/proxy.log', when:='midnight')
@@ -60,9 +62,9 @@ args=('log/trace.log', when:='midnight')
[formatter_console_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s'
datefmt='%Y-%m-%d %H:%M:%S
[formatter_file_formatter]
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s'
datefmt='%Y-%m-%d %H:%M:%S

View File

@@ -1,29 +1,26 @@
import struct
import logging
import time
from datetime import datetime
import weakref
if __name__ == "app.src.messages":
from app.src.infos import Infos
from app.src.config import Config
else: # pragma: no cover
from infos import Infos
from config import Config
logger = logging.getLogger('msg')
def hex_dump_memory(level, info, data, num):
n = 0
lines = []
lines.append(info)
tracer = logging.getLogger('tracer')
if len(data) == 0:
return '<empty>'
if not tracer.isEnabledFor(level):
return
for i in range(0, num, 16):
line = ' '
@@ -31,124 +28,195 @@ def hex_dump_memory(level, info, data, num):
n += 16
for j in range(n-16, n):
if j >= len(data):
break
line += '%02x ' % abs(data[j])
line += ' ' * (3 * 16 + 9 - len(line)) + ' | '
for j in range(n-16, n):
if j >= len(data):
break
c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.'
line += '%c' % c
lines.append(line)
tracer.log(level, '\n'.join(lines))
#return '\n'.join(lines)
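# usage sketch (editor's note): the dump goes to the 'tracer' logger and
# costs no formatting time when that level is disabled, e.g.:
#   hex_dump_memory(logging.DEBUG, 'Received:', b'\x00\x01ab', 4)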
class Control:
def __init__(self, ctrl: int):
self.ctrl = ctrl
def __int__(self) -> int:
return self.ctrl
def is_ind(self) -> bool:
return (self.ctrl == 0x91)
# def is_req(self) -> bool:
# return not (self.ctrl & 0x08)
def is_resp(self) -> bool:
return (self.ctrl == 0x99)
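# quick check (editor's note): on this protocol a ctrl byte of 0x91 marks
# an indication and 0x99 the matching response, which is exactly what the
# two predicates test:
assert Control(0x91).is_ind() and not Control(0x91).is_resp()
assert Control(0x99).is_resp() and not Control(0x99).is_ind()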
class IterRegistry(type):
def __iter__(cls):
for ref in cls._registry:
obj = ref()
if obj is not None:
yield obj
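# iteration sketch (editor's note): the registry keeps only weak refs, so
# `for msg in Message: ...` visits every live Message (or subclass)
# instance without preventing its garbage collection.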
class Message(metaclass=IterRegistry):
_registry = []
new_stat_data = {}
def __init__(self, server_side: bool, id_str=b''):
self._registry.append(weakref.ref(self))
self.server_side = server_side
self.header_valid = False
self.header_len = 0
self.data_len = 0
self.unique_id = 0
self.node_id = ''
self.sug_area = ''
self.id_str = id_str
self.contact_name = b''
self.contact_mail = b''
self._recv_buffer = b''
self._send_buffer = bytearray(0)
self._forward_buffer = bytearray(0)
self.db = Infos()
self.new_data = {}
self.switch = {
0x00: self.msg_contact_info,
0x22: self.msg_get_time,
0x71: self.msg_collector_data,
0x04: self.msg_inverter_data,
}
'''
Empty methods that have to be implemented in any child class which
doesn't use asyncio
'''
def _read(self) -> None: # read data bytes from socket and copy them
# to our _recv_buffer
return # pragma: no cover
'''
Our public methods
'''
def close(self) -> None:
# we have references to methods of this class in self.switch
# so we have to erase self.switch, otherwise this instance can't be
# deallocated by the garbage collector ==> we get a memory leak
self.switch.clear()
def inc_counter(self, counter: str) -> None:
self.db.inc_counter(counter)
self.new_stat_data['proxy'] = True
def dec_counter(self, counter: str) -> None:
self.db.dec_counter(counter)
self.new_stat_data['proxy'] = True
def set_serial_no(self, serial_no: str):
if self.unique_id == serial_no:
logger.debug(f'SerialNo: {serial_no}')
else:
inverters = Config.get('inverters')
# logger.debug(f'Inverters: {inverters}')
if serial_no in inverters:
inv = inverters[serial_no]
self.node_id = inv['node_id']
self.sug_area = inv['suggested_area']
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501
else:
self.node_id = ''
self.sug_area = ''
if 'allow_all' not in inverters or not inverters['allow_all']:
self.inc_counter('Unknown_SNR')
self.unique_id = None
logger.warning(f'ignore message from unknown inverter! (SerialNo: {serial_no})') # noqa: E501
return
logger.debug(f'SerialNo {serial_no} not known but accepted!')
self.unique_id = serial_no
def read(self) -> None:
self._read()
if not self.header_valid:
self.__parse_header(self._recv_buffer, len(self._recv_buffer))
if self.header_valid and len(self._recv_buffer) >= (self.header_len +
self.data_len):
hex_dump_memory(logging.INFO, f'Received from {self.addr}:',
self._recv_buffer, self.header_len+self.data_len)
self.set_serial_no(self.id_str.decode("utf-8"))
self.__dispatch_msg()
self.__flush_recv_msg()
return
def forward(self, buffer, buflen) -> None:
tsun = Config.get('tsun')
if tsun['enabled']:
self._forward_buffer = buffer[:buflen]
hex_dump_memory(logging.DEBUG, 'Store for forwarding:',
buffer, buflen)
self.__parse_header(self._forward_buffer,
len(self._forward_buffer))
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'forwrd') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
return
def _init_new_client_conn(self, contact_name, contact_mail) -> None:
logger.info(f'name: {contact_name} mail: {contact_mail}')
self.msg_id = 0
self.__build_header(0x91)
self._send_buffer += struct.pack(f'!{len(contact_name)+1}p'
f'{len(contact_mail)+1}p',
contact_name, contact_mail)
self.__finish_send_msg()
'''
Our private methods
'''
def __flow_str(self, server_side: bool, type:
('rx', 'tx', 'forwrd', 'drop')): # noqa: F821
switch = {
'rx': ' <',
'tx': ' >',
'forwrd': '<< ',
'drop': ' xx',
'rxS': '> ',
'txS': '< ',
'forwrdS': ' >>',
'dropS': 'xx ',
}
if server_side:
type += 'S'
return switch.get(type, '???')
def __timestamp(self):
if False:
# utc as epoch
ts = time.time()
else:
# convert local time to epoch
ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
return round(ts*1000)
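# editor's note: the active branch feeds the *local* wall-clock time into
# the epoch formula, so the returned millisecond stamp is shifted by the
# UTC offset; a plain UTC stamp would simply be round(time.time() * 1000).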
# check if there is a complete header in the buffer, parse it
# and set
# self.header_len
# self.data_len
@@ -157,51 +225,53 @@ class Message(metaclass=IterRegistry):
# self.msg_id
#
# if the header is incomplete, then self.header_len is still 0
#
def __parse_header(self, buf: bytes, buf_len: int) -> None:
if (buf_len < 5): # enough bytes to read len and id_len?
return
result = struct.unpack_from('!lB', buf, 0)
len = result[0] # len of complete message
id_len = result[1] # len of variable id string
hdr_len = 5+id_len+2
if (buf_len < hdr_len): # enough bytes for complete header?
return
result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4)
# store parsed header values in the class
self.id_str = result[0]
self.ctrl = Control(result[1])
self.msg_id = result[2]
self.data_len = len-id_len-3
self.header_len = hdr_len
self.header_valid = True
return
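# construction sketch (editor's note): a buffer this parser accepts can be
# packed the same way __build_header() does below - total length (bytes
# after the length field), Pascal-style id string, ctrl byte, msg id; the
# serial number is the one from the test fixtures:
id_str = b'T17E7307021D006A'
hdr = struct.pack(f'!l{len(id_str)+1}pBB', 0, id_str, 0x91, 0x22)
hdr = struct.pack('!l', len(hdr) - 4) + hdr[4:]  # patch the length field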
def __build_header(self, ctrl) -> None:
self.send_msg_ofs = len(self._send_buffer)
self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB',
0, self.id_str, ctrl, self.msg_id)
fnc = self.switch.get(self.msg_id, self.msg_unknown)
logger.info(self.__flow_str(self.server_side, 'tx') +
f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}')
def __finish_send_msg(self) -> None:
_len = len(self._send_buffer) - self.send_msg_ofs
struct.pack_into('!l', self._send_buffer, self.send_msg_ofs, _len-4)
def __dispatch_msg(self) -> None:
fnc = self.switch.get(self.msg_id, self.msg_unknown)
if self.unique_id:
logger.info(self.__flow_str(self.server_side, 'rx') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
fnc()
else:
logger.info(self.__flow_str(self.server_side, 'drop') +
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
def __flush_recv_msg(self) -> None:
self._recv_buffer = self._recv_buffer[(self.header_len+self.data_len):]
self.header_valid = False
@@ -214,84 +284,113 @@ class Message(metaclass=IterRegistry):
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
self.__process_contact_info()
# don't forward this contact info here, we will build one
# when the remote connection is established
return
elif self.ctrl.is_resp():
return # ignore received response from tsun
else:
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def __process_contact_info(self):
result = struct.unpack_from('!B', self._recv_buffer, self.header_len)
name_len = result[0]
result = struct.unpack_from(f'!{name_len+1}pB', self._recv_buffer,
self.header_len)
self.contact_name = result[0]
mail_len = result[1]
logger.info(f'name: {self.contact_name}')
result = struct.unpack_from(f'!{mail_len+1}p', self._recv_buffer,
self.header_len+name_len+1)
self.contact_mail = result[0]
logger.info(f'mail: {self.contact_mail}')
def msg_get_time(self):
tsun = Config.get('tsun')
if tsun['enabled']:
if self.ctrl.is_resp():
ts = self.__timestamp()
result = struct.unpack_from('!q', self._recv_buffer,
self.header_len)
logger.debug(f'tsun-time: {result[0]:08x}'
f' proxy-time: {ts:08x}')
else:
if self.ctrl.is_ind():
ts = self.__timestamp()
logger.debug(f'time: {ts:08x}')
self.__build_header(0x99)
self._send_buffer += struct.pack('!q', ts)
self.__finish_send_msg()
elif self.ctrl.is_resp():
result = struct.unpack_from('!q', self._recv_buffer,
self.header_len)
logger.debug(f'tsun-time: {result[0]:08x}')
return # ignore received response from tsun
else:
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def parse_msg_header(self):
result = struct.unpack_from('!lB', self._recv_buffer, self.header_len)
data_id = result[0]  # ID of the data record
id_len = result[1] # len of variable id string
logger.debug(f'Data_ID: {data_id} id_len: {id_len}')
msg_hdr_len = 5+id_len+9
result = struct.unpack_from(f'!{id_len+1}pBq', self._recv_buffer,
self.header_len + 4)
logger.debug(f'ID: {result[0]} B: {result[1]}')
logger.debug(f'time: {result[2]:08x}')
# logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime(
# "%Y-%m-%d %H:%M:%S")}')
return msg_hdr_len
def msg_collector_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
self.__process_data()
elif self.ctrl.is_resp():
return # ignore received response
else:
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def msg_inverter_data(self):
if self.ctrl.is_ind():
self.__build_header(0x99)
self._send_buffer += b'\x01'
self.__finish_send_msg()
self.__process_data()
elif self.ctrl.is_resp():
return # ignore received response
else:
self.inc_counter('Unknown_Ctrl')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def __process_data(self):
msg_hdr_len = self.parse_msg_header()
for key, update in self.db.parse(self._recv_buffer[self.header_len
+ msg_hdr_len:]):
if update:
self.new_data[key] = True
def msg_unknown(self):
logger.warning(f"Unknow Msg: ID:{self.msg_id}")
self.inc_counter('Unknown_Msg')
self.forward(self._recv_buffer, self.header_len+self.data_len)
def __del__(self):
logger.debug("Messages __del__")

View File

@@ -1,4 +1,5 @@
import asyncio
import logging
import aiomqtt
from config import Config
@@ -7,59 +8,86 @@ logger_mqtt = logging.getLogger('mqtt')
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
logger_mqtt.debug('singleton: __call__')
if cls not in cls._instances:
cls._instances[cls] = super(Singleton,
cls).__call__(*args, **kwargs)
return cls._instances[cls]
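# singleton sketch (editor's note): every Mqtt(...) call after the first
# returns the one existing instance and ignores its arguments, which is
# why the proxy can hand the callback in only once:
#   a = Mqtt(some_callback); b = Mqtt(None); assert a is b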
class Mqtt(metaclass=Singleton):
client = None
cb_MqttIsUp = None
def __init__(self, cb_MqttIsUp):
logger_mqtt.debug('MQTT: __init__')
if cb_MqttIsUp:
self.cb_MqttIsUp = cb_MqttIsUp
loop = asyncio.get_event_loop()
self.task = loop.create_task(self.__loop())
self.ha_restarts = 0
@property
def ha_restarts(self):
return self._ha_restarts
@ha_restarts.setter
def ha_restarts(self, value):
self._ha_restarts = value
def __del__(self):
logger_mqtt.debug('MQTT: __del__')
async def close(self) -> None:
logger_mqtt.debug('MQTT: close')
self.task.cancel()
try:
await self.task
except Exception as e:
logging.debug(f"Mqtt.close: exception: {e} ...")
async def publish(self, topic: str, payload: str | bytes | bytearray
| int | float | None = None) -> None:
if self.client:
await self.client.publish(topic, payload)
async def __loop(self) -> None:
mqtt = Config.get('mqtt')
ha = Config.get('ha')
logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:{mqtt["port"]} user:{mqtt["user"]}')
self.client = aiomqtt.Client(hostname=mqtt['host'], port=mqtt['port'], username=mqtt['user'], password=mqtt['passwd'])
logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:'
f'{mqtt["port"]} '
f'user:{mqtt["user"]}')
self.client = aiomqtt.Client(hostname=mqtt['host'], port=mqtt['port'],
username=mqtt['user'],
password=mqtt['passwd'])
interval = 5 # Seconds
while True:
try:
async with self.client:
logger_mqtt.info('MQTT broker connection established')
if self.cb_MqttIsUp:
await self.cb_MqttIsUp()
async with self.client.messages() as messages:
await self.client.subscribe(f"{ha['auto_conf_prefix']}/status")
await self.client.subscribe(f"{ha['auto_conf_prefix']}"
"/status")
async for message in messages:
status = message.payload.decode("UTF-8")
logger_mqtt.info('Home-Assistant Status:'
f' {status}')
if status == 'online':
self.ha_restarts += 1
await self.cb_MqttIsUp()
except aiomqtt.MqttError:
logger_mqtt.info(f"Connection lost; Reconnecting in {interval} seconds ...")
logger_mqtt.info(f"Connection lost; Reconnecting in {interval}"
" seconds ...")
await asyncio.sleep(interval)
except asyncio.CancelledError:
logger_mqtt.debug(f"MQTT task cancelled")
logger_mqtt.debug("MQTT task cancelled")
self.client = None
return

View File

@@ -1,43 +0,0 @@
import asyncio, logging, traceback
from async_stream import AsyncStream
class Proxy:
def __init__ (proxy, reader, writer, addr):
proxy.ServerStream = AsyncStream(proxy, reader, writer, addr)
proxy.ClientStream = None
async def server_loop(proxy, addr):
logging.info(f'Accept connection from {addr}')
await proxy.ServerStream.loop()
logging.info(f'Close server connection {addr}')
if proxy.ClientStream:
logging.debug ("close client connection")
proxy.ClientStream.close()
async def client_loop(proxy, addr):
await proxy.ClientStream.loop()
logging.info(f'Close client connection {addr}')
proxy.ServerStream.remoteStream = None
proxy.ClientStream = None
async def CreateClientStream (proxy, stream, host, port):
addr = (host, port)
try:
logging.info(f'Connected to {addr}')
connect = asyncio.open_connection(host, port)
reader, writer = await connect
proxy.ClientStream = AsyncStream(proxy, reader, writer, addr, stream, server_side=False)
asyncio.create_task(proxy.client_loop(addr))
except ConnectionRefusedError as error:
logging.info(f'{error}')
except Exception:
logging.error(
f"Proxy: Exception for {addr}:\n"
f"{traceback.format_exc()}")
return proxy.ClientStream
def __del__ (proxy):
logging.debug ("Proxy __del__")

View File

@@ -1,17 +1,19 @@
import logging
import asyncio
import signal
import functools
import os
from logging import config # noqa F401
from async_stream import AsyncStream
from inverter import Inverter
from config import Config
from mqtt import Mqtt
async def handle_client(reader, writer):
'''Handles a new incoming connection and starts an async loop'''
addr = writer.get_extra_info('peername')
await Inverter(reader, writer, addr).server_loop(addr)
def handle_SIGTERM(loop):
@@ -31,10 +33,19 @@ def handle_SIGTERM(loop):
loop.stop()
logging.info('Shutdown complete')
def get_log_level() -> int:
'''checks if LOG_LVL is set in the environment and returns the
corresponding logging.LOG_LEVEL'''
log_level = os.getenv('LOG_LVL', 'INFO')
if log_level == 'DEBUG':
log_level = logging.DEBUG
elif log_level == 'WARN':
log_level = logging.WARNING
else:
log_level = logging.INFO
return log_level
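# editor's note: only 'DEBUG' and 'WARN' are mapped explicitly; any other
# value, including an unset LOG_LVL, falls back to logging.INFO, e.g.:
#   os.environ['LOG_LVL'] = 'WARN'  ->  get_log_level() == logging.WARNING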
if __name__ == "__main__":
@@ -42,39 +53,46 @@ if __name__ == "__main__":
# Setup our daily, rotating logger
#
serv_name = os.getenv('SERVICE_NAME', 'proxy')
version = os.getenv('VERSION', 'unknown')
logging.config.fileConfig('logging.ini')
logging.info(f'Server "{serv_name}" will be started')
logging.info(f'Server "{serv_name} - {version}" will be started')
# set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
log_level = get_log_level()
logging.getLogger().setLevel(log_level)
logging.getLogger('msg').setLevel(log_level)
logging.getLogger('conn').setLevel(log_level)
logging.getLogger('data').setLevel(log_level)
# read config file
Config.read()
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
Inverter.class_init()
#
# Register some UNIX Signal handlers for a graceful server shutdown
# on Docker restart and stop
#
for signame in ('SIGINT', 'SIGTERM'):
loop.add_signal_handler(getattr(signal, signame),
functools.partial(handle_SIGTERM, loop))
#
# Create a task for our listening server. This must be a task! If we call
# start_server directly out of our main task, the eventloop will be blocked
# and we can't receive and handle the UNIX signals!
#
loop.create_task(asyncio.start_server(handle_client, '0.0.0.0', 5005))
try:
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
Inverter.class_close(loop)
logging.info('Close event loop')
loop.close()
logging.info (f'Finally, exit Server "{serv_name}"')
logging.info(f'Finally, exit Server "{serv_name}"')

View File

@@ -12,6 +12,88 @@ def ContrDataSeq(): # Get Time Request message
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
return msg
@pytest.fixture
def InvDataSeq(): # Data indication from the controller
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def InvalidDataSeq(): # Data indication from the controller
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x64\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def InvDataSeq2(): # Data indication from the controller
msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
msg += b'\x53\x00\x00'
return msg
@pytest.fixture
def InvDataSeq2_Zero(): # Data indication from the inverter with zeroed values
msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x00\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x00\x00\x00\x00\x06'
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x00\x00\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x00\x00\x00\x00\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x00\x00\x00\x00'
msg += b'\x00\x00\x0b\xb8\x46\x00\x00\x00\x00\x00\x00\x0c\x1c\x46\x00\x00\x00\x00\x00\x00\x0c\x80\x46\x00\x00\x00\x00\x00\x00\x0c\xe4\x46\x00\x00\x00\x00\x00\x00\x0d\x48\x46'
msg += b'\x00\x00\x00\x00\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
msg += b'\x53\x00\x00'
return msg
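# A minimal decoder sketch for the record stream in the fixtures above. The
# layout is inferred from the byte strings themselves, not taken from the
# proxy sources: after a 4-byte prefix (0xa3 = 163 here, plausibly a record
# count), each record is a 4-byte big-endian register address plus a type
# byte - 0x53 'S' with a 2-byte short, 0x49 'I' with a 4-byte int, 0x46 'F'
# with a 4-byte IEEE-754 float, 0x54 'T' with a length-prefixed string.
import struct

def decode_records(buf: bytes):
    offs = 4                                   # skip the assumed record count
    while offs + 5 <= len(buf):
        addr, data_type = struct.unpack_from('>LB', buf, offs)
        offs += 5
        if data_type == 0x53:                  # 'S': unsigned short
            value = struct.unpack_from('>H', buf, offs)[0]
            offs += 2
        elif data_type == 0x49:                # 'I': signed int
            value = struct.unpack_from('>l', buf, offs)[0]
            offs += 4
        elif data_type == 0x46:                # 'F': IEEE-754 float
            value = struct.unpack_from('>f', buf, offs)[0]
            offs += 4
        elif data_type == 0x54:                # 'T': counted string
            str_len = buf[offs]
            value = buf[offs + 1:offs + 1 + str_len].decode()
            offs += 1 + str_len
        else:
            break                              # unknown type byte, stop
        yield addr, value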
def test_parse_control(ContrDataSeq):
i = Infos()
@@ -19,37 +101,341 @@ def test_parse_control(ContrDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "Data_Up_Interval": 300}, "env": {"Signal_Strength": 100}, "total": {"Power_On_Time": 29}})
def test_build_ha_conf():
i = Infos()
d_json, id = next (i.ha_confs(prfx="tsun/garagendach/", snr='123'))
assert id == 'out_power_123'
assert d_json == json.dumps({"name": "Actual Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300}})
def test_build_ha_conf2():
def test_parse_inverter(InvDataSeq):
i = Infos()
for key, result in i.parse (InvDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq):
i = Infos()
for key, result in i.parse (ContrDataSeq):
pass
for key, result in i.parse (InvDataSeq):
pass
assert json.dumps(i.db) == json.dumps(
{
"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300},
"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
def test_build_ha_conf1(ContrDataSeq):
i = Infos()
i.static_init() # initialize counter
tests = 0
for d_json, id in i.ha_confs(prfx="tsun/garagendach/", snr='123'):
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False):
if id == 'out_power_123':
assert d_json == json.dumps({"name": "Actual Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'daily_gen_123':
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'power_pv1_123':
assert d_json == json.dumps({"name": "Power PV1", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'total_gen_123':
assert d_json == json.dumps({"name": "Total Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total", "uniq_id": "total_gen_123", "val_tpl": "{{value_json['Total_Generation'] | float}}", "unit_of_meas": "kWh", "icon": "mdi:solar-power", "dev": {"name": "Microinverter", "mdl": "MS-600", "ids": ["inverter_123"], "mf": "TSUN", "sa": "", "sw": "0.01", "hw": "Hw0.01"}})
elif id == 'power_pv2_123':
assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!
elif id == 'signal_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "via_device": "proxy", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'inv_count_456':
assert False
assert tests==4
def test_build_ha_conf3():
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True):
if id == 'out_power_123':
assert False
elif id == 'daily_gen_123':
assert False
elif id == 'power_pv1_123':
assert False
elif id == 'power_pv2_123':
assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!
elif id == 'signal_123':
assert False
elif id == 'inv_count_456':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
assert tests==5
def test_build_ha_conf2(ContrDataSeq, InvDataSeq):
i = Infos()
for d_json, id in i.ha_confs(prfx="tsun/garagendach/", snr='123'):
for key, result in i.parse (ContrDataSeq):
pass
for key, result in i.parse (InvDataSeq):
pass
tests = 0
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False, sug_area = 'roof'):
if id == 'out_power_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
if id == 'daily_gen_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'power_pv1_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1 - roof", "sa": "Module PV1 - roof", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'power_pv2_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV2 - roof", "sa": "Module PV2 - roof", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
elif id == 'signal_123':
assert comp == 'sensor'
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
tests +=1
assert tests==5
def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero):
i = Infos()
tests = 0
for key, update in i.parse (InvDataSeq2):
if key == 'total':
assert update == True
tests +=1
elif key == 'env':
assert update == True
tests +=1
assert tests==4
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
tests = 0
for key, update in i.parse (InvDataSeq2):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == False
tests +=1
assert tests==4
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
tests = 0
for key, update in i.parse (InvDataSeq2_Zero):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == True
tests +=1
assert tests==4
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero):
i = Infos()
tests = 0
for key, update in i.parse (InvDataSeq2_Zero):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == True
tests +=1
assert tests==4
assert json.dumps(i.db['total']) == json.dumps({})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
tests = 0
for key, update in i.parse (InvDataSeq2_Zero):
if key == 'total':
assert update == False
tests +=1
elif key == 'env':
assert update == False
tests +=1
assert tests==4
assert json.dumps(i.db['total']) == json.dumps({})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
tests = 0
for key, update in i.parse (InvDataSeq2):
if key == 'total':
assert update == True
tests +=1
elif key == 'env':
assert update == True
tests +=1
assert tests==4
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
def test_statistic_counter():
i = Infos()
val = i.dev_value("Test-String")
assert val == "Test-String"
val = i.dev_value(0xffffffff) # invalid addr
assert val == None
val = i.dev_value(0xffffff00) # valid addr but not initialized
assert val == None or val == 0
i.static_init() # initialize counter
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0}})
val = i.dev_value(0xffffff00) # valid and initialized addr
assert val == 0
i.inc_counter('Inverter_Cnt')
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0}})
val = i.dev_value(0xffffff00)
assert val == 1
i.dec_counter('Inverter_Cnt')
val = i.dev_value(0xffffff00)
assert val == 0
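# The 0xffffffxx values passed to dev_value() above look like virtual register
# addresses that Infos maps onto the proxy statistics counters - an inference
# from these asserts, not from the sources: 0xffffff00 -> 'Inverter_Cnt',
# 0xffffff03 -> 'Invalid_Data_Type', 0xffffff04 -> 'Internal_Error'.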
def test_dep_rules():
i = Infos()
i.static_init() # initialize counter
res = i.ignore_this_device({})
assert res == True
res = i.ignore_this_device({'reg':0xffffffff})
assert res == True
i.inc_counter('Inverter_Cnt') # is 1
val = i.dev_value(0xffffff00)
assert val == 1
res = i.ignore_this_device({'reg':0xffffff00})
assert res == True
res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})
assert res == False
res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})
assert res == True
i.inc_counter('Inverter_Cnt') # is 2
res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})
assert res == False
res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})
assert res == False
i.inc_counter('Inverter_Cnt') # is 3
res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})
assert res == True
res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})
assert res == False
def test_table_definition():
i = Infos()
i.static_init() # initialize counter
val = i.dev_value(0xffffff04) # check internal error counter
assert val == 0
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False, sug_area = 'roof'):
pass
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
pass
val = i.dev_value(0xffffff04) # check internal error counter
assert val == 0
# test missing 'fmt' value
Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test1'], 'singleton': True, 'ha':{'dev':'proxy', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test1_'}}
tests = 0
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
if id == 'intern_test1_456':
tests +=1
assert tests == 1
val = i.dev_value(0xffffff04) # check internal error counter
assert val == 1
# test missing 'dev' value
Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
tests = 0
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
if id == 'intern_test2_456':
tests +=1
assert tests == 1
val = i.dev_value(0xffffff04) # check internal error counter
assert val == 2
# test invalid 'via' value
Infos._Infos__info_devs['test_dev'] = {'via':'xyz', 'name':'Module PV1'}
Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev':'test_dev', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
tests = 0
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
if id == 'intern_test2_456':
tests +=1
assert tests == 1
val = i.dev_value(0xffffff04) # check internal error counter
assert val == 3
def test_invalid_data_type(InvalidDataSeq):
i = Infos()
i.static_init() # initialize counter
val = i.dev_value(0xffffff03) # check invalid data type counter
assert val == 0
for key, result in i.parse (InvalidDataSeq):
pass
assert json.dumps(i.db) == json.dumps({"inverter": {"Product_Name": "Microinv"}})
val = i.dev_value(0xffffff03) # check invalid data type counter
assert val == 1

View File

@@ -1,21 +1,29 @@
# test_with_pytest.py
import pytest
import pytest, logging
from app.src.messages import Message, Control
from app.src.config import Config
from app.src.infos import Infos
# initialize the proxy statistics
Infos.static_init()
tracer = logging.getLogger('tracer')
class MemoryStream(Message):
def __init__(self, msg, chunks = (0,)):
super().__init__()
def __init__(self, msg, chunks = (0,), server_side: bool = True):
super().__init__(server_side)
self.__msg = msg
self.__msg_len = len(msg)
self.__chunks = chunks
self.__offs = 0
self.__chunk_idx = 0
self.msg_count = 0
self.server_side = False
self.addr = 'Test: SrvSide'
def append_msg(self, msg):
self.__msg += msg
self.__msg_len += len(msg)
def _read(self) -> int:
copied_bytes = 0
try:
@@ -40,9 +48,6 @@ class MemoryStream(Message):
self.msg_count += 1
return
def __del__ (self):
super().__del__()
@pytest.fixture
def MsgContactInfo(): # Contact Info message
@@ -56,11 +61,92 @@ def MsgContactInfo_LongId(): # Contact Info message with longer ID
@pytest.fixture
def Msg2ContactInfo(): # two Contact Info messages
Config.config = {'tsun':{'enabled': True}}
return b'\x00\x00\x00\x2c\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub\x40123456\x00\x00\x00\x2c\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp(): # Contact Response message
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x00\x01'
@pytest.fixture
def MsgContactResp2(): # Contact Response message
return b'\x00\x00\x00\x14\x10R170000000000002\x99\x00\x01'
@pytest.fixture
def MsgContactInvalid(): # invalid Contact message (unknown control byte)
return b'\x00\x00\x00\x14\x10R170000000000001\x93\x00\x01'
@pytest.fixture
def MsgGetTime(): # Get Time Request message
return b'\x00\x00\x00\x13\x10R170000000000001\x91\x22'
@pytest.fixture
def MsgTimeResp(): # Get Time Response message
return b'\x00\x00\x00\x1b\x10R170000000000001\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
@pytest.fixture
def MsgTimeInvalid(): # invalid Get Time message (unknown control byte)
return b'\x00\x00\x00\x13\x10R170000000000001\x94\x22'
@pytest.fixture
def MsgControllerInd(): # Data indication from the controller
msg = b'\x00\x00\x01\x2f\x10R170000000000001\x91\x71\x0e\x10\x00\x00\x10R170000000000001'
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x55\x50'
msg += b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
return msg
@pytest.fixture
def MsgControllerAck(): # Controller Data Ack message
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x71\x01'
@pytest.fixture
def MsgControllerInvalid(): # invalid Controller Data message
return b'\x00\x00\x00\x14\x10R170000000000001\x92\x71\x01'
@pytest.fixture
def MsgInverterInd(): # Data indication from the inverter
msg = b'\x00\x00\x00\x8b\x10R170000000000001\x91\x04\x01\x90\x00\x01\x10R170000000000001'
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def MsgInverterAck(): # Inverter Data Ack message
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x04\x01'
@pytest.fixture
def MsgInverterInvalid(): # invalid Inverter Data message
return b'\x00\x00\x00\x14\x10R170000000000001\x92\x04\x01'
@pytest.fixture
def MsgUnknown(): # message with unknown msg id (0x17)
return b'\x00\x00\x00\x17\x10R170000000000001\x91\x17\x01\x02\x03\x04'
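# A sketch of the framing visible in these fixtures, inferred from the byte
# strings and the header_len/data_len asserts below (not a protocol spec):
# a 4-byte big-endian length counting everything after itself, a 1-byte id
# length (0x10 = 16), the logger serial number, a control byte (0x91 request,
# 0x99 response) and a message id (0x00 contact, 0x22 get time, 0x71
# controller data, 0x04 inverter data).
import struct

def split_header(frame: bytes):
    rest_len = struct.unpack_from('>L', frame, 0)[0]
    id_len = frame[4]
    id_str = frame[5:5 + id_len]               # e.g. b"R170000000000001"
    ctrl = frame[5 + id_len]
    msg_id = frame[6 + id_len]
    header_len = 7 + id_len                    # 23 for a 16-byte serial
    data_len = rest_len - id_len - 3           # payload behind the header
    return id_str, ctrl, msg_id, header_len, data_len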
@pytest.fixture
def ConfigTsunAllowAll():
Config.config = {'tsun':{'enabled': True}, 'inverters':{'allow_all':True}}
@pytest.fixture
def ConfigNoTsunInv1():
Config.config = {'tsun':{'enabled': False},'inverters':{'R170000000000001':{'node_id':'inv1','suggested_area':'roof'}}}
@pytest.fixture
def ConfigTsunInv1():
Config.config = {'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1','suggested_area':'roof'}}}
def test_read_message(MsgContactInfo):
m = MemoryStream(MsgContactInfo, (0,))
@@ -68,11 +154,39 @@ def test_read_message(MsgContactInfo):
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == None
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
assert m._forward_buffer==b''
m.close()
def test_read_message_twice(ConfigNoTsunInv1, MsgInverterInd):
ConfigNoTsunInv1
m = MemoryStream(MsgInverterInd, (0,))
m.append_msg(MsgInverterInd)
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==120
assert m._forward_buffer==b''
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==120
assert m._forward_buffer==b''
m.close()
def test_read_message_long_id(MsgContactInfo_LongId):
m = MemoryStream(MsgContactInfo_LongId, (23,24))
@@ -83,6 +197,7 @@ def test_read_message_long_id(MsgContactInfo_LongId):
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
assert m.id_str == b"R1700000000000011"
assert m.unique_id == 0
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==24
@@ -90,6 +205,7 @@ def test_read_message_long_id(MsgContactInfo_LongId):
m.read() # try to read rest of message, but there is no chunk available
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
m.close()
def test_read_message_in_chunks(MsgContactInfo):
@@ -101,6 +217,7 @@ def test_read_message_in_chunks(MsgContactInfo):
assert m.header_valid # must be valid, since header is complete but not the msg
assert m.msg_count == 0
assert m.id_str == b"R170000000000001"
assert m.unique_id == 0
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
@@ -108,6 +225,7 @@ def test_read_message_in_chunks(MsgContactInfo):
m.read() # read rest of message
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
m.close()
def test_read_message_in_chunks2(MsgContactInfo):
m = MemoryStream(MsgContactInfo, (4,10,0))
@@ -122,6 +240,7 @@ def test_read_message_in_chunks2(MsgContactInfo):
assert m.header_len==23
assert m.data_len==25
assert m.id_str == b"R170000000000001"
assert m.unique_id == None
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.msg_count == 1
@@ -129,25 +248,264 @@ def test_read_message_in_chunks2(MsgContactInfo):
pass
assert m.msg_count == 1
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
m.close()
def test_read_two_messages(Msg2ContactInfo):
def test_read_two_messages(ConfigTsunAllowAll, Msg2ContactInfo,MsgContactResp,MsgContactResp2):
ConfigTsunAllowAll
m = MemoryStream(Msg2ContactInfo, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
assert m._forward_buffer==b''
assert m._send_buffer==MsgContactResp
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m._send_buffer = bytearray(0) # clear send buffer for next test
m._init_new_client_conn(b'solarhub', b'solarhub@123456')
assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub@123456'
m._send_buffer = bytearray(0) # clear send buffer for next test
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 2
assert m.id_str == b"R170000000000002"
assert m.unique_id == 'R170000000000002'
assert int(m.ctrl)==145
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==25
assert m._forward_buffer==b''
assert m._send_buffer==MsgContactResp2
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m._send_buffer = bytearray(0) # clear send buffer for next test
m._init_new_client_conn(b'solarhub', b'solarhub@123456')
assert m._send_buffer==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456'
m.close()
def test_msg_contact_resp(ConfigTsunInv1, MsgContactResp):
ConfigTsunInv1
m = MemoryStream(MsgContactResp, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_contact_invalid(ConfigTsunInv1, MsgContactInvalid):
ConfigTsunInv1
m = MemoryStream(MsgContactInvalid, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==147
assert m.msg_id==0
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgContactInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_get_time(ConfigTsunInv1, MsgGetTime):
ConfigTsunInv1
m = MemoryStream(MsgGetTime, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==MsgGetTime
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_time_resp(ConfigTsunInv1, MsgTimeResp):
ConfigTsunInv1
m = MemoryStream(MsgTimeResp, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==8
assert m._forward_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_time_invalid(ConfigTsunInv1, MsgTimeInvalid):
ConfigTsunInv1
m = MemoryStream(MsgTimeInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==148
assert m.msg_id==34
assert m.header_len==23
assert m.data_len==0
assert m._forward_buffer==MsgTimeInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_cntrl_ind(ConfigTsunInv1, MsgControllerInd, MsgControllerAck):
ConfigTsunInv1
m = MemoryStream(MsgControllerInd, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==113
assert m.header_len==23
assert m.data_len==284
assert m._forward_buffer==MsgControllerInd
assert m._send_buffer==MsgControllerAck
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_cntrl_ack(ConfigTsunInv1, MsgControllerAck):
ConfigTsunInv1
m = MemoryStream(MsgControllerAck, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==113
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_cntrl_invalid(ConfigTsunInv1, MsgControllerInvalid):
ConfigTsunInv1
m = MemoryStream(MsgControllerInvalid, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==146
assert m.msg_id==113
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgControllerInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_inv_ind(ConfigTsunInv1, MsgInverterInd, MsgInverterAck):
ConfigTsunInv1
tracer.setLevel(logging.DEBUG)
m = MemoryStream(MsgInverterInd, (0,))
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==120
assert m._forward_buffer==MsgInverterInd
assert m._send_buffer==MsgInverterAck
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_inv_ack(ConfigTsunInv1, MsgInverterAck):
ConfigTsunInv1
tracer.setLevel(logging.ERROR)
m = MemoryStream(MsgInverterAck, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==153
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==b''
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
m.close()
def test_msg_inv_invalid(ConfigTsunInv1, MsgInverterInvalid):
ConfigTsunInv1
m = MemoryStream(MsgInverterInvalid, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==146
assert m.msg_id==4
assert m.header_len==23
assert m.data_len==1
assert m._forward_buffer==MsgInverterInvalid
assert m._send_buffer==b''
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
m.close()
def test_msg_unknown(ConfigTsunInv1, MsgUnknown):
ConfigTsunInv1
m = MemoryStream(MsgUnknown, (0,), False)
m.db.stat['proxy']['Unknown_Ctrl'] = 0
m.read() # read complete msg, and dispatch msg
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
assert m.msg_count == 1
assert m.id_str == b"R170000000000001"
assert m.unique_id == 'R170000000000001'
assert int(m.ctrl)==145
assert m.msg_id==23
assert m.header_len==23
assert m.data_len==4
assert m._forward_buffer==MsgUnknown
assert m._send_buffer==b''
assert 1 == m.db.stat['proxy']['Unknown_Msg']
m.close()
def test_ctrl_byte():
c = Control(0x91)
@@ -158,3 +516,38 @@ def test_ctrl_byte():
assert c.is_resp()
def test_msg_iterator():
m1 = Message(server_side=True)
m2 = Message(server_side=True)
m3 = Message(server_side=True)
m3.close()
del m3
test1 = 0
test2 = 0
for key in Message:
if key == m1:
test1+=1
elif key == m2:
test2+=1
else:
assert False
assert test1 == 1
assert test2 == 1
def test_proxy_counter():
m = Message(server_side=True)
assert m.new_data == {}
m.db.stat['proxy']['Unknown_Msg'] = 0
m.new_stat_data['proxy'] = False
m.inc_counter('Unknown_Msg')
assert m.new_data == {}
assert m.new_stat_data == {'proxy': True}
assert 1 == m.db.stat['proxy']['Unknown_Msg']
m.new_stat_data['proxy'] = False
m.dec_counter('Unknown_Msg')
assert m.new_data == {}
assert m.new_stat_data == {'proxy': True}
assert 0 == m.db.stat['proxy']['Unknown_Msg']
m.close()

View File

@@ -52,6 +52,7 @@ services:
mqtt:
container_name: mqtt-broker
image: eclipse-mosquitto:2
restart: unless-stopped
expose:
- 1883
volumes:
@@ -66,20 +67,17 @@ services:
####### T S U N - P R O X Y ######
tsun-proxy:
container_name: tsun-proxy
image: docker.io/sallius/tsun-gen3-proxy:latest
build:
context: https://github.com/s-allius/tsun-gen3-proxy.git#main:app
args:
- UID=1026
image: ghcr.io/s-allius/tsun-gen3-proxy:latest
restart: unless-stopped
depends_on:
- mqtt
environment:
- TZ=Europe/Brussels
- SERVICE_NAME=tsun-proxy
- UID=${UID:-1000}
- GID=${GID:-1000}
dns:
- 8.8.8.8
- 4.4.4.4
- ${DNS1:-8.8.8.8}
- ${DNS2:-4.4.4.4}
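# ${VAR:-default} is Docker Compose variable substitution: values come from
# the shell environment or an .env file next to the compose file, falling
# back to the default after ':-' when unset. A hypothetical .env providing
# the four variables used above:
#   UID=1000
#   GID=1000
#   DNS1=192.168.178.1
#   DNS2=1.1.1.1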
ports:
- 5005:5005
volumes:

View File

@@ -0,0 +1,213 @@
# test_with_pytest.py and scapy
#
import pytest, socket, time
#from scapy.all import *
#from scapy.layers.inet import IP, TCP, TCP_client
def get_sn() -> bytes:
return b'R170000000000001'
def get_inv_no() -> bytes:
return b'T170000000000001'
def get_invalid_sn() -> bytes:
return b'R170000000000002'
@pytest.fixture
def MsgContactInfo(): # Contact Info message
return b'\x00\x00\x00\x2c\x10'+get_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp(): # Contact Response message
return b'\x00\x00\x00\x14\x10'+get_sn()+b'\x99\x00\x01'
@pytest.fixture
def MsgContactInfo2(): # Contact Info message with unknown serial number
return b'\x00\x00\x00\x2c\x10'+get_invalid_sn()+b'\x91\x00\x08solarhub\x0fsolarhub\x40123456'
@pytest.fixture
def MsgContactResp2(): # Contact Response message
return b'\x00\x00\x00\x14\x10'+get_invalid_sn()+b'\x99\x00\x01'
@pytest.fixture
def MsgTimeStampReq(): # Get Time Request message
return b'\x00\x00\x00\x13\x10'+get_sn()+b'\x91\x22'
@pytest.fixture
def MsgTimeStampResp(): # Get Time Response message
return b'\x00\x00\x00\x1b\x10'+get_sn()+b'\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
@pytest.fixture
def MsgControllerInd(): # Data indication from the controller
msg = b'\x00\x00\x01\x2f\x10'+ get_sn() + b'\x91\x71\x0e\x10\x00\x00\x10'+get_sn()
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x55\x50'
msg += b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
return msg
@pytest.fixture
def MsgInvData(): # Device data indication from the inverter
msg = b'\x00\x00\x00\x8b\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no()
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
return msg
@pytest.fixture
def MsgInverterInd(): # Data indication from the inverter
msg = b'\x00\x00\x05\x02\x10'+ get_sn() + b'\x91\x04\x01\x90\x00\x01\x10'+get_inv_no()
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
msg += b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
msg += b'\x53\x00\x00'
return msg
@pytest.fixture(scope="session")
def ClientConnection():
#host = '172.16.30.7'
host = 'logger.talent-monitoring.com'
#host = '127.0.0.1'
port = 5005
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.settimeout(1)
yield s
s.close()
def tempClientConnection():
#host = '172.16.30.7'
host = 'logger.talent-monitoring.com'
#host = '127.0.0.1'
port = 5005
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.connect((host, port))
s.settimeout(1)
yield s
s.close()
def test_open_close():
try:
for s in tempClientConnection():
pass
except Exception:
assert False
assert True
def test_send_contact_info1(ClientConnection, MsgContactInfo, MsgContactResp):
s = ClientConnection
try:
s.sendall(MsgContactInfo)
data = s.recv(1024)
except TimeoutError:
pass
assert data == MsgContactResp
def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, MsgContactResp):
s = ClientConnection
try:
s.sendall(MsgContactInfo2)
data = s.recv(1024)
except TimeoutError:
assert True
else:
assert False
try:
s.sendall(MsgContactInfo)
data = s.recv(1024)
except TimeoutError:
pass
assert data == MsgContactResp
def test_send_contact_info3(ClientConnection, MsgContactInfo, MsgContactResp, MsgTimeStampReq):
s = ClientConnection
try:
s.sendall(MsgContactInfo)
data = s.recv(1024)
except TimeoutError:
pass
assert data == MsgContactResp
try:
s.sendall(MsgTimeStampReq)
data = s.recv(1024)
except TimeoutError:
pass
def test_send_contact_resp(ClientConnection, MsgContactResp):
s = ClientConnection
try:
s.sendall(MsgContactResp)
data = s.recv(1024)
except TimeoutError:
assert True
else:
assert data == b''
def test_send_ctrl_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgControllerInd):
s = ClientConnection
try:
s.sendall(MsgTimeStampReq)
data = s.recv(1024)
except TimeoutError:
pass
# time.sleep(2.5)
# assert data == MsgTimeStampResp
try:
s.sendall(MsgControllerInd)
data = s.recv(1024)
except TimeoutError:
pass
def test_send_inv_data(ClientConnection, MsgTimeStampReq, MsgTimeStampResp, MsgInvData, MsgInverterInd):
s = ClientConnection
try:
s.sendall(MsgTimeStampReq)
data = s.recv(1024)
except TimeoutError:
pass
# time.sleep(32.5)
# assert data == MsgTimeStampResp
try:
s.sendall(MsgInvData)
data = s.recv(1024)
s.sendall(MsgInverterInd)
data = s.recv(1024)
except TimeoutError:
pass