Compare commits
469 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
446a0d88f0 | ||
|
|
ae4565238b | ||
|
|
abdbdefc18 | ||
|
|
24b092b69e | ||
|
|
4ffe8a395e | ||
|
|
868d3ef312 | ||
|
|
ce050e8ab5 | ||
|
|
354c204ce6 | ||
|
|
d8200b5a35 | ||
|
|
cf1563dd55 | ||
|
|
962f6ee5fb | ||
|
|
9e60ad4bcd | ||
|
|
20503b46ab | ||
|
|
1781dba065 | ||
|
|
26ca006853 | ||
|
|
a93368336a | ||
|
|
772da4fa96 | ||
|
|
338b86964d | ||
|
|
3f4997a0fd | ||
|
|
35952654db | ||
|
|
2220517b55 | ||
|
|
55c403a754 | ||
|
|
3bf245300d | ||
|
|
b28792a654 | ||
|
|
d4ecbfe2b3 | ||
|
|
84dfd17d7b | ||
|
|
7067977d78 | ||
|
|
f227faa16e | ||
|
|
9572fbf609 | ||
|
|
3fddbe9456 | ||
|
|
34c09edb07 | ||
|
|
ef3ed5d287 | ||
|
|
7878c54ecc | ||
|
|
097e0d2104 | ||
|
|
181a22c369 | ||
|
|
89ab2eea3f | ||
|
|
264e09651f | ||
|
|
3bb360d359 | ||
|
|
8731d1bae3 | ||
|
|
4f0918ef83 | ||
|
|
5501288b51 | ||
|
|
851122515e | ||
|
|
badc065b7a | ||
|
|
aea6cc9763 | ||
|
|
92d1e648ae | ||
|
|
879b6608b3 | ||
|
|
b69e7e2242 | ||
|
|
0913fde126 | ||
|
|
bedbe08eeb | ||
|
|
3c81d446dd | ||
|
|
b335881500 | ||
|
|
ac7b02bde9 | ||
|
|
47a89c269f | ||
|
|
be3b4d6df0 | ||
|
|
a5b2b4b7c2 | ||
|
|
668c631018 | ||
|
|
07c989a305 | ||
|
|
28cf875533 | ||
|
|
9bae905c08 | ||
|
|
45b57109a8 | ||
|
|
2c69044bf8 | ||
|
|
3bada76516 | ||
|
|
84231c034c | ||
|
|
d4fd396dcf | ||
|
|
976eaed9ea | ||
|
|
211a958080 | ||
|
|
5ced5ff06a | ||
|
|
78a35b5513 | ||
|
|
9b22fe354c | ||
|
|
a6ad3d4f0d | ||
|
|
4993676614 | ||
|
|
10a18237c7 | ||
|
|
8d67f1745d | ||
|
|
9eb7c7fbe0 | ||
|
|
6c6109d421 | ||
|
|
7d0ea41728 | ||
|
|
ce5bd6eb0a | ||
|
|
6122f40718 | ||
|
|
c5f184a730 | ||
|
|
6da5d2cef6 | ||
|
|
db06d8c8e6 | ||
|
|
3863454a84 | ||
|
|
5775cb1ce3 | ||
|
|
5d61a261b1 | ||
|
|
bbda66e455 | ||
|
|
0c7bf7956d | ||
|
|
6b9c13ddfe | ||
|
|
a6ffcc0949 | ||
|
|
c956c13d13 | ||
|
|
85fe7261d5 | ||
|
|
d4b618742c | ||
|
|
719c6f703a | ||
|
|
62ea2a9e6f | ||
|
|
166a856705 | ||
|
|
bfea38d9da | ||
|
|
d5ec47fd1e | ||
|
|
828f26cf24 | ||
|
|
0b3d84ff36 | ||
|
|
5642c912a8 | ||
|
|
614acbf32d | ||
|
|
57525ca519 | ||
|
|
5ef68280b1 | ||
|
|
e12c78212f | ||
|
|
2ab35a8257 | ||
|
|
865216b8d9 | ||
|
|
5d5d7c218f | ||
|
|
be4c6ac77f | ||
|
|
a9dc7e6847 | ||
|
|
270732f1d0 | ||
|
|
7b4fabdc25 | ||
|
|
2351ec314a | ||
|
|
604d30c711 | ||
|
|
ab5256659b | ||
|
|
a76c0ac440 | ||
|
|
215dcd98e6 | ||
|
|
627ca97360 | ||
|
|
d2b88ab838 | ||
|
|
6d9addc7d5 | ||
|
|
1bb08fb211 | ||
|
|
193eea65af | ||
|
|
2b8dacb0de | ||
|
|
cb0c69944f | ||
|
|
7f41365815 | ||
|
|
5db3fbf495 | ||
|
|
d44726c0f3 | ||
|
|
1985557bce | ||
|
|
7dc2595d71 | ||
|
|
6d9a446bfe | ||
|
|
f9c1b83ccd | ||
|
|
58b42f7d7c | ||
|
|
27045cac6e | ||
|
|
54de2aecfe | ||
|
|
5a39370cc3 | ||
|
|
7a9b23d068 | ||
|
|
e34afcb523 | ||
|
|
22df381da5 | ||
|
|
117e6a7570 | ||
|
|
65de946992 | ||
|
|
33d385db10 | ||
|
|
1e610af1df | ||
|
|
db1169f61f | ||
|
|
383be10e87 | ||
|
|
b364fb3f8e | ||
|
|
a42ba8a8c6 | ||
|
|
f3e69ff217 | ||
|
|
a3c054d2b1 | ||
|
|
c34b33ed5f | ||
|
|
0a18918326 | ||
|
|
aa3bb4a1fa | ||
|
|
a62864218d | ||
|
|
0b2631c162 | ||
|
|
c59bd16664 | ||
|
|
039a021cda | ||
|
|
49e2dfbd86 | ||
|
|
e6ecf5911b | ||
|
|
6e1ed5d1e7 | ||
|
|
ad885e9644 | ||
|
|
8f81ceda98 | ||
|
|
8204cae2b1 | ||
|
|
8baa68e615 | ||
|
|
56f36e9f3f | ||
|
|
5b60d5dae1 | ||
|
|
c1c38ab5c7 | ||
|
|
ec4261ae84 | ||
|
|
be57d11214 | ||
|
|
685c2dc07b | ||
|
|
d27fe09006 | ||
|
|
e850a8c534 | ||
|
|
33f215def2 | ||
|
|
4be726166e | ||
|
|
20f4fd647c | ||
|
|
407c1ceb2b | ||
|
|
c6eecb4791 | ||
|
|
87d59d046f | ||
|
|
063850c7fb | ||
|
|
17c33601a0 | ||
|
|
3980ac013b | ||
|
|
66657888dd | ||
|
|
ab9e798152 | ||
|
|
fdf3475909 | ||
|
|
edc2c12b5b | ||
|
|
5c6f9e7414 | ||
|
|
0fc74b0d19 | ||
|
|
87cc3fb205 | ||
|
|
8fc5eb3670 | ||
|
|
55fc834a1e | ||
|
|
da2388941e | ||
|
|
9e38cb93ea | ||
|
|
de1c48fa62 | ||
|
|
e432441134 | ||
|
|
98ef252bb0 | ||
|
|
25e3db36c4 | ||
|
|
3ac48dad1f | ||
|
|
eff3e7558b | ||
|
|
6ef6f4cd34 | ||
|
|
177706c3e6 | ||
|
|
9ac1f6f46d | ||
|
|
3cc5f3ec53 | ||
|
|
23ff2bb05c | ||
|
|
c761446c11 | ||
|
|
f30aa07431 | ||
|
|
476c5f0006 | ||
|
|
282a459ef0 | ||
|
|
d25173e591 | ||
|
|
9c39ea27f7 | ||
|
|
766774224b | ||
|
|
f4da16987f | ||
|
|
841877305d | ||
|
|
fb5c6a74cf | ||
|
|
14425da5fa | ||
|
|
6877465915 | ||
|
|
2e214b1e71 | ||
|
|
036af8e127 | ||
|
|
92469456b7 | ||
|
|
1658036a26 | ||
|
|
1ae7784bee | ||
|
|
e43a02c508 | ||
|
|
4ea70dee64 | ||
|
|
6fcf4f47c2 | ||
|
|
73baffe9e0 | ||
|
|
3fda08bd25 | ||
|
|
0e7fbc7820 | ||
|
|
26f108cc51 | ||
|
|
dd438bf201 | ||
|
|
f48596a512 | ||
|
|
6a64484174 | ||
|
|
def5702415 | ||
|
|
b3f0fc97d7 | ||
|
|
65973b2835 | ||
|
|
b240b74994 | ||
|
|
93e82a2284 | ||
|
|
537d81fa19 | ||
|
|
5fe455e42f | ||
|
|
5a0456650f | ||
|
|
41d9a2a1ef | ||
|
|
a869ead89a | ||
|
|
91873d0c34 | ||
|
|
c4b3e1a817 | ||
|
|
0ac4b1f571 | ||
|
|
2ec0a59cd3 | ||
|
|
2d176894d3 | ||
|
|
0ae6dffc6b | ||
|
|
5fc1b16627 | ||
|
|
eab109ddab | ||
|
|
1b6bee12de | ||
|
|
2301511242 | ||
|
|
3fd528bdbe | ||
|
|
e15387b1ff | ||
|
|
02d9f01947 | ||
|
|
39beb0cb44 | ||
|
|
d5010fe053 | ||
|
|
54d2bf4439 | ||
|
|
f804b755a4 | ||
|
|
bf0f152d5a | ||
|
|
29ee540a19 | ||
|
|
5822f5de50 | ||
|
|
283ae31af2 | ||
|
|
808bf2fe87 | ||
|
|
fa2626ec7a | ||
|
|
eda8ef1db6 | ||
|
|
3dbcee63f6 | ||
|
|
f2c4230a49 | ||
|
|
763af8b4cf | ||
|
|
a2f67e7d3e | ||
|
|
f78d4ac310 | ||
|
|
fdedfcbf8e | ||
|
|
494c30e489 | ||
|
|
30dc802fb2 | ||
|
|
5fdad484f4 | ||
|
|
dba3b458ba | ||
|
|
1d9cbf314e | ||
|
|
58c3333fcc | ||
|
|
530687039d | ||
|
|
5d0c95d6e6 | ||
|
|
e603bb9baa | ||
|
|
e8902f7923 | ||
|
|
b1e577d357 | ||
|
|
4e8fd8e2a2 | ||
|
|
d34862260e | ||
|
|
c061d263eb | ||
|
|
ccc7e7959e | ||
|
|
7b4ed406a1 | ||
|
|
549fca8ae5 | ||
|
|
f73376b330 | ||
|
|
220f2cce18 | ||
|
|
e2a5c7e640 | ||
|
|
2e64ae5884 | ||
|
|
95ebb92f05 | ||
|
|
59dabbfa4a | ||
|
|
aa0d432149 | ||
|
|
6dbf259e44 | ||
|
|
184d0464c9 | ||
|
|
f29de66477 | ||
|
|
5130211985 | ||
|
|
4faf44db91 | ||
|
|
a571a3b456 | ||
|
|
9a698781db | ||
|
|
6f9d2d4fac | ||
|
|
111af8f469 | ||
|
|
b197212af8 | ||
|
|
27ac47fde9 | ||
|
|
ee1722e374 | ||
|
|
b46645daee | ||
|
|
220fe3d4c9 | ||
|
|
82514e9e41 | ||
|
|
6035e52234 | ||
|
|
8998c583ab | ||
|
|
77b0827b73 | ||
|
|
ccce1fd21a | ||
|
|
3a5e4648a1 | ||
|
|
b6c0dbdea5 | ||
|
|
d6d882ef78 | ||
|
|
3b2028c4c2 | ||
|
|
d85206c12b | ||
|
|
2763853b76 | ||
|
|
8314fd177a | ||
|
|
c4d9b10d0f | ||
|
|
4c923b0ded | ||
|
|
44c9b80c7e | ||
|
|
1f70bd49c5 | ||
|
|
6eec4b312e | ||
|
|
3d09d592a6 | ||
|
|
b1ea63b00d | ||
|
|
9682379bcd | ||
|
|
19c143d894 | ||
|
|
64362dad21 | ||
|
|
f4aa7004e5 | ||
|
|
2ade04e6cc | ||
|
|
c1e114447a | ||
|
|
0e63c45302 | ||
|
|
f6af744864 | ||
|
|
31e049630d | ||
|
|
ac0bf2f8f8 | ||
|
|
05b576b198 | ||
|
|
57bbd986b3 | ||
|
|
32ab49b566 | ||
|
|
1bee5046ed | ||
|
|
bdd9a0c27d | ||
|
|
03125782bc | ||
|
|
74ac6c6666 | ||
|
|
feb9e08855 | ||
|
|
789cf99e27 | ||
|
|
c5c49c5f24 | ||
|
|
1d3a44c9f0 | ||
|
|
22f68ab330 | ||
|
|
edab268faa | ||
|
|
d1e10b36ea | ||
|
|
b0f8817357 | ||
|
|
8431123356 | ||
|
|
70df843fe2 | ||
|
|
300196a9fc | ||
|
|
8b20af692f | ||
|
|
234eb26eae | ||
|
|
1760a764ea | ||
|
|
26b7ccd40f | ||
|
|
ddde988e2c | ||
|
|
9264c936c8 | ||
|
|
e93432f318 | ||
|
|
97da24c839 | ||
|
|
06b896d6e9 | ||
|
|
9d395af986 | ||
|
|
35bbfee80a | ||
|
|
0779bb96f0 | ||
|
|
93b89062f5 | ||
|
|
4d6813ae7c | ||
|
|
9159882f85 | ||
|
|
214f3dfae5 | ||
|
|
b9731d43a6 | ||
|
|
eadd85a125 | ||
|
|
98e0f6bc69 | ||
|
|
2153d7c15c | ||
|
|
156eb06b6a | ||
|
|
8fc8a29be2 | ||
|
|
d6cc211a51 | ||
|
|
4b8773ad84 | ||
|
|
e7294e4932 | ||
|
|
3611b3d859 | ||
|
|
7b55124a7a | ||
|
|
e81a6a2a14 | ||
|
|
23b6b56cb3 | ||
|
|
65448773aa | ||
|
|
6e2f88423d | ||
|
|
7fe9dcbe60 | ||
|
|
009746a1e4 | ||
|
|
4da8f8f3b2 | ||
|
|
13b1930599 | ||
|
|
a2364115b3 | ||
|
|
8f390b67cb | ||
|
|
fa86dde991 | ||
|
|
6cfc1792ba | ||
|
|
04ba868b37 | ||
|
|
f3842d95d8 | ||
|
|
fbbf698666 | ||
|
|
ef8a461569 | ||
|
|
73c35de3e5 | ||
|
|
80f4dd722a | ||
|
|
f38fea3807 | ||
|
|
db319f6aa3 | ||
|
|
695d8a8906 | ||
|
|
e4b7ef7a0c | ||
|
|
884d4c04e6 | ||
|
|
75bdaedc31 | ||
|
|
dccf0d22e1 | ||
|
|
c4db53bd1e | ||
|
|
f69b02aaeb | ||
|
|
cdc3226adf | ||
|
|
e29c250f39 | ||
|
|
643c0026d8 | ||
|
|
340f7a5127 | ||
|
|
7cbd5f25bb | ||
|
|
27ce61adf4 | ||
|
|
3d375d86be | ||
|
|
71ec0570ac | ||
|
|
e3fdeecf82 | ||
|
|
738dd708ac | ||
|
|
5853518afe | ||
|
|
385a984fd2 | ||
|
|
37cb7cc1a1 | ||
|
|
21e46ae456 | ||
|
|
c52fc990f4 | ||
|
|
5ddc402e3c | ||
|
|
ac81b20ce7 | ||
|
|
ef1fd4f913 | ||
|
|
97079974f1 | ||
|
|
213bb28466 | ||
|
|
542f422e1e | ||
|
|
7225c20b01 | ||
|
|
d7b3ab54e8 | ||
|
|
d15741949f | ||
|
|
c476fe6278 | ||
|
|
cef28b06cd | ||
|
|
ba4a1f058f | ||
|
|
154b80df11 | ||
|
|
a7815bcf65 | ||
|
|
43f513ecbf | ||
|
|
3e217b96d9 | ||
|
|
dc8fc5e4eb | ||
|
|
9acd781fa8 | ||
|
|
5d51a0d9f8 | ||
|
|
670424451d | ||
|
|
ea95e540ec | ||
|
|
9a68542c5a | ||
|
|
d9c56fb1ab | ||
|
|
4c4628301f | ||
|
|
3dc7730084 | ||
|
|
8401833c0e | ||
|
|
b142cfbc3c | ||
|
|
5996ca2500 | ||
|
|
bd7c4ae822 | ||
|
|
e2873ffce7 | ||
|
|
f10207b5ba | ||
|
|
aeb2a82df1 | ||
|
|
3b75c45344 | ||
|
|
9edfa40054 | ||
|
|
0a566a3df2 | ||
|
|
3e7eba9998 | ||
|
|
00ddcc138f | ||
|
|
0db2c3945d | ||
|
|
690c66a13a | ||
|
|
a47ebb1511 | ||
|
|
4b7431ede9 | ||
|
|
c3430f509e | ||
|
|
51b046c351 | ||
|
|
32a669d0d1 | ||
|
|
4d9f00221c | ||
|
|
27c723b0c8 | ||
|
|
4bd59b91b3 | ||
|
|
3a3c6142b8 | ||
|
|
5d36397f2f |
3
.cover_ghaction_rc
Normal file
3
.cover_ghaction_rc
Normal file
@@ -0,0 +1,3 @@
|
||||
[run]
|
||||
branch = True
|
||||
relative_files = True
|
||||
@@ -1,2 +1,2 @@
|
||||
[run]
|
||||
branch = True
|
||||
branch = True
|
||||
|
||||
9
.env_example
Normal file
9
.env_example
Normal file
@@ -0,0 +1,9 @@
|
||||
# example file for the .env file. The .env set private values
|
||||
# which are needed for builing containers
|
||||
|
||||
# registry for debug an dev container
|
||||
PRIVAT_CONTAINER_REGISTRY=docker.io/<user>/
|
||||
|
||||
# registry for official container (preview, rc, rel)
|
||||
PUBLIC_CONTAINER_REGISTRY=ghcr.io/<user>/
|
||||
PUBLIC_CR_KEY=
|
||||
3
.github/FUNDING.yml
vendored
Normal file
3
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
ko_fi: sallius
|
||||
71
.github/workflows/python-app.yml
vendored
Normal file
71
.github/workflows/python-app.yml
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
# This workflow will install Python dependencies, run tests and lint with a single version of Python
|
||||
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python
|
||||
|
||||
name: Python application
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "main", "dev-*", "*/issue*" ]
|
||||
paths-ignore:
|
||||
- '**.md' # Do no build on *.md changes
|
||||
- '**.yml' # Do no build on *.yml changes
|
||||
- '**.yaml' # Do no build on *.yaml changes
|
||||
- '**.yuml' # Do no build on *.yuml changes
|
||||
- '**.svg' # Do no build on *.svg changes
|
||||
- '**.json' # Do no build on *.json changes
|
||||
- '**.cfg' # Do no build on *.cfg changes
|
||||
- '**.gitignore' # Do no build on *.gitignore changes
|
||||
- '**.dockerfile' # Do no build on *.dockerfile changes
|
||||
- '**.sh' # Do no build on *.sh changes
|
||||
pull_request:
|
||||
branches: [ "main", "dev-*" ]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read # allows SonarCloud to decorate PRs with analysis results
|
||||
|
||||
env:
|
||||
TZ: "Europe/Berlin"
|
||||
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
|
||||
- name: Set up Python 3.12
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
if [ -f requirements-test.txt ]; then pip install -r requirements-test.txt; fi
|
||||
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
|
||||
- name: Lint with flake8
|
||||
run: |
|
||||
# stop the build if there are Python syntax errors or undefined names
|
||||
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
|
||||
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
|
||||
flake8 --exit-zero --ignore=C901,E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505 --format=pylint --output-file=output_flake.txt --exclude=*.pyc app/src/
|
||||
- name: Test with pytest
|
||||
run: |
|
||||
python -m pytest app --cov=app/src --cov-config=.cover_ghaction_rc --cov-report=xml
|
||||
coverage report
|
||||
- name: Analyze with SonarCloud
|
||||
if: ${{ env.SONAR_TOKEN != 0 }}
|
||||
uses: SonarSource/sonarqube-scan-action@v4
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
projectBaseDir: .
|
||||
args:
|
||||
-Dsonar.projectKey=s-allius_tsun-gen3-proxy
|
||||
-Dsonar.python.coverage.reportPaths=coverage.xml
|
||||
-Dsonar.python.flake8.reportPaths=output_flake.txt
|
||||
# -Dsonar.docker.hadolint.reportPaths=
|
||||
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -1,9 +1,15 @@
|
||||
__pycache__
|
||||
.pytest_cache
|
||||
.venv/**
|
||||
bin/**
|
||||
mosquitto/**
|
||||
homeassistant/**
|
||||
ha_addons/ha_addon/rootfs/home/proxy/*
|
||||
ha_addons/ha_addon/rootfs/requirements.txt
|
||||
tsun_proxy/**
|
||||
Doku/**
|
||||
.DS_Store
|
||||
.coverage
|
||||
.env
|
||||
.venv
|
||||
coverage.xml
|
||||
|
||||
2
.hadolint.yaml
Normal file
2
.hadolint.yaml
Normal file
@@ -0,0 +1,2 @@
|
||||
ignored:
|
||||
- SC1091
|
||||
4
.markdownlint.json
Normal file
4
.markdownlint.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"MD013": false,
|
||||
"MD033": false
|
||||
}
|
||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.13.1
|
||||
4
.sonarlint/connectedMode.json
Normal file
4
.sonarlint/connectedMode.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"sonarCloudOrganization": "s-allius",
|
||||
"projectKey": "s-allius_tsun-gen3-proxy"
|
||||
}
|
||||
2
.vscode/launch.json
vendored
2
.vscode/launch.json
vendored
@@ -6,7 +6,7 @@
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Aktuelle Datei",
|
||||
"type": "python",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${file}",
|
||||
"console": "integratedTerminal",
|
||||
|
||||
27
.vscode/settings.json
vendored
27
.vscode/settings.json
vendored
@@ -1,15 +1,32 @@
|
||||
{
|
||||
"python.analysis.extraPaths": [
|
||||
"app/src",
|
||||
"app/tests",
|
||||
".venv/lib",
|
||||
],
|
||||
"python.testing.pytestArgs": [
|
||||
"-vv",
|
||||
"app",
|
||||
"-vvv",
|
||||
"--cov=app/src",
|
||||
"--cov-report=xml",
|
||||
"--cov-report=html",
|
||||
"app",
|
||||
"system_tests"
|
||||
],
|
||||
"python.testing.unittestEnabled": false,
|
||||
"python.testing.pytestEnabled": true,
|
||||
"flake8.args": [
|
||||
"--extend-exclude=app/tests/*.py system_tests/*.py"
|
||||
]
|
||||
"--extend-exclude=app/tests/*.py,system_tests/*.py"
|
||||
],
|
||||
"sonarlint.connectedMode.project": {
|
||||
"connectionId": "s-allius",
|
||||
"projectKey": "s-allius_tsun-gen3-proxy"
|
||||
},
|
||||
"files.exclude": {
|
||||
"**/*.pyi": true
|
||||
},
|
||||
"python.analysis.typeEvaluation.deprecateTypingAliases": true,
|
||||
"python.autoComplete.extraPaths": [
|
||||
".venv/lib"
|
||||
],
|
||||
"coverage-gutters.coverageBaseDir": "tsun",
|
||||
"makefile.configureOnOpen": false
|
||||
}
|
||||
218
CHANGELOG.md
218
CHANGELOG.md
@@ -5,14 +5,212 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased]
|
||||
## [unreleased]
|
||||
|
||||
## [0.12.1] - 2025-01-13
|
||||
|
||||
- addon: bump base image version to v17.1.0
|
||||
- addon: add syntax check to config parameters
|
||||
- addon: bump base image version to v17.0.2
|
||||
|
||||
## [0.12.0] - 2024-12-22
|
||||
|
||||
- add hadolint configuration
|
||||
- detect usage of a local DNS resolver [#37](https://github.com/s-allius/tsun-gen3-proxy/issues/37)
|
||||
- path for logs is now configurable by cli args
|
||||
- configure the number of keeped logfiles by cli args
|
||||
- add DOCS.md and CHANGELOG.md for add-ons
|
||||
- pin library version und update them with renovate
|
||||
- build config.yaml for add-ons by a jinja2 template
|
||||
- use gnu make to build proxy and add-on
|
||||
- make the configuration more flexible, add command line args to control this
|
||||
- fix the python path so we don't need special import paths for unit tests anymore
|
||||
- add emulator mode [#205](https://github.com/s-allius/tsun-gen3-proxy/issues/205)
|
||||
- ignore inverter replays which a older than 1 day [#246](https://github.com/s-allius/tsun-gen3-proxy/issues/246)
|
||||
- support test coverage in vscode
|
||||
- upgrade SonarQube action to version 4
|
||||
- update github action to Ubuntu 24-04
|
||||
- add initial support for home assistant add-ons from @mime24
|
||||
- github action: use ubuntu 24.04 and sonar-scanner-action 4 [#222](https://github.com/s-allius/tsun-gen3-proxy/issues/222)
|
||||
- migrate paho.mqtt CallbackAPIVersion to VERSION2 [#224](https://github.com/s-allius/tsun-gen3-proxy/issues/224)
|
||||
- add PROD_COMPL_TYPE to trace
|
||||
- add SolarmanV5 messages builder
|
||||
- report inverter alarms and faults per MQTT [#7](https://github.com/s-allius/tsun-gen3-proxy/issues/7)
|
||||
|
||||
## [0.11.1] - 2024-11-20
|
||||
|
||||
- fix pytest setup that can be startet from the rootdir
|
||||
- support python venv environment
|
||||
- add pytest.ini
|
||||
- move common settings from .vscode/settings.json into pytest.ini
|
||||
- add missing requirements
|
||||
- fix import paths for pytests
|
||||
- Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.10.5 to 3.10.11.
|
||||
|
||||
## [0.11.0] - 2024-10-13
|
||||
|
||||
- fix healthcheck on infrastructure with IPv6 support [#196](https://github.com/s-allius/tsun-gen3-proxy/issues/196)
|
||||
- refactoring: cleaner architecture, increase test coverage
|
||||
- Parse more values in Server Mode [#186](https://github.com/s-allius/tsun-gen3-proxy/issues/186)
|
||||
- GEN3: add support for new messages of version 3 firmwares [#182](https://github.com/s-allius/tsun-gen3-proxy/issues/182)
|
||||
- add support for controller MAC and serial number
|
||||
- GEN3: don't crash on overwritten msg in the receive buffer
|
||||
- Reading the version string from the image updates it even if the image is re-pulled without re-deployment
|
||||
|
||||
## [0.10.1] - 2024-08-10
|
||||
|
||||
- fix displaying the version string at startup and in HA [#153](https://github.com/s-allius/tsun-gen3-proxy/issues/153)
|
||||
|
||||
## [0.10.0] - 2024-08-09
|
||||
|
||||
- bump aiohttp to version 3.10.2
|
||||
- add SonarQube and code coverage support
|
||||
- don't send MODBUS request when state is note up; adapt timeouts [#141](https://github.com/s-allius/tsun-gen3-proxy/issues/141)
|
||||
- build multi arch images with sboms [#144](https://github.com/s-allius/tsun-gen3-proxy/issues/144)
|
||||
- add timestamp to MQTT topics [#138](https://github.com/s-allius/tsun-gen3-proxy/issues/138)
|
||||
- improve the message handling, to avoid hangs
|
||||
- GEN3: allow long timeouts until we received first inverter data (not only device data)
|
||||
- bump aiomqtt to version 2.2.0
|
||||
- bump schema to version 0.7.7
|
||||
- Home Assistant: improve inverter status value texts
|
||||
- GEN3: add inverter status
|
||||
- fix flapping registers [#128](https://github.com/s-allius/tsun-gen3-proxy/issues/128)
|
||||
- register OUTPUT_COEFFICIENT at HA
|
||||
- GEN3: INVERTER_STATUS,
|
||||
- add config option to disable the MODBUS polling [#120](https://github.com/s-allius/tsun-gen3-proxy/issues/120)
|
||||
- make the maximum output coefficient configurable [#123](https://github.com/s-allius/tsun-gen3-proxy/issues/123)
|
||||
- cleanup shutdown
|
||||
- add preview build
|
||||
- MODBUS: the last digit of the inverter version is a hexadecimal number [#119](https://github.com/s-allius/tsun-gen3-proxy/issues/119)
|
||||
- GEN3PLUS: add client_mode connection on port 8899 [#117](https://github.com/s-allius/tsun-gen3-proxy/issues/117)
|
||||
|
||||
## [0.9.0] - 2024-07-01
|
||||
|
||||
- fix exception in MODBUS timeout callback
|
||||
|
||||
## [0.9.0-RC1] - 2024-06-29
|
||||
|
||||
- add asyncio log and debug mode
|
||||
- stop the HTTP server on shutdown gracefully
|
||||
- Synchronize regular MODBUS commands with the status of the inverter to prevent the inverter from crashing due to
|
||||
unexpected packets. [#111](https://github.com/s-allius/tsun-gen3-proxy/issues/111)
|
||||
- GEN3: avoid sending MODBUS commands to the inverter during the inverter's reporting phase
|
||||
- GEN3: determine the connection timeout based on the connection state
|
||||
- GEN3: support more data encodings for DSP version V5.0.17 [#108](https://github.com/s-allius/tsun-gen3-proxy/issues/108)
|
||||
- detect dead connections [#100](https://github.com/s-allius/tsun-gen3-proxy/issues/100)
|
||||
- improve connection logging wirt a unique connection id
|
||||
- Add healthcheck, readiness and liveness checks [#91](https://github.com/s-allius/tsun-gen3-proxy/issues/91)
|
||||
- MODBUS close handler releases internal resource [#93](https://github.com/s-allius/tsun-gen3-proxy/issues/93)
|
||||
- add exception handling for message forwarding [#94](https://github.com/s-allius/tsun-gen3-proxy/issues/94)
|
||||
- GEN3: make timestamp handling stateless, to avoid blocking when the TSUN cloud is down [#56](https://github.com/s-allius/tsun-gen3-proxy/issues/56)
|
||||
- GEN3PLUS: dump invalid packages with wrong start or stop byte
|
||||
- label debug imagages als `debug`
|
||||
- print imgae build time during proxy start
|
||||
- add type annotations
|
||||
- improve async unit test and fix pytest warnings
|
||||
- run github tests even for pulls on issue branches
|
||||
|
||||
## [0.8.1] - 2024-06-21
|
||||
|
||||
- Fix MODBUS responses are dropped and not forwarded to the TSUN cloud [#104](https://github.com/s-allius/tsun-gen3-proxy/issues/104)
|
||||
- GEN3: Fix connections losts due MODBUS requests [#102](https://github.com/s-allius/tsun-gen3-proxy/issues/102)
|
||||
|
||||
## [0.8.0] - 2024-06-07
|
||||
|
||||
- improve logging: add protocol or node_id to connection logs
|
||||
- improve logging: log ignored AT+ or MODBUS commands
|
||||
- improve tracelog: log level depends on message type and source
|
||||
- fix typo in docker-compose.yaml and remove the external network definition
|
||||
- trace heartbeat and regular modbus pakets witl log level DEBUG
|
||||
- GEN3PLUS: don't forward ack paket from tsun to the inverter
|
||||
- GEN3PLUS: add allow and block filter for AT+ commands
|
||||
- catch all OSError errors in the read loop
|
||||
- log Modbus traces with different log levels
|
||||
- add Modbus fifo and timeout handler
|
||||
- build version string in the same format as TSUN for GEN3 inverters
|
||||
- add graceful shutdown
|
||||
- parse Modbus values and store them in the database
|
||||
- add cron task to request the output power every minute
|
||||
- GEN3PLUS: add MQTT topics to send AT commands to the inverter
|
||||
- add MQTT topics to send Modbus commands to the inverter
|
||||
- convert data collect interval to minutes
|
||||
- add postfix for rc and dev versions to the version number
|
||||
- change logging level to DEBUG for some logs
|
||||
- remove experimental value Register.VALUE_1
|
||||
- format Register.POWER_ON_TIME as integer
|
||||
- ignore catch-up values from the inverters for now
|
||||
|
||||
## [0.7.0] - 2024-04-20
|
||||
|
||||
- GEN3PLUS: fix temperature values
|
||||
- GEN3PLUS: read corect firmware and logger version
|
||||
- GEN3PLUS: add inverter status
|
||||
- GEN3PLUS: fix encoding of `power on time` value
|
||||
- GEN3PLUS: fix glitches in inverter data after connection establishment
|
||||
see: [#53](https://github.com/s-allius/tsun-gen3-proxy/issues/53)
|
||||
- improve docker container labels
|
||||
- GEN3PLUS: add timestamp of inverter data into log
|
||||
- config linter for *.md files
|
||||
- switch to aiomqtt version 2.0.1
|
||||
- refactor unittest and increase testcoverage
|
||||
- GEN3PLUS: add experimental handler for `ÀT` commands
|
||||
- GEN3PLUS: implement self-sufficient island support
|
||||
see: [#42](https://github.com/s-allius/tsun-gen3-proxy/issues/42)
|
||||
- Improve error messages on config errors
|
||||
see: [#46](https://github.com/s-allius/tsun-gen3-proxy/issues/46)
|
||||
- Prepare support of inverters with 6 MTPPs
|
||||
- Clear `Daily Generation` values at midnigth
|
||||
see: [#32](https://github.com/s-allius/tsun-gen3-proxy/issues/32)
|
||||
- Read pv module details from config file and use it for the Home Assistant registration
|
||||
see: [#43](https://github.com/s-allius/tsun-gen3-proxy/issues/43)
|
||||
- migrate to aiomqtt version 2.0.0
|
||||
see: [#44](https://github.com/s-allius/tsun-gen3-proxy/issues/44)
|
||||
|
||||
## [0.6.0] - 2024-04-02
|
||||
|
||||
- Refactoring to support Solarman V5 protocol
|
||||
- Add unittest for Solarman V5 implementation
|
||||
- Handle checksum errors
|
||||
- Handle wrong start or Stop bytes
|
||||
- Watch for AT commands and signal their occurrence to HA
|
||||
- Build inverter type names for MS-1600 .. MS-2000
|
||||
- Build device name for Solarman logger module
|
||||
|
||||
## [0.5.5] - 2023-12-31
|
||||
|
||||
- Fixed [#33](https://github.com/s-allius/tsun-gen3-proxy/issues/33)
|
||||
- Fixed detection of the connected inputs/MPPTs
|
||||
- Preparation for overwriting received data
|
||||
- home assistant improvements:
|
||||
- Add unit 'W' to the `Rated Power` value for home assistant
|
||||
- `Collect_Interval`, `Connect_Count` and `Data_Up_Interval` as diagnostic value and not as graph
|
||||
- Add data acquisition interval
|
||||
- Add number of connections
|
||||
- Add communication type
|
||||
- Add 'Internal SW Exception' counter
|
||||
|
||||
## [0.5.4] - 2023-11-22
|
||||
|
||||
- hardening remove dangerous commands from busybox
|
||||
- add OTA start message counter
|
||||
- add message handler for over the air updates
|
||||
- add unit tests for ota messages
|
||||
- add unit test for int64 data type
|
||||
- cleanup msg_get_time_handler
|
||||
- remove python packages setuptools, wheel, pip from final image to reduce the attack surface
|
||||
|
||||
## [0.5.3] - 2023-11-12
|
||||
|
||||
- remove apk packet manager from the final image
|
||||
- send contact info every time a client connection is established
|
||||
- use TSUN timestamp instead of local time, as TSUN also expects Central European Summer Time in winter
|
||||
|
||||
## [0.5.2] - 2023-11-09
|
||||
|
||||
- add int64 data type to info parser
|
||||
- allow multiple calls to Message.close()
|
||||
- check for race cond. on closing and establishing client connections
|
||||
|
||||
|
||||
## [0.5.1] - 2023-11-05
|
||||
|
||||
- fixes f-string by limes007
|
||||
@@ -38,7 +236,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
- count definition errors in our internal tables
|
||||
- increase test coverage of the Infos class to 100%
|
||||
- avoid resetting the daily generation counters even if the inverter sends zero values at sunset
|
||||
|
||||
|
||||
## [0.4.1] - 2023-10-20
|
||||
|
||||
- fix issue [#18](https://github.com/s-allius/tsun-gen3-proxy/issues/18)
|
||||
@@ -64,13 +262,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
|
||||
- optimize and reduce logging
|
||||
- switch to pathon 3.12
|
||||
- classify some values for diagnostics
|
||||
- classify some values for diagnostics
|
||||
|
||||
## [0.2.0] - 2023-10-07
|
||||
|
||||
This version halves the size of the Docker image and reduces the attack surface for security vulnerabilities, by omitting unneeded code. The feature set is exactly the same as the previous release version 0.1.0.
|
||||
|
||||
### Changes
|
||||
### Changes in 0.2.0
|
||||
|
||||
- move from slim-bookworm to an alpine base image
|
||||
- install python requirements with pip wheel
|
||||
@@ -103,31 +301,31 @@ This version halves the size of the Docker image and reduces the attack surface
|
||||
|
||||
❗Due to the change from one device to multiple devices in the Home Assistant, the previous MQTT device should be deleted in the Home Assistant after the update to pre-release '0.0.4'. Afterwards, the proxy must be restarted again to ensure that the sub-devices are created completely.
|
||||
|
||||
### Added
|
||||
### Added in 0.0.4
|
||||
|
||||
- Register multiple devices at home-assistant instead of one for all measurements.
|
||||
Now we register: a Controller, the inverter and up to 4 input devices to home-assistant.
|
||||
|
||||
## [0.0.3] - 2023-09-28
|
||||
|
||||
### Added
|
||||
### Added in 0.0.3
|
||||
|
||||
- Fixes Running Proxy with host UID and GUID #2
|
||||
|
||||
## [0.0.2] - 2023-09-27
|
||||
|
||||
### Added
|
||||
### Added in 0.0.2
|
||||
|
||||
- Dockerfile opencontainer labels
|
||||
- Send voltage and current of inputs to mqtt
|
||||
|
||||
## [0.0.1] - 2023-09-25
|
||||
|
||||
### Added
|
||||
### Added in 0.0.1
|
||||
|
||||
- Logger for inverter packets
|
||||
- SIGTERM handler for fast docker restarts
|
||||
- Proxy as non-root docker application
|
||||
- Proxy as non-root docker application
|
||||
- Unit- and system tests
|
||||
- Home asssistant auto configuration
|
||||
- Self-sufficient island operation without internet
|
||||
|
||||
@@ -60,7 +60,7 @@ representative at an online or offline event.
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
compliance@allius.de.
|
||||
<compliance@allius.de>.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
@@ -116,7 +116,7 @@ the community.
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available at
|
||||
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||
<https://www.contributor-covenant.org/version/2/0/code_of_conduct.html>.
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
@@ -124,5 +124,5 @@ enforcement ladder](https://github.com/mozilla/diversity).
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
||||
<https://www.contributor-covenant.org/faq>. Translations are available at
|
||||
<https://www.contributor-covenant.org/translations>.
|
||||
|
||||
@@ -7,6 +7,7 @@ The project aims to bring TSUN third generation inverters (with WiFi support) in
|
||||
The code base of the proxy was created in a few weeks after work and offers many possibilities for collaboration.
|
||||
|
||||
Especially in the area of
|
||||
|
||||
- docker compose
|
||||
- packaging
|
||||
- test automation
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Copyright (c) 2023 Stefan Allius.
|
||||
# Copyright © 2023 Stefan Allius
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
|
||||
14
Makefile
Normal file
14
Makefile
Normal file
@@ -0,0 +1,14 @@
|
||||
.PHONY: build clean addon-dev addon-debug addon-rc addon-rel debug dev preview rc rel
|
||||
|
||||
debug dev preview rc rel:
|
||||
$(MAKE) -C app $@
|
||||
|
||||
clean build:
|
||||
$(MAKE) -C ha_addons $@
|
||||
|
||||
addon-dev addon-debug addon-rc addon-rel:
|
||||
$(MAKE) -C ha_addons $(patsubst addon-%,%,$@)
|
||||
|
||||
check-docker-compose:
|
||||
docker-compose config -q
|
||||
|
||||
342
README.md
342
README.md
@@ -7,13 +7,17 @@
|
||||
<p align="center">
|
||||
<a href="https://opensource.org/licenses/BSD-3-Clause"><img alt="License: BSD-3-Clause" src="https://img.shields.io/badge/License-BSD_3--Clause-green.svg"></a>
|
||||
<a href="https://www.python.org/downloads/release/python-3120/"><img alt="Supported Python versions" src="https://img.shields.io/badge/python-3.12-blue.svg"></a>
|
||||
<a href="https://sbtinstruments.github.io/aiomqtt/introduction.html"><img alt="Supported aiomqtt versions" src="https://img.shields.io/badge/aiomqtt-1.2.1-lightblue.svg"></a>
|
||||
<a href="https://sbtinstruments.github.io/aiomqtt/introduction.html"><img alt="Supported aiomqtt versions" src="https://img.shields.io/badge/aiomqtt-2.3.0-lightblue.svg"></a>
|
||||
<a href="https://libraries.io/pypi/aiocron"><img alt="Supported aiocron versions" src="https://img.shields.io/badge/aiocron-1.8-lightblue.svg"></a>
|
||||
<a href="https://toml.io/en/v1.0.0"><img alt="Supported toml versions" src="https://img.shields.io/badge/toml-1.0.0-lightblue.svg"></a>
|
||||
|
||||
<br>
|
||||
<a href="https://sonarcloud.io/component_measures?id=s-allius_tsun-gen3-proxy&metric=alert_status"><img alt="The quality gate status" src="https://sonarcloud.io/api/project_badges/measure?project=s-allius_tsun-gen3-proxy&metric=alert_status"></a>
|
||||
<a href="https://sonarcloud.io/component_measures?id=s-allius_tsun-gen3-proxy&metric=bugs"><img alt="No of bugs" src="https://sonarcloud.io/api/project_badges/measure?project=s-allius_tsun-gen3-proxy&metric=bugs"></a>
|
||||
<a href="https://sonarcloud.io/component_measures?id=s-allius_tsun-gen3-proxy&metric=code_smells"><img alt="No of code-smells" src="https://sonarcloud.io/api/project_badges/measure?project=s-allius_tsun-gen3-proxy&metric=code_smells"></a>
|
||||
<br>
|
||||
<a href="https://sonarcloud.io/component_measures?id=s-allius_tsun-gen3-proxy&metric=coverage"><img alt="Test coverage in percent" src="https://sonarcloud.io/api/project_badges/measure?project=s-allius_tsun-gen3-proxy&metric=coverage"></a>
|
||||
</p>
|
||||
|
||||
|
||||
###
|
||||
# Overview
|
||||
|
||||
This proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker. With the proxy, you can easily retrieve real-time values such as power, current and daily energy and integrate the inverter into typical home automations. This works even without an internet connection. The optional connection to the TSUN Cloud can be disabled!
|
||||
@@ -23,11 +27,15 @@ In detail, the inverter establishes a TCP connection to the TSUN cloud to transm
|
||||
Through this, the inverter then establishes a connection to the proxy and the proxy establishes another connection to the TSUN Cloud. The transmitted data is interpreted by the proxy and then passed on to both the TSUN Cloud and the MQTT broker. The connection to the TSUN Cloud is optional and can be switched off in the configuration (default is on). Then no more data is sent to the Internet, but no more remote updates of firmware and operating parameters (e.g. rated power, grid parameters) are possible.
|
||||
|
||||
By means of `docker` a simple installation and operation is possible. By using `docker-compose`, a complete stack of proxy, `MQTT-broker` and `home-assistant` can be started easily.
|
||||
###
|
||||
ℹ️ This project is not related to the company TSUN. It is a private initiative that aims to connect TSUN inverters with an MQTT broker. There is no support and no warranty from TSUN.
|
||||
###
|
||||
|
||||
```
|
||||
Alternatively you can run the TSUN-Proxy as a Home Assistant Add-on. The installation of this add-on is pretty straightforward and not different in comparison to installing any other custom Home Assistant add-on.
|
||||
Follow the Instructions mentioned in the add-on subdirectory `ha_addons`.
|
||||
|
||||
<br>
|
||||
ℹ️ This project is not related to the company TSUN. It is a private initiative that aims to connect TSUN inverters with an MQTT broker. There is no support and no warranty from TSUN.
|
||||
<br><br>
|
||||
|
||||
```txt
|
||||
❗An essential requirement is that the proxy can be looped into the connection
|
||||
between the inverter and TSUN Cloud.
|
||||
|
||||
@@ -40,73 +48,162 @@ If you use a Pi-hole, you can also store the host entry in the Pi-hole.
|
||||
|
||||
## Features
|
||||
|
||||
- supports TSOL MS300, MS350, MS400, MS600, MS700 and MS800 inverters from TSUN
|
||||
- Supports TSUN GEN3 PLUS inverters: TSOL-MS2000, MS1800 and MS1600
|
||||
- Supports TSUN GEN3 inverters: TSOL-MS800, MS700, MS600, MS400, MS350 and MS300
|
||||
- `MQTT` support
|
||||
- `Home-Assistant` auto-discovery support
|
||||
- `MODBUS` support via MQTT topics
|
||||
- `AT-Command` support via MQTT topics (GEN3PLUS only)
|
||||
- Faster DataUp interval sends measurement data to the MQTT broker every minute
|
||||
- Self-sufficient island operation without internet
|
||||
- non-root Docker Container
|
||||
- Security-Features:
|
||||
- control access via `AT-commands`
|
||||
- Runs in a non-root Docker Container
|
||||
|
||||
## Home Assistant Screenshots
|
||||
|
||||
Here are some screenshots of how the inverter is displayed in the Home Assistant:
|
||||
|
||||
https://github.com/s-allius/tsun-gen3-proxy/wiki/home-assistant#home-assistant-screenshots
|
||||
<https://github.com/s-allius/tsun-gen3-proxy/wiki/home-assistant#home-assistant-screenshots>
|
||||
|
||||
## Requirements
|
||||
|
||||
### for Docker Installation
|
||||
|
||||
- A running Docker engine to host the container
|
||||
- Ability to loop the proxy into the connection between the inverter and the TSUN cloud
|
||||
|
||||
### for Home Assistant Add-on Installation
|
||||
|
||||
- Running Home Assistant on Home Assistant OS or Supervised. Container and Core installations don't support add-ons.
|
||||
- Ability to loop the proxy into the connection between the inverter and the TSUN cloud
|
||||
|
||||
###
|
||||
# Getting Started
|
||||
|
||||
## for Docker Installation
|
||||
|
||||
To run the proxy, you first need to create the image. You can do this quite simply as follows:
|
||||
|
||||
```sh
|
||||
docker build https://github.com/s-allius/tsun-gen3-proxy.git#main:app -t tsun-proxy
|
||||
```
|
||||
|
||||
after that you can run the image:
|
||||
|
||||
```sh
|
||||
docker run --dns '8.8.8.8' --env 'UID=1000' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
|
||||
docker run --dns '8.8.8.8' --env 'UID=1000' -p '5005:5005' -p '10000:10000' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
|
||||
```
|
||||
You will surely see a message that the configuration file was not found. So that we can create this without admin rights, the `uid` must still be adapted. To do this, simply stop the proxy with ctrl-c and use the `id` command to determine your own UserId:
|
||||
|
||||
You will surely see a message that the configuration file was not found. So that we can create this without admin rights, the `uid` must still be adapted. To do this, simply stop the proxy with ctrl-c and use the `id` command to determine your own UserId:
|
||||
|
||||
```sh
|
||||
% id
|
||||
uid=1050(sallius) gid=20(staff) ...
|
||||
```
|
||||
|
||||
With this information we can customize the `docker run` statement:
|
||||
|
||||
```sh
|
||||
docker run --dns '8.8.8.8' --env 'UID=1050' -p '5005:5005' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
|
||||
docker run --dns '8.8.8.8' --env 'UID=1050' -p '5005:5005' -p '10000:10000' -v ./config:/home/tsun-proxy/config -v ./log:/home/tsun-proxy/log tsun-proxy
|
||||
```
|
||||
|
||||
###
|
||||
## for Home Assistant Add-on Installation
|
||||
|
||||
1. Add the repository URL to the Home Assistant add-on store
|
||||
[![Add repository on my Home Assistant][repository-badge]][repository-url]
|
||||
2. Reload the add-on store page
|
||||
3. Click the "Install" button to install the add-on.
|
||||
|
||||
# Configuration
|
||||
The Docker container does not require any special configuration.
|
||||
|
||||
```txt
|
||||
❗The following description applies to the Docker installation. When installing the Home Assistant add-on, the
|
||||
configuration is carried out via the Home Assistant UI. Some of the options described below are not required for
|
||||
this. Additionally, creating a config.toml file is not necessary. However, for a general understanding of the
|
||||
configuration and functionality, it is helpful to read the following description.
|
||||
```
|
||||
|
||||
The configuration consists of several parts. First, the container and the proxy itself must be configured, and then the connection of the inverter to the proxy must be set up, which is done differently depending on the inverter generation
|
||||
|
||||
For GEN3PLUS inverters, this can be done easily via the web interface of the inverter. The GEN3 inverters do not have a web interface, so the proxy is integrated via a modified DNS resolution.
|
||||
|
||||
1. [Container Setup](#container-setup)
|
||||
2. [Proxy Configuration](#proxy-configuration)
|
||||
3. [Inverter Configuration](#inverter-configuration) (only GEN3PLUS)
|
||||
4. [DNS Settings](#dns-settings) (Mandatory for GEN3)
|
||||
|
||||
## Container Setup
|
||||
|
||||
No special configuration is required for the Docker container if it is built and started as described above. It is recommended to start the container with docker-compose. The configuration is then specified in a docker-compose.yaml file. An example of a stack consisting of the proxy, MQTT broker and home assistant can be found [here](https://github.com/s-allius/tsun-gen3-proxy/blob/main/docker-compose.yaml).
|
||||
|
||||
On the host, two directories (for log files and for config files) must be mapped. If necessary, the UID of the proxy process can be adjusted, which is also the owner of the log and configuration files.
|
||||
|
||||
The proxy can be configured via the file 'config.toml'. When the proxy is started, a file 'config.example.toml' is copied into the config directory. This file shows all possible parameters and their default values. Changes in the example file itself are not evaluated. To configure the proxy, the config.example.toml file should be renamed to config.toml. After that the corresponding values can be adjusted. To load the new configuration, the proxy must be restarted.
|
||||
|
||||
A description of the configuration parameters can be found [here](https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#docker-compose-environment-variables).
|
||||
|
||||
## Proxy Configuration
|
||||
|
||||
The proxy can be configured via the file 'config.toml'. When the proxy is started, a file 'config.example.toml' is copied into the config directory. This file shows all possible parameters and their default values. Changes in the example file itself are not evaluated. To configure the proxy, the config.example.toml file should be renamed to config.toml. After that the corresponding values can be adjusted. To load the new configuration, the proxy must be restarted.
|
||||
|
||||
The configuration uses the TOML format, which aims to be easy to read due to obvious semantics.
|
||||
You find more details here: https://toml.io/en/v1.0.0
|
||||
|
||||
You find more details here: <https://toml.io/en/v1.0.0>
|
||||
|
||||
<details>
|
||||
<summary>Here is an example of a <b>config.toml</b> file</summary>
|
||||
|
||||
```toml
|
||||
# configuration to reach tsun cloud
|
||||
tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
|
||||
tsun.host = 'logger.talent-monitoring.com'
|
||||
tsun.port = 5005
|
||||
##########################################################################################
|
||||
###
|
||||
### T S U N - G E N 3 - P R O X Y
|
||||
###
|
||||
### from Stefan Allius
|
||||
###
|
||||
##########################################################################################
|
||||
###
|
||||
### The readme will give you an overview of the project:
|
||||
### https://s-allius.github.io/tsun-gen3-proxy/
|
||||
###
|
||||
### The proxy supports different operation modes. Select the proper mode
|
||||
### which depends on your inverter type and you inverter firmware.
|
||||
### Please read:
|
||||
### https://github.com/s-allius/tsun-gen3-proxy/wiki/Operation-Modes-Overview
|
||||
###
|
||||
### Here you will find a description of all configuration options:
|
||||
### https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml
|
||||
###
|
||||
### The configuration uses the TOML format, which aims to be easy to read due to
|
||||
### obvious semantics. You find more details here: https://toml.io/en/v1.0.0
|
||||
###
|
||||
##########################################################################################
|
||||
|
||||
|
||||
# mqtt broker configuration
|
||||
##########################################################################################
|
||||
##
|
||||
## MQTT broker configuration
|
||||
##
|
||||
## In this block, you must configure the connection to your MQTT broker and specify the
|
||||
## required credentials. As the proxy does not currently support an encrypted connection
|
||||
## to the MQTT broker, it is strongly recommended that you do not use a public broker.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#mqtt-broker-account
|
||||
##
|
||||
|
||||
mqtt.host = 'mqtt' # URL or IP address of the mqtt broker
|
||||
mqtt.port = 1883
|
||||
mqtt.user = ''
|
||||
mqtt.passwd = ''
|
||||
|
||||
|
||||
# home-assistant
|
||||
##########################################################################################
|
||||
##
|
||||
## HOME ASSISTANT
|
||||
##
|
||||
## The proxy supports the MQTT autoconfiguration of Home Assistant (HA). The default
|
||||
## values match the HA default configuration. If you need to change these or want to use
|
||||
## a different MQTT client, you can adjust the prefixes of the MQTT topics below.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#home-assistant
|
||||
##
|
||||
|
||||
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
|
||||
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
|
||||
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
|
||||
@@ -114,40 +211,208 @@ ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_i
|
||||
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
|
||||
|
||||
|
||||
# microinverters
|
||||
inverters.allow_all = false # True: allow inverters, even if we have no inverter mapping
|
||||
##########################################################################################
|
||||
##
|
||||
## GEN3 Proxy Mode Configuration
|
||||
##
|
||||
## In this block, you can configure an optional connection to the TSUN cloud for GEN3
|
||||
## inverters. This connection is only required if you want to send data to the TSUN cloud
|
||||
## to use the TSUN APPs or receive firmware updates.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#tsun-cloud-for-gen3-inverter-only
|
||||
##
|
||||
|
||||
tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
|
||||
tsun.host = 'logger.talent-monitoring.com'
|
||||
tsun.port = 5005
|
||||
|
||||
|
||||
##########################################################################################
|
||||
##
|
||||
## GEN3PLUS Proxy Mode Configuration
|
||||
##
|
||||
## In this block, you can configure an optional connection to the TSUN cloud for GEN3PLUS
|
||||
## inverters. This connection is only required if you want to send data to the TSUN cloud
|
||||
## to use the TSUN APPs or receive firmware updates.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#solarman-cloud-for-gen3plus-inverter-only
|
||||
##
|
||||
solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
|
||||
solarman.host = 'iot.talent-monitoring.com'
|
||||
solarman.port = 10000
|
||||
|
||||
|
||||
##########################################################################################
|
||||
###
|
||||
### Inverter Definitions
|
||||
###
|
||||
### The proxy supports the simultaneous operation of several inverters, even of different
|
||||
### types. A configuration block must be defined for each inverter, in which all necessary
|
||||
### parameters must be specified. These depend on the operation mode used and also differ
|
||||
### slightly depending on the inverter type.
|
||||
###
|
||||
### In addition, the PV modules can be defined at the individual inputs for documentation
|
||||
### purposes, whereby these are displayed in Home Assistant.
|
||||
###
|
||||
### The proxy only accepts connections from known inverters. This can be switched off for
|
||||
### test purposes and unknown serial numbers are also accepted.
|
||||
###
|
||||
|
||||
inverters.allow_all = false # only allow known inverters
|
||||
|
||||
|
||||
##########################################################################################
|
||||
##
|
||||
## For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT
|
||||
## definition. To do this, the corresponding configuration block is started with
|
||||
## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned
|
||||
## to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set
|
||||
## in the configuration block
|
||||
##
|
||||
## The serial numbers of all GEN3 inverters start with `R17`!
|
||||
##
|
||||
|
||||
# inverter mapping, maps a `serial_no* to a `node_id` and defines an optional `suggested_area` for `home-assistant`
|
||||
#
|
||||
# for each inverter add a block starting with [inverters."<16-digit serial number>"]
|
||||
[inverters."R17xxxxxxxxxxxx1"]
|
||||
node_id = 'inv1' # Optional, MQTT replacement for inverters serial number
|
||||
suggested_area = 'roof' # Optional, suggested installation area for home-assistant
|
||||
node_id = 'inv_1' # MQTT replacement for inverters serial number
|
||||
suggested_area = 'roof' # suggested installation place for home-assistant
|
||||
modbus_polling = false # Disable optional MODBUS polling for GEN3 inverter
|
||||
pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
|
||||
[inverters."R17xxxxxxxxxxxx2"]
|
||||
node_id = 'inv2' # Optional, MQTT replacement for inverters serial number
|
||||
suggested_area = 'balcony' # Optional, suggested installation area for home-assistant
|
||||
|
||||
##########################################################################################
|
||||
##
|
||||
## For each GEN3PLUS inverter, the serial number of the inverter must be mapped to an MQTT
|
||||
## definition. To do this, the corresponding configuration block is started with
|
||||
## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned
|
||||
## to this inverter. Further inverter-specific parameters (e.g. polling mode, client mode)
|
||||
## can be set in the configuration block
|
||||
##
|
||||
## The serial numbers of all GEN3PLUS inverters start with `Y17` or Y47! Each GEN3PLUS
|
||||
## inverter is supplied with a “Monitoring SN:”. This can be found on a sticker enclosed
|
||||
## with the inverter.
|
||||
##
|
||||
|
||||
[inverters."Y17xxxxxxxxxxxx1"] # This block is also for inverters with a Y47 serial no
|
||||
monitor_sn = 2000000000 # The GEN3PLUS "Monitoring SN:"
|
||||
node_id = 'inv_2' # MQTT replacement for inverters serial number
|
||||
suggested_area = 'garage' # suggested installation place for home-assistant
|
||||
modbus_polling = true # Enable optional MODBUS polling
|
||||
|
||||
# if your inverter supports SSL connections you must use the client_mode. Pls, uncomment
|
||||
# the next line and configure the fixed IP of your inverter
|
||||
#client_mode = {host = '192.168.0.1', port = 8899, forward = true}
|
||||
|
||||
pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
|
||||
|
||||
##########################################################################################
|
||||
###
|
||||
### If the proxy mode is configured, commands from TSUN can be sent to the inverter via
|
||||
### this connection or parameters (e.g. network credentials) can be queried. Filters can
|
||||
### then be configured for the AT+ commands from the TSUN Cloud so that only certain
|
||||
### accesses are permitted.
|
||||
###
|
||||
### An overview of all known AT+ commands can be found here:
|
||||
### https://github.com/s-allius/tsun-gen3-proxy/wiki/AT--commands
|
||||
###
|
||||
|
||||
[gen3plus.at_acl]
|
||||
tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'] # allow this for TSUN access
|
||||
tsun.block = []
|
||||
mqtt.allow = ['AT+'] # allow all via mqtt
|
||||
mqtt.block = []
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
## Inverter Configuration
|
||||
|
||||
GEN3PLUS inverters offer a web interface that can be used to configure the inverter. This is very practical for sending the data directly to the proxy. On the one hand, the inverter broadcasts its own SSID on 2.4GHz. This can be recognized because it is broadcast with `AP_<Monitoring SN>`. You will find the `Monitor SN` and the password for the WLAN connection on a small sticker enclosed with the inverter.
|
||||
|
||||
If you have already connected the inverter to the cloud via the TSUN app, you can also address the inverter directly via WiFi. In the first case, the inverter uses the fixed IP address `10.10.100.254`, in the second case you have to look up the IP address in your router.
|
||||
|
||||
The standard web interface of the inverter can be accessed at `http://<ip-address>/index_cn.html`. Here you can set up the WLAN connection or change the password. The default user and password is `admin`/`admin`.
|
||||
|
||||
For our purpose, the hidden URL `http://<ip-address>/config_hide.html` should be called. There you can see and modify the parameters for accessing the cloud. Here we enter the IP address of our proxy and the IP port `10000` for the `Server A Setting` and for `Optional Server Setting`. The second entry is used as a backup in the event of connection problems.
|
||||
|
||||
```txt
|
||||
❗If the IP port is set to 10443 in the inverter configuration, you probably have a firmware with SSL support.
|
||||
In this case, you MUST NOT change the port or the host address, as this may cause the inverter to hang and
|
||||
require a complete reset. Use the configuration in client mode instead.
|
||||
```
|
||||
|
||||
If access to the web interface does not work, it can also be redirected via DNS redirection, as is necessary for the GEN3 inverters.
|
||||
|
||||
## Client Mode (GEN3PLUS only)
|
||||
|
||||
Newer GEN3PLUS inverters support SSL encrypted connections over port 10443 to the TSUN cloud. In this case you can't loop the proxy into this connection, since the certificate verification of the inverter doesn't allow this. You can configure the proxy in client-mode to establish an unencrypted connection to the inverter. For this purpose the inverter listens on port `8899`.
|
||||
|
||||
There are some requirements to be met:
|
||||
|
||||
- the inverter should have a fixed IP
|
||||
- the proxy must be able to reach the inverter. You must configure a corresponding route in your router if the inverter and the proxy are in different IP networks
|
||||
- add a 'client_mode' line to your config.toml file, to specify the inverter's ip address
|
||||
|
||||
## DNS Settings
|
||||
|
||||
### Loop the proxy into the connection
|
||||
|
||||
To include the proxy in the connection between the inverter and the TSUN Cloud, you must adapt the DNS record of *logger.talent-monitoring.com* within the network that your inverter uses. You need a mapping from logger.talent-monitoring.com to the IP address of the host running the Docker engine.
|
||||
|
||||
The new GEN3 PLUS inverters use a different URL. Here, *iot.talent-monitoring.com* must be redirected.
|
||||
|
||||
This can be done, for example, by adding a local DNS record to the Pi-hole if you are using it.
|
||||
|
||||
### DNS Rebind Protection
|
||||
|
||||
If you are using a router as local DNS server, the router may have DNS rebind protection that needs to be adjusted. For security reasons, DNS rebind protection blocks DNS queries that refer to an IP address on the local network.
|
||||
|
||||
If you are using a FRITZ!Box, you can do this in the Network Settings tab under Home Network / Network. Add logger.talent-monitoring.com as a hostname exception in DNS rebind protection.
|
||||
|
||||
### DNS server of proxy
|
||||
|
||||
The proxy itself must use a different DNS server to connect to the TSUN Cloud. If you use the DNS server with the adapted record, you will end up in an endless loop as soon as the proxy tries to send data to the TSUN Cloud.
|
||||
|
||||
As described above, set a DNS server in the Docker command or Docker compose file.
|
||||
|
||||
### Over The Air (OTA) firmware update
|
||||
|
||||
Even if the proxy is connected between the inverter and the TSUN Cloud, an OTA update is supported. To do this, the inverter must be able to reach the website <http://www.talent-monitoring.com:9002/> in order to download images from there.
|
||||
|
||||
It must be ensured that this address is not mapped to the proxy!
|
||||
|
||||
# General Information
|
||||
|
||||
## Compatibility
|
||||
|
||||
In the following table you will find an overview of which inverter model has been tested for compatibility with which firmware version.
|
||||
A combination with a red question mark should work, but I have not checked it in detail.
|
||||
|
||||
<table align="center">
|
||||
<tr><th align="center">Micro Inverter Model</th><th align="center">Fw. 1.00.06</th><th align="center">Fw. 1.00.17</th><th align="center">Fw. 1.00.20</th><th align="center">Fw. 4.0.10</th><th align="center">Fw. 4.0.20</th></tr>
|
||||
<tr><td>GEN3 micro inverters (single MPPT):<br>MS300, MS350, MS400<br>MS400-D</td><td align="center">❓</td><td align="center">❓</td><td align="center">❓</td><td align="center">➖</td><td align="center">➖</td></tr>
|
||||
<tr><td>GEN3 micro inverters (dual MPPT):<br>MS600, MS700, MS800<br>MS600-D, MS800-D</td><td align="center">✔️</td><td align="center">✔️</td><td align="center">✔️</td><td align="center">➖</td><td align="center">➖</td></tr>
|
||||
<tr><td>GEN3 PLUS micro inverters:<br>MS1600, MS1800, MS2000<br>MS2000-D</td><td align="center">➖</td><td align="center">➖</td><td align="center">➖</td><td align="center">✔️</td><td align="center">✔️</td></tr>
|
||||
<tr><td>TITAN micro inverters:<br>TSOL-MP3000, MP2250, MS3000</td><td align="center">❓</td><td align="center">❓</td><td align="center">❓</td><td align="center">❓</td><td align="center">❓</td></tr>
|
||||
</table>
|
||||
|
||||
```txt
|
||||
Legend
|
||||
➖: Firmware not available for these devices
|
||||
✔️: proxy support tested
|
||||
❓: proxy support possible but not tested
|
||||
🚧: Proxy support in preparation
|
||||
```
|
||||
|
||||
❗The new inverters of the GEN3 Plus generation (e.g. MS-2000) use a completely different protocol for data transmission to the TSUN server. These inverters are supported from proxy version 0.6. The serial numbers of these inverters start with `Y17E` or `Y47E` instead of `R17E`
|
||||
|
||||
If you have one of these combinations with a red question mark, it would be very nice if you could send me a proxy trace so that I can carry out the detailed checks and adjust the device and system tests. [Ask here how to send a trace](https://github.com/s-allius/tsun-gen3-proxy/discussions/categories/traces-for-compatibility-check)
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the [BSD 3-clause License](https://opensource.org/licenses/BSD-3-Clause).
|
||||
@@ -157,7 +422,6 @@ Note the aiomqtt library used is based on the paho-mqtt library, which has a dua
|
||||
- One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD)
|
||||
- One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD)
|
||||
|
||||
|
||||
## Versioning
|
||||
|
||||
This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). Breaking changes will only occur in major `X.0.0` releases.
|
||||
@@ -170,3 +434,5 @@ We're very happy to receive contributions to this project! You can get started b
|
||||
|
||||
The changelog lives in [CHANGELOG.md](https://github.com/s-allius/tsun-gen3-proxy/blob/main/CHANGELOG.md). It follows the principles of [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||
|
||||
[repository-badge]: https://img.shields.io/badge/Add%20repository%20to%20my-Home%20Assistant-41BDF5?logo=home-assistant&style=for-the-badge
|
||||
[repository-url]: https://my.home-assistant.io/redirect/supervisor_add_addon_repository/?repository_url=https%3A%2F%2Fgithub.com%2Fs-allius%2Fha-addons
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
tests/
|
||||
**/__pycache__
|
||||
*.pyc
|
||||
.DS_Store
|
||||
.DS_Store
|
||||
build.sh
|
||||
1
app/.version
Normal file
1
app/.version
Normal file
@@ -0,0 +1 @@
|
||||
0.12.1
|
||||
@@ -4,68 +4,67 @@ ARG GID=1000
|
||||
|
||||
#
|
||||
# first stage for our base image
|
||||
FROM python:3.12-alpine AS base
|
||||
USER root
|
||||
FROM python:3.13-alpine AS base
|
||||
|
||||
RUN apk update && \
|
||||
apk upgrade
|
||||
RUN apk add --no-cache su-exec
|
||||
COPY --chmod=0700 ./hardening_base.sh /
|
||||
RUN apk upgrade --no-cache && \
|
||||
apk add --no-cache su-exec=0.2-r3 && \
|
||||
/hardening_base.sh && \
|
||||
rm /hardening_base.sh
|
||||
|
||||
#
|
||||
# second stage for building wheels packages
|
||||
FROM base as builder
|
||||
|
||||
RUN apk add --no-cache build-base && \
|
||||
python -m pip install --no-cache-dir -U pip wheel
|
||||
FROM base AS builder
|
||||
|
||||
# copy the dependencies file to the root dir and install requirements
|
||||
COPY ./requirements.txt /root/
|
||||
RUN python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
|
||||
RUN apk add --no-cache build-base=0.5-r3 && \
|
||||
python -m pip install --no-cache-dir pip==24.3.1 wheel==0.45.1 && \
|
||||
python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
|
||||
|
||||
|
||||
#
|
||||
# third stage for our runtime image
|
||||
FROM base as runtime
|
||||
FROM base AS runtime
|
||||
ARG SERVICE_NAME
|
||||
ARG VERSION
|
||||
ARG UID
|
||||
ARG GID
|
||||
ARG LOG_LVL
|
||||
ARG environment
|
||||
|
||||
ENV VERSION=$VERSION
|
||||
ENV SERVICE_NAME=$SERVICE_NAME
|
||||
ENV UID=$UID
|
||||
ENV GID=$GID
|
||||
ENV LOG_LVL=$LOG_LVL
|
||||
ENV HOME=/home/$SERVICE_NAME
|
||||
|
||||
|
||||
# set the working directory in the container
|
||||
WORKDIR /home/$SERVICE_NAME
|
||||
|
||||
# update PATH environment variable
|
||||
ENV HOME=/home/$SERVICE_NAME
|
||||
|
||||
VOLUME ["/home/$SERVICE_NAME/log", "/home/$SERVICE_NAME/config"]
|
||||
|
||||
# install the requirements from the wheels packages from the builder stage
|
||||
# install the requirements from the wheels packages from the builder stage
|
||||
# and unistall python packages and alpine package manger to reduce attack surface
|
||||
COPY --from=builder /root/wheels /root/wheels
|
||||
RUN python -m pip install --no-cache --no-index /root/wheels/* && \
|
||||
rm -rf /root/wheels
|
||||
COPY --chmod=0700 ./hardening_final.sh .
|
||||
RUN python -m pip install --no-cache-dir --no-cache --no-index /root/wheels/* && \
|
||||
rm -rf /root/wheels && \
|
||||
python -m pip uninstall --yes wheel pip && \
|
||||
apk --purge del apk-tools && \
|
||||
./hardening_final.sh && \
|
||||
rm ./hardening_final.sh
|
||||
|
||||
|
||||
# copy the content of the local src and config directory to the working directory
|
||||
COPY --chmod=0700 entrypoint.sh /root/entrypoint.sh
|
||||
COPY config .
|
||||
COPY src .
|
||||
|
||||
EXPOSE 5005
|
||||
RUN echo ${VERSION} > /proxy-version.txt \
|
||||
&& date > /build-date.txt
|
||||
EXPOSE 5005 8127 10000
|
||||
|
||||
# command to run on container start
|
||||
ENTRYPOINT ["/root/entrypoint.sh"]
|
||||
CMD [ "python3", "./server.py" ]
|
||||
|
||||
|
||||
LABEL org.opencontainers.image.authors="Stefan Allius"
|
||||
LABEL org.opencontainers.image.source https://github.com/s-allius/tsun-gen3-proxy
|
||||
LABEL org.opencontainers.image.description 'The "TSUN Gen3 Micro-Inverter" proxy enables a reliable connection between TSUN third generation inverters and an MQTT broker to integrate the inverter into typical home automations'
|
||||
LABEL org.opencontainers.image.licenses="BSD-3-Clause"
|
||||
LABEL org.opencontainers.image.vendor="Stefan Allius"
|
||||
|
||||
67
app/Makefile
Normal file
67
app/Makefile
Normal file
@@ -0,0 +1,67 @@
|
||||
#!make
|
||||
include ../.env
|
||||
|
||||
SHELL = /bin/sh
|
||||
IMAGE = tsun-gen3-proxy
|
||||
|
||||
|
||||
# Folders
|
||||
SRC=.
|
||||
SRC_PROXY=$(SRC)/src
|
||||
CNF_PROXY=$(SRC)/config
|
||||
|
||||
DST=rootfs
|
||||
DST_PROXY=$(DST)/home/proxy
|
||||
|
||||
# collect source files
|
||||
SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\
|
||||
$(wildcard $(SRC_PROXY)/*.ini)\
|
||||
$(wildcard $(SRC_PROXY)/cnf/*.py)\
|
||||
$(wildcard $(SRC_PROXY)/gen3/*.py)\
|
||||
$(wildcard $(SRC_PROXY)/gen3plus/*.py)
|
||||
CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml)
|
||||
|
||||
# determine destination files
|
||||
TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%)
|
||||
CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%)
|
||||
|
||||
export BUILD_DATE := ${shell date -Iminutes}
|
||||
VERSION := $(shell cat $(SRC)/.version)
|
||||
export MAJOR := $(shell echo $(VERSION) | cut -f1 -d.)
|
||||
|
||||
PUBLIC_URL := $(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f1 -d/)
|
||||
PUBLIC_USER :=$(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f2 -d/)
|
||||
|
||||
|
||||
dev debug:
|
||||
@echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PRIVAT_CONTAINER_REGISTRY)$(IMAGE)
|
||||
export VERSION=$(VERSION)-$@ && \
|
||||
export IMAGE=$(PRIVAT_CONTAINER_REGISTRY)$(IMAGE) && \
|
||||
docker buildx bake -f docker-bake.hcl $@
|
||||
|
||||
preview rc rel:
|
||||
@echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PUBLIC_CONTAINER_REGISTRY)$(IMAGE)
|
||||
@echo login at $(PUBLIC_URL) as $(PUBLIC_USER)
|
||||
@DO_LOGIN="$(shell echo $(PUBLIC_CR_KEY) | docker login $(PUBLIC_URL) -u $(PUBLIC_USER) --password-stdin)"
|
||||
export VERSION=$(VERSION)-$@ && \
|
||||
export IMAGE=$(PUBLIC_CONTAINER_REGISTRY)$(IMAGE) && \
|
||||
docker buildx bake -f docker-bake.hcl $@
|
||||
|
||||
|
||||
|
||||
.PHONY: debug dev preview rc rel
|
||||
|
||||
|
||||
$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/%
|
||||
@echo Copy $< to $@
|
||||
@mkdir -p $(@D)
|
||||
@cp $< $@
|
||||
|
||||
$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/%
|
||||
@echo Copy $< to $@
|
||||
@mkdir -p $(@D)
|
||||
@cp $< $@
|
||||
|
||||
$(DST)/requirements.txt : $(SRC)/requirements.txt
|
||||
@echo Copy $< to $@
|
||||
@cp $< $@
|
||||
33
app/build.sh
33
app/build.sh
@@ -1,33 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
BUILD_DATE=$(date -Iminutes)
|
||||
VERSION=$(git describe --tags --abbrev=0)
|
||||
VERSION="${VERSION:1}"
|
||||
arr=(${VERSION//./ })
|
||||
MAJOR=${arr[0]}
|
||||
IMAGE=tsun-gen3-proxy
|
||||
|
||||
if [[ $1 == dev ]] || [[ $1 == rc ]] ;then
|
||||
IMAGE=docker.io/sallius/${IMAGE}
|
||||
VERSION=${VERSION}-$1
|
||||
elif [[ $1 == rel ]];then
|
||||
IMAGE=ghcr.io/s-allius/${IMAGE}
|
||||
else
|
||||
echo argument missing!
|
||||
echo try: $0 '[dev|rc|rel]'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE
|
||||
if [[ $1 == dev ]];then
|
||||
docker build --build-arg "VERSION=${VERSION}" --build-arg "LOG_LVL=DEBUG" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
|
||||
elif [[ $1 == rc ]];then
|
||||
docker build --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest app
|
||||
elif [[ $1 == rel ]];then
|
||||
docker build --no-cache --build-arg "VERSION=${VERSION}" --label "org.label-schema.build-date=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
|
||||
docker push ghcr.io/s-allius/tsun-gen3-proxy:latest
|
||||
docker push ghcr.io/s-allius/tsun-gen3-proxy:${MAJOR}
|
||||
docker push ghcr.io/s-allius/tsun-gen3-proxy:${VERSION}
|
||||
fi
|
||||
@@ -1,36 +1,177 @@
|
||||
# configuration to reach tsun cloud
|
||||
tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
|
||||
tsun.host = 'logger.talent-monitoring.com'
|
||||
tsun.port = 5005
|
||||
##########################################################################################
|
||||
###
|
||||
### T S U N - G E N 3 - P R O X Y
|
||||
###
|
||||
### from Stefan Allius
|
||||
###
|
||||
##########################################################################################
|
||||
###
|
||||
### The readme will give you an overview of the project:
|
||||
### https://s-allius.github.io/tsun-gen3-proxy/
|
||||
###
|
||||
### The proxy supports different operation modes. Select the proper mode
|
||||
### which depends on your inverter type and you inverter firmware.
|
||||
### Please read:
|
||||
### https://github.com/s-allius/tsun-gen3-proxy/wiki/Operation-Modes-Overview
|
||||
###
|
||||
### Here you will find a description of all configuration options:
|
||||
### https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml
|
||||
###
|
||||
### The configration uses the TOML format, which aims to be easy to read due to
|
||||
### obvious semantics. You find more details here: https://toml.io/en/v1.0.0
|
||||
###
|
||||
##########################################################################################
|
||||
|
||||
|
||||
# mqtt broker configuration
|
||||
##########################################################################################
|
||||
##
|
||||
## MQTT broker configuration
|
||||
##
|
||||
## In this block, you must configure the connection to your MQTT broker and specify the
|
||||
## required credentials. As the proxy does not currently support an encrypted connection
|
||||
## to the MQTT broker, it is strongly recommended that you do not use a public broker.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#mqtt-broker-account
|
||||
##
|
||||
|
||||
mqtt.host = 'mqtt' # URL or IP address of the mqtt broker
|
||||
mqtt.port = 1883
|
||||
mqtt.user = ''
|
||||
mqtt.passwd = ''
|
||||
|
||||
|
||||
# home-assistant
|
||||
##########################################################################################
|
||||
##
|
||||
## HOME ASSISTANT
|
||||
##
|
||||
## The proxy supports the MQTT autoconfiguration of Home Assistant (HA). The default
|
||||
## values match the HA default configuration. If you need to change these or want to use
|
||||
## a different MQTT client, you can adjust the prefixes of the MQTT topics below.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#home-assistant
|
||||
##
|
||||
|
||||
ha.auto_conf_prefix = 'homeassistant' # MQTT prefix for subscribing for homeassistant status updates
|
||||
ha.discovery_prefix = 'homeassistant' # MQTT prefix for discovery topic
|
||||
ha.entity_prefix = 'tsun' # MQTT topic prefix for publishing inverter values
|
||||
ha.proxy_node_id = 'proxy' # MQTT node id, for the proxy_node_id
|
||||
ha.proxy_unique_id = 'P170000000000001' # MQTT unique id, to identify a proxy instance
|
||||
|
||||
# microinverters
|
||||
inverters.allow_all = true # allow inverters, even if we have no inverter mapping
|
||||
|
||||
# inverter mapping, maps a `serial_no* to a `mqtt_id` and defines an optional `suggested_place` for `home-assistant`
|
||||
#
|
||||
# for each inverter add a block starting with [inverters."<16-digit serial numbeer>"]
|
||||
##########################################################################################
|
||||
##
|
||||
## GEN3 Proxy Mode Configuration
|
||||
##
|
||||
## In this block, you can configure an optional connection to the TSUN cloud for GEN3
|
||||
## inverters. This connection is only required if you want send data to the TSUN cloud
|
||||
## to use the TSUN APPs or receive firmware updates.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#tsun-cloud-for-gen3-inverter-only
|
||||
##
|
||||
|
||||
tsun.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
|
||||
tsun.host = 'logger.talent-monitoring.com'
|
||||
tsun.port = 5005
|
||||
|
||||
|
||||
##########################################################################################
|
||||
##
|
||||
## GEN3PLUS Proxy Mode Configuration
|
||||
##
|
||||
## In this block, you can configure an optional connection to the TSUN cloud for GEN3PLUS
|
||||
## inverters. This connection is only required if you want send data to the TSUN cloud
|
||||
## to use the TSUN APPs or receive firmware updates.
|
||||
##
|
||||
## https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml#solarman-cloud-for-gen3plus-inverter-only
|
||||
##
|
||||
|
||||
solarman.enabled = true # false: disables connecting to the tsun cloud, and avoids updates
|
||||
solarman.host = 'iot.talent-monitoring.com'
|
||||
solarman.port = 10000
|
||||
|
||||
|
||||
##########################################################################################
|
||||
###
|
||||
### Inverter Definitions
|
||||
###
|
||||
### The proxy supports the simultaneous operation of several inverters, even of different
|
||||
### types. A configuration block must be defined for each inverter, in which all necessary
|
||||
### parameters must be specified. These depend on the operation mode used and also differ
|
||||
### slightly depending on the inverter type.
|
||||
###
|
||||
### In addition, the PV modules can be defined at the individual inputs for documentation
|
||||
### purposes, whereby these are displayed in Home Assistant.
|
||||
###
|
||||
### The proxy only accepts connections from known inverters. This can be switched off for
|
||||
### test purposes and unknown serial numbers are also accepted.
|
||||
###
|
||||
|
||||
inverters.allow_all = false # only allow known inverters
|
||||
|
||||
|
||||
##########################################################################################
|
||||
##
|
||||
## For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT
|
||||
## definition. To do this, the corresponding configuration block is started with
|
||||
## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned
|
||||
## to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set
|
||||
## in the configuration block
|
||||
##
|
||||
## The serial numbers of all GEN3 inverters start with `R17`!
|
||||
##
|
||||
|
||||
[inverters."R170000000000001"]
|
||||
#node_id = '' # Optional, MQTT replacement for inverters serial number
|
||||
#suggested_area = '' # Optional, suggested installation area for home-assistant
|
||||
|
||||
#[inverters."R17xxxxxxxxxxxx2"]
|
||||
#node_id = '' # Optional, MQTT replacement for inverters serial number
|
||||
#suggested_area = '' # Optional, suggested installation area for home-assistant
|
||||
node_id = '' # MQTT replacement for inverters serial number
|
||||
suggested_area = '' # suggested installation area for home-assistant
|
||||
modbus_polling = false # Disable optional MODBUS polling
|
||||
pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
|
||||
|
||||
##########################################################################################
|
||||
##
|
||||
## For each GEN3PLUS inverter, the serial number of the inverter must be mapped to an MQTT
|
||||
## definition. To do this, the corresponding configuration block is started with
|
||||
## `[Inverter.“<16-digit serial number>”]` so that all subsequent parameters are assigned
|
||||
## to this inverter. Further inverter-specific parameters (e.g. polling mode, client mode)
|
||||
## can be set in the configuration block
|
||||
##
|
||||
## The serial numbers of all GEN3PLUS inverters start with `Y17` or Y47! Each GEN3PLUS
|
||||
## inverter is supplied with a “Monitoring SN:”. This can be found on a sticker enclosed
|
||||
## with the inverter.
|
||||
##
|
||||
|
||||
[inverters."Y170000000000001"]
|
||||
monitor_sn = 2000000000 # The GEN3PLUS "Monitoring SN:"
|
||||
node_id = '' # MQTT replacement for inverters serial number
|
||||
suggested_area = '' # suggested installation place for home-assistant
|
||||
modbus_polling = true # Enable optional MODBUS polling
|
||||
|
||||
# if your inverter supports SSL connections you must use the client_mode. Pls, uncomment
|
||||
# the next line and configure the fixed IP of your inverter
|
||||
#client_mode = {host = '192.168.0.1', port = 8899, forward = true}
|
||||
|
||||
pv1 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv2 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv3 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
pv4 = {type = 'RSM40-8-410M', manufacturer = 'Risen'} # Optional, PV module descr
|
||||
|
||||
|
||||
##########################################################################################
|
||||
###
|
||||
### If the proxy mode is configured, commands from TSUN can be sent to the inverter via
|
||||
### this connection or parameters (e.g. network credentials) can be queried. Filters can
|
||||
### then be configured for the AT+ commands from the TSUN Cloud so that only certain
|
||||
### accesses are permitted.
|
||||
###
|
||||
### An overview of all known AT+ commands can be found here:
|
||||
### https://github.com/s-allius/tsun-gen3-proxy/wiki/AT--commands
|
||||
###
|
||||
|
||||
[gen3plus.at_acl]
|
||||
# filter for received commands from the internet
|
||||
tsun.allow = ['AT+Z', 'AT+UPURL', 'AT+SUPDATE']
|
||||
tsun.block = []
|
||||
# filter for received commands from the MQTT broker
|
||||
mqtt.allow = ['AT+']
|
||||
mqtt.block = []
|
||||
|
||||
93
app/docker-bake.hcl
Normal file
93
app/docker-bake.hcl
Normal file
@@ -0,0 +1,93 @@
|
||||
variable "IMAGE" {
|
||||
default = "tsun-gen3-proxy"
|
||||
}
|
||||
variable "VERSION" {
|
||||
default = "0.0.0"
|
||||
}
|
||||
variable "MAJOR" {
|
||||
default = "0"
|
||||
}
|
||||
variable "BUILD_DATE" {
|
||||
default = "dev"
|
||||
}
|
||||
variable "BRANCH" {
|
||||
default = ""
|
||||
}
|
||||
variable "DESCRIPTION" {
|
||||
default = "This proxy enables a reliable connection between TSUN third generation inverters (eg. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations."
|
||||
}
|
||||
|
||||
target "_common" {
|
||||
context = "."
|
||||
dockerfile = "Dockerfile"
|
||||
args = {
|
||||
VERSION = "${VERSION}"
|
||||
environment = "production"
|
||||
}
|
||||
attest = [
|
||||
"type =provenance,mode=max",
|
||||
"type =sbom,generator=docker/scout-sbom-indexer:latest"
|
||||
]
|
||||
annotations = [
|
||||
"index:org.opencontainers.image.title=TSUN Gen3 Proxy",
|
||||
"index:org.opencontainers.image.authors=Stefan Allius",
|
||||
"index:org.opencontainers.image.created=${BUILD_DATE}",
|
||||
"index:org.opencontainers.image.version=${VERSION}",
|
||||
"index:org.opencontainers.image.revision=${BRANCH}",
|
||||
"index:org.opencontainers.image.description=${DESCRIPTION}",
|
||||
"index:org.opencontainers.image.licenses=BSD-3-Clause",
|
||||
"index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy"
|
||||
]
|
||||
labels = {
|
||||
"org.opencontainers.image.title" = "TSUN Gen3 Proxy"
|
||||
"org.opencontainers.image.authors" = "Stefan Allius"
|
||||
"org.opencontainers.image.created" = "${BUILD_DATE}"
|
||||
"org.opencontainers.image.version" = "${VERSION}"
|
||||
"org.opencontainers.image.revision" = "${BRANCH}"
|
||||
"org.opencontainers.image.description" = "${DESCRIPTION}"
|
||||
"org.opencontainers.image.licenses" = "BSD-3-Clause"
|
||||
"org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy"
|
||||
}
|
||||
output = [
|
||||
"type=image,push=true"
|
||||
]
|
||||
|
||||
no-cache = false
|
||||
platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
|
||||
}
|
||||
|
||||
target "_debug" {
|
||||
args = {
|
||||
LOG_LVL = "DEBUG"
|
||||
environment = "dev"
|
||||
}
|
||||
}
|
||||
target "_prod" {
|
||||
args = {
|
||||
}
|
||||
}
|
||||
target "debug" {
|
||||
inherits = ["_common", "_debug"]
|
||||
tags = ["${IMAGE}:debug"]
|
||||
}
|
||||
|
||||
target "dev" {
|
||||
inherits = ["_common"]
|
||||
tags = ["${IMAGE}:dev"]
|
||||
}
|
||||
|
||||
target "preview" {
|
||||
inherits = ["_common", "_prod"]
|
||||
tags = ["${IMAGE}:preview", "${IMAGE}:${VERSION}"]
|
||||
}
|
||||
|
||||
target "rc" {
|
||||
inherits = ["_common", "_prod"]
|
||||
tags = ["${IMAGE}:rc", "${IMAGE}:${VERSION}"]
|
||||
}
|
||||
|
||||
target "rel" {
|
||||
inherits = ["_common", "_prod"]
|
||||
tags = ["${IMAGE}:latest", "${IMAGE}:${MAJOR}", "${IMAGE}:${VERSION}"]
|
||||
no-cache = true
|
||||
}
|
||||
@@ -2,19 +2,24 @@
|
||||
set -e
|
||||
|
||||
user="$(id -u)"
|
||||
export VERSION=$(cat /proxy-version.txt)
|
||||
|
||||
echo "######################################################"
|
||||
echo "# prepare: '$SERVICE_NAME' Version:$VERSION"
|
||||
echo "# for running with UserID:$UID, GroupID:$GID"
|
||||
echo "# Image built: $(cat /build-date.txt) "
|
||||
echo "#"
|
||||
|
||||
if [ "$user" = '0' ]; then
|
||||
mkdir -p /home/$SERVICE_NAME/log /home/$SERVICE_NAME/config
|
||||
|
||||
if ! id $SERVICE_NAME &> /dev/null; then
|
||||
echo "# create user"
|
||||
addgroup --gid $GID $SERVICE_NAME 2> /dev/null
|
||||
adduser -G $SERVICE_NAME -s /bin/false -D -H -g "" -u $UID $SERVICE_NAME
|
||||
chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
|
||||
rm -fr /usr/sbin/addgroup /usr/sbin/adduser /bin/chown
|
||||
fi
|
||||
chown -R $SERVICE_NAME:$SERVICE_NAME /home/$SERVICE_NAME || true
|
||||
echo "######################################################"
|
||||
echo "#"
|
||||
|
||||
|
||||
19
app/hardening_base.sh
Normal file
19
app/hardening_base.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/bin/sh
|
||||
|
||||
rm -fr /var/spool/cron
|
||||
rm -fr /etc/crontabs
|
||||
rm -fr /etc/periodic
|
||||
|
||||
# Remove every user and group but root
|
||||
sed -i -r '/^(root)/!d' /etc/group
|
||||
sed -i -r '/^(root)/!d' /etc/passwd
|
||||
|
||||
# Remove init scripts since we do not use them.
|
||||
rm -fr /etc/inittab
|
||||
|
||||
# Remove kernel tunables since we do not need them.
|
||||
rm -fr /etc/sysctl*
|
||||
rm -fr /etc/modprobe.d
|
||||
|
||||
# Remove fstab since we do not need it.
|
||||
rm -f /etc/fstab
|
||||
21
app/hardening_final.sh
Normal file
21
app/hardening_final.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
#!/bin/sh
|
||||
# For production images delete all uneeded admin commands and remove dangerous commands.
|
||||
# addgroup, adduser and chmod will be removed in entrypoint.sh during first start
|
||||
# su-exec will be needed for ever restart of the cotainer
|
||||
if [ "$environment" = "production" ] ; then \
|
||||
find /sbin /usr/sbin ! -type d \
|
||||
-a ! -name addgroup \
|
||||
-a ! -name adduser \
|
||||
-a ! -name nologin \
|
||||
-a ! -name su-exec \
|
||||
-delete; \
|
||||
find /bin /usr/bin -xdev \( \
|
||||
-name chgrp -o \
|
||||
-name chmod -o \
|
||||
-name hexdump -o \
|
||||
-name ln -o \
|
||||
-name od -o \
|
||||
-name strings -o \
|
||||
-name su -o \
|
||||
\) -delete \
|
||||
; fi
|
||||
260
app/proxy.svg
Normal file
260
app/proxy.svg
Normal file
@@ -0,0 +1,260 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- Generated by graphviz version 2.40.1 (20161225.0304)
|
||||
-->
|
||||
<!-- Title: G Pages: 1 -->
|
||||
<svg width="626pt" height="966pt"
|
||||
viewBox="0.00 0.00 625.50 966.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 962)">
|
||||
<title>G</title>
|
||||
<polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-962 621.5,-962 621.5,4 -4,4"/>
|
||||
<!-- A0 -->
|
||||
<g id="node1" class="node">
|
||||
<title>A0</title>
|
||||
<polygon fill="#fff8dc" stroke="#000000" points="191.6964,-934 83.3036,-934 83.3036,-898 197.6964,-898 197.6964,-928 191.6964,-934"/>
|
||||
<polyline fill="none" stroke="#000000" points="191.6964,-934 191.6964,-928 "/>
|
||||
<polyline fill="none" stroke="#000000" points="197.6964,-928 191.6964,-928 "/>
|
||||
<text text-anchor="middle" x="140.5" y="-919" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">You can stick notes</text>
|
||||
<text text-anchor="middle" x="140.5" y="-907" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">on diagrams too!</text>
|
||||
</g>
|
||||
<!-- A1 -->
|
||||
<g id="node2" class="node">
|
||||
<title>A1</title>
|
||||
<polygon fill="none" stroke="#000000" points="215.5,-926 215.5,-958 331.5,-958 331.5,-926 215.5,-926"/>
|
||||
<text text-anchor="start" x="225.149" y="-939" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<AbstractIterMeta>></text>
|
||||
<polygon fill="none" stroke="#000000" points="215.5,-906 215.5,-926 331.5,-926 331.5,-906 215.5,-906"/>
|
||||
<polygon fill="none" stroke="#000000" points="215.5,-874 215.5,-906 331.5,-906 331.5,-874 215.5,-874"/>
|
||||
<text text-anchor="start" x="252.11" y="-887" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__iter__()</text>
|
||||
</g>
|
||||
<!-- A4 -->
|
||||
<g id="node5" class="node">
|
||||
<title>A4</title>
|
||||
<polygon fill="none" stroke="#000000" points="178.5,-726 178.5,-758 369.5,-758 369.5,-726 178.5,-726"/>
|
||||
<text text-anchor="start" x="240.0965" y="-739" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<InverterIfc>></text>
|
||||
<polygon fill="none" stroke="#000000" points="178.5,-706 178.5,-726 369.5,-726 369.5,-706 178.5,-706"/>
|
||||
<polygon fill="none" stroke="#000000" points="178.5,-650 178.5,-706 369.5,-706 369.5,-650 178.5,-650"/>
|
||||
<text text-anchor="start" x="240.522" y="-687" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()->bool</text>
|
||||
<text text-anchor="start" x="188.2835" y="-675" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>disc(shutdown_started=False)</text>
|
||||
<text text-anchor="start" x="219.544" y="-663" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>create_remote()</text>
|
||||
</g>
|
||||
<!-- A1->A4 -->
|
||||
<g id="edge1" class="edge">
|
||||
<title>A1->A4</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M273.5,-863.7744C273.5,-831.6663 273.5,-790.6041 273.5,-758.1476"/>
|
||||
<polygon fill="none" stroke="#000000" points="270.0001,-863.8621 273.5,-873.8622 277.0001,-863.8622 270.0001,-863.8621"/>
|
||||
</g>
|
||||
<!-- A2 -->
|
||||
<g id="node3" class="node">
|
||||
<title>A2</title>
|
||||
<polygon fill="none" stroke="#000000" points="441.5,-454 441.5,-498 563.5,-498 563.5,-454 441.5,-454"/>
|
||||
<text text-anchor="start" x="492.777" y="-479" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Mqtt</text>
|
||||
<text text-anchor="start" x="469.9815" y="-467" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<Singleton>></text>
|
||||
<polygon fill="none" stroke="#000000" points="441.5,-398 441.5,-454 563.5,-454 563.5,-398 441.5,-398"/>
|
||||
<text text-anchor="start" x="459.9875" y="-435" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><static>ha_restarts</text>
|
||||
<text text-anchor="start" x="467.7665" y="-423" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><static>__client</text>
|
||||
<text text-anchor="start" x="451.3735" y="-411" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><static>__cb_MqttIsUp</text>
|
||||
<polygon fill="none" stroke="#000000" points="441.5,-354 441.5,-398 563.5,-398 563.5,-354 441.5,-354"/>
|
||||
<text text-anchor="start" x="464.436" y="-379" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>publish()</text>
|
||||
<text text-anchor="start" x="468.6045" y="-367" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>close()</text>
|
||||
</g>
|
||||
<!-- A3 -->
|
||||
<g id="node4" class="node">
|
||||
<title>A3</title>
|
||||
<polygon fill="none" stroke="#000000" points="387.5,-792 387.5,-824 617.5,-824 617.5,-792 387.5,-792"/>
|
||||
<text text-anchor="start" x="489.7215" y="-805" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Proxy</text>
|
||||
<polygon fill="none" stroke="#000000" points="387.5,-676 387.5,-792 617.5,-792 617.5,-676 387.5,-676"/>
|
||||
<text text-anchor="start" x="474.1545" y="-773" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><cls>db_stat</text>
|
||||
<text text-anchor="start" x="467.491" y="-761" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><cls>entity_prfx</text>
|
||||
<text text-anchor="start" x="458.326" y="-749" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><cls>discovery_prfx</text>
|
||||
<text text-anchor="start" x="457.762" y="-737" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><cls>proxy_node_id</text>
|
||||
<text text-anchor="start" x="453.873" y="-725" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><cls>proxy_unique_id</text>
|
||||
<text text-anchor="start" x="469.716" y="-713" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><cls>mqtt:Mqtt</text>
|
||||
<text text-anchor="start" x="471.9355" y="-689" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__ha_restarts</text>
|
||||
<polygon fill="none" stroke="#000000" points="387.5,-584 387.5,-676 617.5,-676 617.5,-584 387.5,-584"/>
|
||||
<text text-anchor="start" x="478.6145" y="-657" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">class_init()</text>
|
||||
<text text-anchor="start" x="473.334" y="-645" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">class_close()</text>
|
||||
<text text-anchor="start" x="444.984" y="-621" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>_cb_mqtt_is_up()</text>
|
||||
<text text-anchor="start" x="397.197" y="-609" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>_register_proxy_stat_home_assistant()</text>
|
||||
<text text-anchor="start" x="406.084" y="-597" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>_async_publ_mqtt_proxy_stat(key)</text>
|
||||
</g>
|
||||
<!-- A3->A2 -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>A3->A2</title>
|
||||
<path fill="none" stroke="#000000" d="M502.5,-571.373C502.5,-549.9571 502.5,-528.339 502.5,-508.5579"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="502.5001,-571.682 506.5,-577.6821 502.5,-583.682 498.5,-577.682 502.5001,-571.682"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="502.5,-498.392 507.0001,-508.3919 502.5,-503.392 502.5001,-508.392 502.5001,-508.392 502.5001,-508.392 502.5,-503.392 498.0001,-508.392 502.5,-498.392 502.5,-498.392"/>
|
||||
</g>
|
||||
<!-- A5 -->
|
||||
<g id="node6" class="node">
|
||||
<title>A5</title>
|
||||
<polygon fill="none" stroke="#000000" points="205.5,-502 205.5,-534 396.5,-534 396.5,-502 205.5,-502"/>
|
||||
<text text-anchor="start" x="272.66" y="-515" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InverterBase</text>
|
||||
<polygon fill="none" stroke="#000000" points="205.5,-386 205.5,-502 396.5,-502 396.5,-386 205.5,-386"/>
|
||||
<text text-anchor="start" x="281.8335" y="-483" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_registry</text>
|
||||
<text text-anchor="start" x="270.4355" y="-471" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__ha_restarts</text>
|
||||
<text text-anchor="start" x="290.997" y="-447" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="274.0505" y="-435" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">config_id:str</text>
|
||||
<text text-anchor="start" x="247.3785" y="-423" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">prot_class:MessageProt</text>
|
||||
<text text-anchor="start" x="261.553" y="-411" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remote:StreamPtr</text>
|
||||
<text text-anchor="start" x="266.832" y="-399" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">local:StreamPtr</text>
|
||||
<polygon fill="none" stroke="#000000" points="205.5,-318 205.5,-386 396.5,-386 396.5,-318 205.5,-318"/>
|
||||
<text text-anchor="start" x="267.522" y="-367" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()->bool</text>
|
||||
<text text-anchor="start" x="215.2835" y="-355" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>disc(shutdown_started=False)</text>
|
||||
<text text-anchor="start" x="246.544" y="-343" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>create_remote()</text>
|
||||
<text text-anchor="start" x="240.984" y="-331" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>async_publ_mqtt()</text>
|
||||
</g>
|
||||
<!-- A3->A5 -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>A3->A5</title>
|
||||
<path fill="none" stroke="#000000" d="M409.1791,-575.5683C399.1409,-561.7533 389.0008,-547.7982 379.1588,-534.2532"/>
|
||||
<polygon fill="none" stroke="#000000" points="406.3649,-577.6495 415.0747,-583.682 412.0279,-573.5347 406.3649,-577.6495"/>
|
||||
</g>
|
||||
<!-- A4->A5 -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>A4->A5</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M279.7719,-639.4228C282.8086,-608.1559 286.5373,-569.7639 289.991,-534.2034"/>
|
||||
<polygon fill="none" stroke="#000000" points="276.2531,-639.4473 278.77,-649.7389 283.2203,-640.1241 276.2531,-639.4473"/>
|
||||
</g>
|
||||
<!-- A6 -->
|
||||
<g id="node7" class="node">
|
||||
<title>A6</title>
|
||||
<polygon fill="none" stroke="#000000" points="356.5,-236 356.5,-268 456.5,-268 456.5,-236 356.5,-236"/>
|
||||
<text text-anchor="start" x="383.9995" y="-249" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">StreamPtr</text>
|
||||
<polygon fill="none" stroke="#000000" points="356.5,-216 356.5,-236 456.5,-236 456.5,-216 356.5,-216"/>
|
||||
<polygon fill="none" stroke="#000000" points="356.5,-172 356.5,-216 456.5,-216 456.5,-172 356.5,-172"/>
|
||||
<text text-anchor="start" x="366.2175" y="-197" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">stream:ProtocolIfc</text>
|
||||
<text text-anchor="start" x="381.2185" y="-185" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ifc:AsyncIfc</text>
|
||||
</g>
|
||||
<!-- A5->A6 -->
|
||||
<g id="edge8" class="edge">
|
||||
<title>A5->A6</title>
|
||||
<path fill="none" stroke="#000000" d="M356.1387,-317.872C363.3786,-303.802 370.5526,-289.86 377.1187,-277.0995"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="381.7846,-268.0318 381.2105,-278.9826 379.4969,-272.4777 377.2091,-276.9237 377.2091,-276.9237 377.2091,-276.9237 379.4969,-272.4777 373.2078,-274.8647 381.7846,-268.0318 381.7846,-268.0318"/>
|
||||
<text text-anchor="middle" x="381.0069" y="-285.0166" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">2</text>
|
||||
</g>
|
||||
<!-- A7 -->
|
||||
<g id="node8" class="node">
|
||||
<title>A7</title>
|
||||
<polygon fill="none" stroke="#000000" points="338.2314,-238 262.7686,-238 262.7686,-202 338.2314,-202 338.2314,-238"/>
|
||||
<text text-anchor="middle" x="300.5" y="-217" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InverterG3</text>
|
||||
</g>
|
||||
<!-- A5->A7 -->
|
||||
<g id="edge5" class="edge">
|
||||
<title>A5->A7</title>
|
||||
<path fill="none" stroke="#000000" d="M300.5,-307.7729C300.5,-280.5002 300.5,-254.684 300.5,-238.2013"/>
|
||||
<polygon fill="none" stroke="#000000" points="297.0001,-307.872 300.5,-317.872 304.0001,-307.872 297.0001,-307.872"/>
|
||||
</g>
|
||||
<!-- A9 -->
|
||||
<g id="node10" class="node">
|
||||
<title>A9</title>
|
||||
<polygon fill="none" stroke="#000000" points="94.4001,-238 12.5999,-238 12.5999,-202 94.4001,-202 94.4001,-238"/>
|
||||
<text text-anchor="middle" x="53.5" y="-217" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InverterG3P</text>
|
||||
</g>
|
||||
<!-- A5->A9 -->
|
||||
<g id="edge6" class="edge">
|
||||
<title>A5->A9</title>
|
||||
<path fill="none" stroke="#000000" d="M196.7667,-346.4637C165.8973,-321.9347 132.3582,-294.4156 102.5,-268 91.7971,-258.5312 80.3616,-247.3925 71.232,-238.23"/>
|
||||
<polygon fill="none" stroke="#000000" points="194.962,-349.4991 204.9739,-352.965 199.3086,-344.0121 194.962,-349.4991"/>
|
||||
</g>
|
||||
<!-- A11 -->
|
||||
<g id="node12" class="node">
|
||||
<title>A11</title>
|
||||
<polygon fill="none" stroke="#000000" points="450.1421,-36 360.8579,-36 360.8579,0 450.1421,0 450.1421,-36"/>
|
||||
<text text-anchor="middle" x="405.5" y="-15" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<AsyncIfc>></text>
|
||||
</g>
|
||||
<!-- A6->A11 -->
|
||||
<g id="edge11" class="edge">
|
||||
<title>A6->A11</title>
|
||||
<path fill="none" stroke="#000000" d="M392.6633,-171.974C386.9982,-146.4565 382.5868,-114.547 386.5,-86 388.3468,-72.5276 392.161,-57.9618 395.8907,-45.7804"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="398.9587,-36.1851 400.1994,-47.0805 397.4359,-40.9476 395.9131,-45.71 395.9131,-45.71 395.9131,-45.71 397.4359,-40.9476 391.6269,-44.3395 398.9587,-36.1851 398.9587,-36.1851"/>
|
||||
<text text-anchor="middle" x="401.4892" y="-53.0243" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">1</text>
|
||||
</g>
|
||||
<!-- A12 -->
|
||||
<g id="node13" class="node">
|
||||
<title>A12</title>
|
||||
<polygon fill="none" stroke="#000000" points="493.5879,-122 395.4121,-122 395.4121,-86 493.5879,-86 493.5879,-122"/>
|
||||
<text text-anchor="middle" x="444.5" y="-101" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<ProtocolIfc>></text>
|
||||
</g>
|
||||
<!-- A6->A12 -->
|
||||
<g id="edge10" class="edge">
|
||||
<title>A6->A12</title>
|
||||
<path fill="none" stroke="#000000" d="M422.2853,-171.8133C426.7329,-158.2365 431.4225,-143.9208 435.3408,-131.9595"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="438.5602,-122.132 439.7235,-133.036 437.0036,-126.8835 435.4471,-131.6351 435.4471,-131.6351 435.4471,-131.6351 437.0036,-126.8835 431.1707,-130.2341 438.5602,-122.132 438.5602,-122.132"/>
|
||||
<text text-anchor="middle" x="440.9498" y="-138.9887" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">1</text>
|
||||
</g>
|
||||
<!-- A8 -->
|
||||
<g id="node9" class="node">
|
||||
<title>A8</title>
|
||||
<polygon fill="#fff8dc" stroke="#000000" points="574.906,-248 474.094,-248 474.094,-192 580.906,-192 580.906,-242 574.906,-248"/>
|
||||
<polyline fill="none" stroke="#000000" points="574.906,-248 574.906,-242 "/>
|
||||
<polyline fill="none" stroke="#000000" points="580.906,-242 574.906,-242 "/>
|
||||
<text text-anchor="middle" x="527.5" y="-235" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Creates an GEN3</text>
|
||||
<text text-anchor="middle" x="527.5" y="-223" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">inverter instance</text>
|
||||
<text text-anchor="middle" x="527.5" y="-211" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">with</text>
|
||||
<text text-anchor="middle" x="527.5" y="-199" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">prot_class:Talent</text>
|
||||
</g>
|
||||
<!-- A7->A8 -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>A7->A8</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M308.5491,-238.3283C317.4345,-256.0056 333.5793,-281.6949 356.5,-293 396.3598,-312.6598 415.5578,-310.2929 456.5,-293 478.1607,-283.8511 496.4784,-264.5049 509.0802,-248.0264"/>
|
||||
</g>
|
||||
<!-- A10 -->
|
||||
<g id="node11" class="node">
|
||||
<title>A10</title>
|
||||
<polygon fill="#fff8dc" stroke="#000000" points="239.022,-248 111.978,-248 111.978,-192 245.022,-192 245.022,-242 239.022,-248"/>
|
||||
<polyline fill="none" stroke="#000000" points="239.022,-248 239.022,-242 "/>
|
||||
<polyline fill="none" stroke="#000000" points="245.022,-242 239.022,-242 "/>
|
||||
<text text-anchor="middle" x="178.5" y="-235" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Creates an GEN3PLUS</text>
|
||||
<text text-anchor="middle" x="178.5" y="-223" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">inverter instance</text>
|
||||
<text text-anchor="middle" x="178.5" y="-211" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">with</text>
|
||||
<text text-anchor="middle" x="178.5" y="-199" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">prot_class:SolarmanV5</text>
|
||||
</g>
|
||||
<!-- A9->A10 -->
|
||||
<g id="edge4" class="edge">
|
||||
<title>A9->A10</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M94.5156,-220C100.3114,-220 106.1072,-220 111.903,-220"/>
|
||||
</g>
|
||||
<!-- A12->A11 -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>A12->A11</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M436.2291,-85.7616C430.9033,-74.0176 423.8824,-58.5355 417.896,-45.3349"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="413.759,-36.2121 421.9874,-43.4608 415.824,-40.7657 417.8891,-45.3194 417.8891,-45.3194 417.8891,-45.3194 415.824,-40.7657 413.7908,-47.1779 413.759,-36.2121 413.759,-36.2121"/>
|
||||
<text text-anchor="middle" x="421.0451" y="-69.7445" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">use</text>
|
||||
</g>
|
||||
<!-- A13 -->
|
||||
<g id="node14" class="node">
|
||||
<title>A13</title>
|
||||
<polygon fill="none" stroke="#000000" points=".5,-454 .5,-486 107.5,-486 107.5,-454 .5,-454"/>
|
||||
<text text-anchor="start" x="24.2695" y="-467" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ModbusConn</text>
|
||||
<polygon fill="none" stroke="#000000" points=".5,-386 .5,-454 107.5,-454 107.5,-386 .5,-386"/>
|
||||
<text text-anchor="start" x="44.5515" y="-435" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">host</text>
|
||||
<text text-anchor="start" x="45.387" y="-423" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">port</text>
|
||||
<text text-anchor="start" x="43.997" y="-411" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="10.383" y="-399" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">stream:InverterG3P</text>
|
||||
<polygon fill="none" stroke="#000000" points=".5,-366 .5,-386 107.5,-386 107.5,-366 .5,-366"/>
|
||||
</g>
|
||||
<!-- A13->A9 -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>A13->A9</title>
|
||||
<path fill="none" stroke="#000000" d="M53.5,-365.8625C53.5,-327.1513 53.5,-278.6088 53.5,-248.4442"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="53.5,-238.2147 58.0001,-248.2147 53.5,-243.2147 53.5001,-248.2147 53.5001,-248.2147 53.5001,-248.2147 53.5,-243.2147 49.0001,-248.2148 53.5,-238.2147 53.5,-238.2147"/>
|
||||
<text text-anchor="middle" x="61.9524" y="-253.3409" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">1</text>
|
||||
<text text-anchor="middle" x="45.0476" y="-344.7363" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">has</text>
|
||||
</g>
|
||||
<!-- A14 -->
|
||||
<g id="node15" class="node">
|
||||
<title>A14</title>
|
||||
<polygon fill="none" stroke="#000000" points="93.7333,-722 13.2667,-722 13.2667,-686 93.7333,-686 93.7333,-722"/>
|
||||
<text text-anchor="middle" x="53.5" y="-701" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ModbusTcp</text>
|
||||
</g>
|
||||
<!-- A14->A13 -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>A14->A13</title>
|
||||
<path fill="none" stroke="#000000" d="M53.5,-685.7596C53.5,-647.9991 53.5,-559.5189 53.5,-496.3277"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="53.5,-486.0223 58.0001,-496.0223 53.5,-491.0223 53.5001,-496.0223 53.5001,-496.0223 53.5001,-496.0223 53.5,-491.0223 49.0001,-496.0224 53.5,-486.0223 53.5,-486.0223"/>
|
||||
<text text-anchor="middle" x="61.9524" y="-501.1485" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">*</text>
|
||||
<text text-anchor="middle" x="45.0476" y="-664.6335" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">creates</text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 20 KiB |
36
app/proxy.yuml
Normal file
36
app/proxy.yuml
Normal file
@@ -0,0 +1,36 @@
|
||||
// {type:class}
|
||||
// {direction:topDown}
|
||||
// {generate:true}
|
||||
|
||||
[note: You can stick notes on diagrams too!{bg:cornsilk}]
|
||||
[<<AbstractIterMeta>>||__iter__()]
|
||||
|
||||
[Mqtt;<<Singleton>>|<static>ha_restarts;<static>__client;<static>__cb_MqttIsUp|<async>publish();<async>close()]
|
||||
[Proxy|<cls>db_stat;<cls>entity_prfx;<cls>discovery_prfx;<cls>proxy_node_id;<cls>proxy_unique_id;<cls>mqtt:Mqtt;;__ha_restarts|class_init();class_close();;<async>_cb_mqtt_is_up();<async>_register_proxy_stat_home_assistant();<async>_async_publ_mqtt_proxy_stat(key)]
|
||||
|
||||
[<<InverterIfc>>||healthy()->bool;<async>disc(shutdown_started=False);<async>create_remote();]
|
||||
[<<AbstractIterMeta>>]^-.-[<<InverterIfc>>]
|
||||
[InverterBase|_registry;__ha_restarts;;addr;config_id:str;prot_class:MessageProt;remote:StreamPtr;local:StreamPtr;|healthy()->bool;<async>disc(shutdown_started=False);<async>create_remote();<async>async_publ_mqtt()]
|
||||
[StreamPtr||stream:ProtocolIfc;ifc:AsyncIfc]
|
||||
[<<InverterIfc>>]^-.-[InverterBase]
|
||||
[InverterG3]-[note: Creates an GEN3 inverter instance with prot_class:Talent{bg:cornsilk}]
|
||||
[InverterG3P]-[note: Creates an GEN3PLUS inverter instance with prot_class:SolarmanV5{bg:cornsilk}]
|
||||
[InverterBase]^[InverterG3]
|
||||
[InverterBase]^[InverterG3P]
|
||||
[Proxy]^[InverterBase]
|
||||
[InverterBase]-2>[StreamPtr]
|
||||
[Proxy]++->[Mqtt;<<Singleton>>]
|
||||
|
||||
[<<AsyncIfc>>]
|
||||
|
||||
|
||||
[StreamPtr]-1>[<<ProtocolIfc>>]
|
||||
[StreamPtr]-1>[<<AsyncIfc>>]
|
||||
|
||||
|
||||
[<<ProtocolIfc>>]use-.->[<<AsyncIfc>>]
|
||||
|
||||
|
||||
[ModbusConn|host;port;addr;stream:InverterG3P;|]has-1>[InverterG3P]
|
||||
[ModbusTcp]creates-*>[ModbusConn]
|
||||
|
||||
371
app/proxy_2.svg
Normal file
371
app/proxy_2.svg
Normal file
@@ -0,0 +1,371 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- Generated by graphviz version 2.40.1 (20161225.0304)
|
||||
-->
|
||||
<!-- Title: G Pages: 1 -->
|
||||
<svg width="539pt" height="2000pt"
|
||||
viewBox="0.00 0.00 538.57 2000.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1996)">
|
||||
<title>G</title>
|
||||
<polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-1996 534.566,-1996 534.566,4 -4,4"/>
|
||||
<!-- A0 -->
|
||||
<g id="node1" class="node">
|
||||
<title>A0</title>
|
||||
<polygon fill="#fff8dc" stroke="#000000" points="98.1981,-1972 -.0661,-1972 -.0661,-1928 104.1981,-1928 104.1981,-1966 98.1981,-1972"/>
|
||||
<polyline fill="none" stroke="#000000" points="98.1981,-1972 98.1981,-1966 "/>
|
||||
<polyline fill="none" stroke="#000000" points="104.1981,-1966 98.1981,-1966 "/>
|
||||
<text text-anchor="middle" x="52.066" y="-1959" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Example of</text>
|
||||
<text text-anchor="middle" x="52.066" y="-1947" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">instantiation for a</text>
|
||||
<text text-anchor="middle" x="52.066" y="-1935" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">GEN3 inverter!</text>
|
||||
</g>
|
||||
<!-- A1 -->
|
||||
<g id="node2" class="node">
|
||||
<title>A1</title>
|
||||
<polygon fill="none" stroke="#000000" points="122.066,-1960 122.066,-1992 238.066,-1992 238.066,-1960 122.066,-1960"/>
|
||||
<text text-anchor="start" x="131.715" y="-1973" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<AbstractIterMeta>></text>
|
||||
<polygon fill="none" stroke="#000000" points="122.066,-1940 122.066,-1960 238.066,-1960 238.066,-1940 122.066,-1940"/>
|
||||
<polygon fill="none" stroke="#000000" points="122.066,-1908 122.066,-1940 238.066,-1940 238.066,-1908 122.066,-1908"/>
|
||||
<text text-anchor="start" x="158.676" y="-1921" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__iter__()</text>
|
||||
</g>
|
||||
<!-- A14 -->
|
||||
<g id="node15" class="node">
|
||||
<title>A14</title>
|
||||
<polygon fill="none" stroke="#000000" points="135.066,-1748 135.066,-1780 225.066,-1780 225.066,-1748 135.066,-1748"/>
|
||||
<text text-anchor="start" x="144.7725" y="-1761" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<ProtocolIfc>></text>
|
||||
<polygon fill="none" stroke="#000000" points="135.066,-1716 135.066,-1748 225.066,-1748 225.066,-1716 135.066,-1716"/>
|
||||
<text text-anchor="start" x="160.8995" y="-1729" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_registry</text>
|
||||
<polygon fill="none" stroke="#000000" points="135.066,-1684 135.066,-1716 225.066,-1716 225.066,-1684 135.066,-1684"/>
|
||||
<text text-anchor="start" x="165.0685" y="-1697" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A1->A14 -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>A1->A14</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M180.066,-1897.756C180.066,-1862.0883 180.066,-1815.1755 180.066,-1780.3644"/>
|
||||
<polygon fill="none" stroke="#000000" points="176.5661,-1897.9674 180.066,-1907.9674 183.5661,-1897.9674 176.5661,-1897.9674"/>
|
||||
</g>
|
||||
<!-- A2 -->
|
||||
<g id="node3" class="node">
|
||||
<title>A2</title>
|
||||
<polygon fill="none" stroke="#000000" points="179.066,-662 179.066,-694 277.066,-694 277.066,-662 179.066,-662"/>
|
||||
<text text-anchor="start" x="204.4505" y="-675" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InverterG3</text>
|
||||
<polygon fill="none" stroke="#000000" points="179.066,-606 179.066,-662 277.066,-662 277.066,-606 179.066,-606"/>
|
||||
<text text-anchor="start" x="218.063" y="-643" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="188.619" y="-631" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remote:StreamPtr</text>
|
||||
<text text-anchor="start" x="193.898" y="-619" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">local:StreamPtr</text>
|
||||
<polygon fill="none" stroke="#000000" points="179.066,-550 179.066,-606 277.066,-606 277.066,-550 179.066,-550"/>
|
||||
<text text-anchor="start" x="192.508" y="-587" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">create_remote()</text>
|
||||
<text text-anchor="start" x="213.0685" y="-563" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A3 -->
|
||||
<g id="node4" class="node">
|
||||
<title>A3</title>
|
||||
<polygon fill="none" stroke="#000000" points="400.4026,-320 303.7294,-320 303.7294,-284 400.4026,-284 400.4026,-320"/>
|
||||
<text text-anchor="middle" x="352.066" y="-299" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">local:StreamPtr</text>
|
||||
</g>
|
||||
<!-- A2->A3 -->
|
||||
<g id="edge1" class="edge">
|
||||
<title>A2->A3</title>
|
||||
<path fill="none" stroke="#000000" d="M260.3657,-538.4062C268.7304,-516.7744 277.7293,-493.5168 286.066,-472 305.502,-421.8362 328.2143,-363.368 341.2906,-329.7205"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="260.2523,-538.6998 261.8194,-545.7386 255.9247,-549.8923 254.3577,-542.8536 260.2523,-538.6998"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="345.0251,-320.1117 345.5968,-331.0627 343.2138,-324.7721 341.4024,-329.4325 341.4024,-329.4325 341.4024,-329.4325 343.2138,-324.7721 337.2081,-327.8023 345.0251,-320.1117 345.0251,-320.1117"/>
|
||||
</g>
|
||||
<!-- A4 -->
|
||||
<g id="node5" class="node">
|
||||
<title>A4</title>
|
||||
<polygon fill="none" stroke="#000000" points="285.4601,-320 178.6719,-320 178.6719,-284 285.4601,-284 285.4601,-320"/>
|
||||
<text text-anchor="middle" x="232.066" y="-299" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remote:StreamPtr</text>
|
||||
</g>
|
||||
<!-- A2->A4 -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>A2->A4</title>
|
||||
<path fill="none" stroke="#000000" d="M229.12,-537.6831C229.9778,-469.0527 231.1375,-376.283 231.7124,-330.2853"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="229.1188,-537.7877 233.0434,-543.8372 228.9687,-549.7868 225.044,-543.7372 229.1188,-537.7877"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="231.839,-320.1609 236.2135,-330.2164 231.7764,-325.1605 231.7139,-330.1601 231.7139,-330.1601 231.7139,-330.1601 231.7764,-325.1605 227.2143,-330.1038 231.839,-320.1609 231.839,-320.1609"/>
|
||||
</g>
|
||||
<!-- A8 -->
|
||||
<g id="node9" class="node">
|
||||
<title>A8</title>
|
||||
<polygon fill="none" stroke="#000000" points="246.066,-100 246.066,-132 424.066,-132 424.066,-100 246.066,-100"/>
|
||||
<text text-anchor="start" x="290.6175" y="-113" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncStreamServer</text>
|
||||
<polygon fill="none" stroke="#000000" points="246.066,-68 246.066,-100 424.066,-100 424.066,-68 246.066,-68"/>
|
||||
<text text-anchor="start" x="302.837" y="-81" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">create_remote</text>
|
||||
<polygon fill="none" stroke="#000000" points="246.066,0 246.066,-68 424.066,-68 424.066,0 246.066,0"/>
|
||||
<text text-anchor="start" x="286.7235" y="-49" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>server_loop()</text>
|
||||
<text text-anchor="start" x="277.5545" y="-37" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>_async_forward()</text>
|
||||
<text text-anchor="start" x="255.875" y="-25" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>publish_outstanding_mqtt()</text>
|
||||
<text text-anchor="start" x="320.0685" y="-13" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A3->A8 -->
|
||||
<g id="edge8" class="edge">
|
||||
<title>A3->A8</title>
|
||||
<path fill="none" stroke="#000000" d="M349.8809,-271.6651C347.5364,-239.1181 343.722,-186.1658 340.5509,-142.1431"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="349.898,-271.9044 354.3188,-277.6014 350.7603,-283.8733 346.3395,-278.1763 349.898,-271.9044"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="339.8226,-132.0321 345.0295,-141.6829 340.1818,-137.0192 340.5411,-142.0063 340.5411,-142.0063 340.5411,-142.0063 340.1818,-137.0192 336.0527,-142.3296 339.8226,-132.0321 339.8226,-132.0321"/>
|
||||
</g>
|
||||
<!-- A9 -->
|
||||
<g id="node10" class="node">
|
||||
<title>A9</title>
|
||||
<polygon fill="none" stroke="#000000" points="74.066,-82 74.066,-114 212.066,-114 212.066,-82 74.066,-82"/>
|
||||
<text text-anchor="start" x="100.563" y="-95" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncStreamClient</text>
|
||||
<polygon fill="none" stroke="#000000" points="74.066,-62 74.066,-82 212.066,-82 212.066,-62 74.066,-62"/>
|
||||
<polygon fill="none" stroke="#000000" points="74.066,-18 74.066,-62 212.066,-62 212.066,-18 74.066,-18"/>
|
||||
<text text-anchor="start" x="96.944" y="-43" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>client_loop()</text>
|
||||
<text text-anchor="start" x="83.89" y="-31" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>_async_forward())</text>
|
||||
</g>
|
||||
<!-- A4->A9 -->
|
||||
<g id="edge10" class="edge">
|
||||
<title>A4->A9</title>
|
||||
<path fill="none" stroke="#000000" d="M225.2301,-283.8733C212.4699,-250.0372 184.5329,-175.9573 164.7878,-123.5994"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="161.2018,-114.0904 168.941,-121.8593 162.9661,-118.7688 164.7305,-123.4472 164.7305,-123.4472 164.7305,-123.4472 162.9661,-118.7688 160.5199,-125.0351 161.2018,-114.0904 161.2018,-114.0904"/>
|
||||
<text text-anchor="middle" x="210.9254" y="-266.8956" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">0..1</text>
|
||||
</g>
|
||||
<!-- A5 -->
|
||||
<g id="node6" class="node">
|
||||
<title>A5</title>
|
||||
<polygon fill="none" stroke="#000000" points="129.066,-1114 129.066,-1146 246.066,-1146 246.066,-1114 129.066,-1114"/>
|
||||
<text text-anchor="start" x="156.995" y="-1127" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<AsyncIfc>></text>
|
||||
<polygon fill="none" stroke="#000000" points="129.066,-1094 129.066,-1114 246.066,-1114 246.066,-1094 129.066,-1094"/>
|
||||
<polygon fill="none" stroke="#000000" points="129.066,-822 129.066,-1094 246.066,-1094 246.066,-822 129.066,-822"/>
|
||||
<text text-anchor="start" x="157.002" y="-1075" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">set_node_id()</text>
|
||||
<text text-anchor="start" x="155.332" y="-1063" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">get_conn_no()</text>
|
||||
<text text-anchor="start" x="169.2295" y="-1039" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_add()</text>
|
||||
<text text-anchor="start" x="167.01" y="-1027" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_flush()</text>
|
||||
<text text-anchor="start" x="170.6195" y="-1015" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_get()</text>
|
||||
<text text-anchor="start" x="166.7295" y="-1003" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_peek()</text>
|
||||
<text text-anchor="start" x="170.8995" y="-991" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_log()</text>
|
||||
<text text-anchor="start" x="166.735" y="-979" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_clear()</text>
|
||||
<text text-anchor="start" x="170.8995" y="-967" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_len()</text>
|
||||
<text text-anchor="start" x="165.3405" y="-943" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fwd_add()</text>
|
||||
<text text-anchor="start" x="167.0105" y="-931" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fwd_log()</text>
|
||||
<text text-anchor="start" x="170.3445" y="-919" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_get()</text>
|
||||
<text text-anchor="start" x="166.4545" y="-907" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_peek()</text>
|
||||
<text text-anchor="start" x="170.6245" y="-895" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_log()</text>
|
||||
<text text-anchor="start" x="166.46" y="-883" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_clear()</text>
|
||||
<text text-anchor="start" x="170.6245" y="-871" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_len()</text>
|
||||
<text text-anchor="start" x="162.565" y="-859" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_set_cb()</text>
|
||||
<text text-anchor="start" x="138.9455" y="-835" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">prot_set_timeout_cb()</text>
|
||||
</g>
|
||||
<!-- A6 -->
|
||||
<g id="node7" class="node">
|
||||
<title>A6</title>
|
||||
<polygon fill="none" stroke="#000000" points="66.066,-652 66.066,-684 159.066,-684 159.066,-652 66.066,-652"/>
|
||||
<text text-anchor="start" x="84.23" y="-665" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncIfcImpl</text>
|
||||
<polygon fill="none" stroke="#000000" points="66.066,-560 66.066,-652 159.066,-652 159.066,-560 66.066,-560"/>
|
||||
<text text-anchor="start" x="75.614" y="-633" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fwd_fifo:ByteFifo</text>
|
||||
<text text-anchor="start" x="79.503" y="-621" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_fifo:ByteFifo</text>
|
||||
<text text-anchor="start" x="79.228" y="-609" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_fifo:ByteFifo</text>
|
||||
<text text-anchor="start" x="78.662" y="-597" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">conn_no:Count</text>
|
||||
<text text-anchor="start" x="94.7795" y="-585" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">node_id</text>
|
||||
<text text-anchor="start" x="88.1155" y="-573" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">timeout_cb</text>
|
||||
</g>
|
||||
<!-- A5->A6 -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>A5->A6</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M151.3775,-811.7434C141.9017,-766.0069 132.2713,-719.5241 124.914,-684.013"/>
|
||||
<polygon fill="none" stroke="#000000" points="148.0039,-812.7126 153.4599,-821.7945 154.8583,-811.2924 148.0039,-812.7126"/>
|
||||
</g>
|
||||
<!-- A7 -->
|
||||
<g id="node8" class="node">
|
||||
<title>A7</title>
|
||||
<polygon fill="none" stroke="#000000" points="59.066,-390 59.066,-422 161.066,-422 161.066,-390 59.066,-390"/>
|
||||
<text text-anchor="start" x="80.34" y="-403" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncStream</text>
|
||||
<polygon fill="none" stroke="#000000" points="59.066,-310 59.066,-390 161.066,-390 161.066,-310 59.066,-310"/>
|
||||
<text text-anchor="start" x="95.619" y="-371" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">reader</text>
|
||||
<text text-anchor="start" x="97.849" y="-359" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">writer</text>
|
||||
<text text-anchor="start" x="100.063" y="-347" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="95.619" y="-335" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">r_addr</text>
|
||||
<text text-anchor="start" x="96.174" y="-323" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">l_addr</text>
|
||||
<polygon fill="none" stroke="#000000" points="59.066,-182 59.066,-310 161.066,-310 161.066,-182 59.066,-182"/>
|
||||
<text text-anchor="start" x="81.72" y="-279" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>loop</text>
|
||||
<text text-anchor="start" x="97.848" y="-267" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">disc()</text>
|
||||
<text text-anchor="start" x="95.0685" y="-255" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
<text text-anchor="start" x="90.62" y="-243" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()</text>
|
||||
<text text-anchor="start" x="75.3365" y="-219" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_read()</text>
|
||||
<text text-anchor="start" x="74.787" y="-207" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_write()</text>
|
||||
<text text-anchor="start" x="68.673" y="-195" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_forward()</text>
|
||||
</g>
|
||||
<!-- A6->A7 -->
|
||||
<g id="edge4" class="edge">
|
||||
<title>A6->A7</title>
|
||||
<path fill="none" stroke="#000000" d="M111.6134,-549.5774C111.3784,-511.9877 111.0852,-465.0771 110.8174,-422.2295"/>
|
||||
<polygon fill="none" stroke="#000000" points="108.1155,-549.9435 111.678,-559.9214 115.1153,-549.8996 108.1155,-549.9435"/>
|
||||
</g>
|
||||
<!-- A7->A8 -->
|
||||
<g id="edge5" class="edge">
|
||||
<title>A7->A8</title>
|
||||
<path fill="none" stroke="#000000" d="M167.5272,-185.0204C168.3649,-184.0001 169.2111,-182.9929 170.066,-182 191.4283,-157.1889 219.1964,-135.0276 245.8416,-116.8901"/>
|
||||
<polygon fill="none" stroke="#000000" points="164.637,-183.0361 161.2751,-193.0834 170.1688,-187.3255 164.637,-183.0361"/>
|
||||
</g>
|
||||
<!-- A7->A9 -->
|
||||
<g id="edge6" class="edge">
|
||||
<title>A7->A9</title>
|
||||
<path fill="none" stroke="#000000" d="M128.2709,-171.8077C131.1447,-151.2556 133.9487,-131.2022 136.3294,-114.1772"/>
|
||||
<polygon fill="none" stroke="#000000" points="124.7747,-171.5375 126.856,-181.9259 131.7072,-172.5069 124.7747,-171.5375"/>
|
||||
</g>
|
||||
<!-- A10 -->
|
||||
<g id="node11" class="node">
|
||||
<title>A10</title>
|
||||
<polygon fill="none" stroke="#000000" points="295.066,-740 295.066,-772 409.066,-772 409.066,-740 295.066,-740"/>
|
||||
<text text-anchor="start" x="338.174" y="-753" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Talent</text>
|
||||
<polygon fill="none" stroke="#000000" points="295.066,-600 295.066,-740 409.066,-740 409.066,-600 295.066,-600"/>
|
||||
<text text-anchor="start" x="332.889" y="-721" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">conn_no</text>
|
||||
<text text-anchor="start" x="342.063" y="-709" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="304.829" y="-685" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">await_conn_resp_cnt</text>
|
||||
<text text-anchor="start" x="339.8435" y="-673" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">id_str</text>
|
||||
<text text-anchor="start" x="320.666" y="-661" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">contact_name</text>
|
||||
<text text-anchor="start" x="324.006" y="-649" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">contact_mail</text>
|
||||
<text text-anchor="start" x="327.6105" y="-637" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">db:InfosG3</text>
|
||||
<text text-anchor="start" x="325.95" y="-625" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb:Modbus</text>
|
||||
<text text-anchor="start" x="338.178" y="-613" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">switch</text>
|
||||
<polygon fill="none" stroke="#000000" points="295.066,-472 295.066,-600 409.066,-600 409.066,-472 295.066,-472"/>
|
||||
<text text-anchor="start" x="309.5585" y="-581" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">msg_contact_info()</text>
|
||||
<text text-anchor="start" x="311.4985" y="-569" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">msg_ota_update()</text>
|
||||
<text text-anchor="start" x="317.3425" y="-557" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">msg_get_time()</text>
|
||||
<text text-anchor="start" x="305.3945" y="-545" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">msg_collector_data()</text>
|
||||
<text text-anchor="start" x="307.3395" y="-533" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">msg_inverter_data()</text>
|
||||
<text text-anchor="start" x="316.5065" y="-521" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">msg_unknown()</text>
|
||||
<text text-anchor="start" x="332.62" y="-497" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()</text>
|
||||
<text text-anchor="start" x="337.0685" y="-485" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A10->A3 -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>A10->A3</title>
|
||||
<path fill="none" stroke="#000000" d="M352.066,-461.6172C352.066,-412.1611 352.066,-362.7538 352.066,-332.2961"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="352.066,-471.8382 347.5661,-461.8382 352.066,-466.8382 352.0661,-461.8382 352.0661,-461.8382 352.0661,-461.8382 352.066,-466.8382 356.5661,-461.8383 352.066,-471.8382 352.066,-471.8382"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="352.0661,-332.0807 348.066,-326.0808 352.066,-320.0807 356.066,-326.0807 352.0661,-332.0807"/>
|
||||
</g>
|
||||
<!-- A10->A4 -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>A10->A4</title>
|
||||
<path fill="none" stroke="#000000" d="M292.1869,-462.3225C270.8082,-405.3126 249.4091,-348.2482 238.8463,-320.0807"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="295.7553,-471.8382 288.0306,-464.055 293.9997,-467.1566 292.244,-462.4749 292.244,-462.4749 292.244,-462.4749 293.9997,-467.1566 296.4575,-460.8948 295.7553,-471.8382 295.7553,-471.8382"/>
|
||||
<text text-anchor="middle" x="253.125" y="-331.0849" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">0..1</text>
|
||||
</g>
|
||||
<!-- A12 -->
|
||||
<g id="node13" class="node">
|
||||
<title>A12</title>
|
||||
<polygon fill="none" stroke="#000000" points="432.066,-318 432.066,-350 499.066,-350 499.066,-318 432.066,-318"/>
|
||||
<text text-anchor="start" x="448.059" y="-331" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InfosG3</text>
|
||||
<polygon fill="none" stroke="#000000" points="432.066,-298 432.066,-318 499.066,-318 499.066,-298 432.066,-298"/>
|
||||
<polygon fill="none" stroke="#000000" points="432.066,-254 432.066,-298 499.066,-298 499.066,-254 432.066,-254"/>
|
||||
<text text-anchor="start" x="441.95" y="-279" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_confs()</text>
|
||||
<text text-anchor="start" x="449.734" y="-267" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">parse()</text>
|
||||
</g>
|
||||
<!-- A10->A12 -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>A10->A12</title>
|
||||
<path fill="none" stroke="#000000" d="M405.0919,-471.8382C419.1748,-431.9575 433.5466,-391.2585 444.6898,-359.7024"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="448.0405,-350.2137 448.9539,-361.1415 446.3756,-354.9284 444.7107,-359.6431 444.7107,-359.6431 444.7107,-359.6431 446.3756,-354.9284 440.4675,-358.1447 448.0405,-350.2137 448.0405,-350.2137"/>
|
||||
</g>
|
||||
<!-- A11 -->
|
||||
<g id="node12" class="node">
|
||||
<title>A11</title>
|
||||
<polygon fill="none" stroke="#000000" points="428.066,-710 428.066,-742 531.066,-742 531.066,-710 428.066,-710"/>
|
||||
<text text-anchor="start" x="468.728" y="-723" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Infos</text>
|
||||
<polygon fill="none" stroke="#000000" points="428.066,-654 428.066,-710 531.066,-710 531.066,-654 428.066,-654"/>
|
||||
<text text-anchor="start" x="471.5075" y="-691" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">stat</text>
|
||||
<text text-anchor="start" x="447.052" y="-679" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">new_stat_data</text>
|
||||
<text text-anchor="start" x="460.6695" y="-667" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">info_dev</text>
|
||||
<polygon fill="none" stroke="#000000" points="428.066,-502 428.066,-654 531.066,-654 531.066,-502 428.066,-502"/>
|
||||
<text text-anchor="start" x="455.4015" y="-635" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">static_init()</text>
|
||||
<text text-anchor="start" x="453.4505" y="-623" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">dev_value()</text>
|
||||
<text text-anchor="start" x="450.3965" y="-611" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">inc_counter()</text>
|
||||
<text text-anchor="start" x="448.7265" y="-599" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">dec_counter()</text>
|
||||
<text text-anchor="start" x="446.776" y="-587" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_proxy_conf</text>
|
||||
<text text-anchor="start" x="461.779" y="-575" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_conf</text>
|
||||
<text text-anchor="start" x="454.56" y="-563" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_remove</text>
|
||||
<text text-anchor="start" x="455.9405" y="-551" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">update_db</text>
|
||||
<text text-anchor="start" x="440.103" y="-539" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">set_db_def_value</text>
|
||||
<text text-anchor="start" x="449.5515" y="-527" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">get_db_value</text>
|
||||
<text text-anchor="start" x="437.8885" y="-515" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ignore_this_device</text>
|
||||
</g>
|
||||
<!-- A11->A12 -->
|
||||
<g id="edge11" class="edge">
|
||||
<title>A11->A12</title>
|
||||
<path fill="none" stroke="#000000" d="M473.3644,-491.6786C471.1803,-441.7544 468.8213,-387.8351 467.1788,-350.293"/>
|
||||
<polygon fill="none" stroke="#000000" points="469.8793,-492.0959 473.8131,-501.9334 476.8726,-491.7899 469.8793,-492.0959"/>
|
||||
</g>
|
||||
<!-- A13 -->
|
||||
<g id="node14" class="node">
|
||||
<title>A13</title>
|
||||
<polygon fill="none" stroke="#000000" points="156.066,-1524 156.066,-1556 305.066,-1556 305.066,-1524 156.066,-1524"/>
|
||||
<text text-anchor="start" x="210.2835" y="-1537" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Message</text>
|
||||
<polygon fill="none" stroke="#000000" points="156.066,-1300 156.066,-1524 305.066,-1524 305.066,-1300 156.066,-1300"/>
|
||||
<text text-anchor="start" x="193.8925" y="-1505" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">server_side:bool</text>
|
||||
<text text-anchor="start" x="204.45" y="-1493" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb:Modbus</text>
|
||||
<text text-anchor="start" x="205.2845" y="-1481" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ifc:AsyncIfc</text>
|
||||
<text text-anchor="start" x="212.7795" y="-1469" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">node_id</text>
|
||||
<text text-anchor="start" x="191.109" y="-1457" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">header_valid:bool</text>
|
||||
<text text-anchor="start" x="205.556" y="-1445" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">header_len</text>
|
||||
<text text-anchor="start" x="211.39" y="-1433" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">data_len</text>
|
||||
<text text-anchor="start" x="208.8905" y="-1421" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">unique_id</text>
|
||||
<text text-anchor="start" x="202.781" y="-1409" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">sug_area:str</text>
|
||||
<text text-anchor="start" x="199.722" y="-1397" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">new_data:dict</text>
|
||||
<text text-anchor="start" x="206.666" y="-1385" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">state:State</text>
|
||||
<text text-anchor="start" x="180.2705" y="-1373" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">shutdown_started:bool</text>
|
||||
<text text-anchor="start" x="199.4505" y="-1361" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">modbus_elms</text>
|
||||
<text text-anchor="start" x="195.573" y="-1349" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb_timer:Timer</text>
|
||||
<text text-anchor="start" x="204.451" y="-1337" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb_timeout</text>
|
||||
<text text-anchor="start" x="193.6185" y="-1325" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb_first_timeout</text>
|
||||
<text text-anchor="start" x="184.72" y="-1313" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">modbus_polling:bool</text>
|
||||
<polygon fill="none" stroke="#000000" points="156.066,-1196 156.066,-1300 305.066,-1300 305.066,-1196 156.066,-1196"/>
|
||||
<text text-anchor="start" x="179.4505" y="-1281" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_set_mqtt_timestamp()</text>
|
||||
<text text-anchor="start" x="208.066" y="-1269" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_timeout()</text>
|
||||
<text text-anchor="start" x="180.8335" y="-1257" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_send_modbus_cmd()</text>
|
||||
<text text-anchor="start" x="165.8255" y="-1245" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async> end_modbus_cmd()</text>
|
||||
<text text-anchor="start" x="215.5685" y="-1233" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
<text text-anchor="start" x="201.3965" y="-1221" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">inc_counter()</text>
|
||||
<text text-anchor="start" x="199.7265" y="-1209" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">dec_counter()</text>
|
||||
</g>
|
||||
<!-- A13->A5 -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>A13->A5</title>
|
||||
<path fill="none" stroke="#000000" d="M210.2965,-1195.7758C208.8462,-1182.5547 207.3854,-1169.2373 205.9406,-1156.0662"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="204.8393,-1146.0268 210.403,-1155.4764 205.3846,-1150.997 205.9298,-1155.9672 205.9298,-1155.9672 205.9298,-1155.9672 205.3846,-1150.997 201.4567,-1156.4579 204.8393,-1146.0268 204.8393,-1146.0268"/>
|
||||
<text text-anchor="middle" x="199.9181" y="-1175.6794" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">use</text>
|
||||
</g>
|
||||
<!-- A13->A10 -->
|
||||
<g id="edge16" class="edge">
|
||||
<title>A13->A10</title>
|
||||
<path fill="none" stroke="#000000" d="M260.8183,-1185.9405C281.556,-1057.7747 308.5382,-891.0162 327.7708,-772.1524"/>
|
||||
<polygon fill="none" stroke="#000000" points="257.3528,-1185.4467 259.2105,-1195.8774 264.2629,-1186.5648 257.3528,-1185.4467"/>
|
||||
</g>
|
||||
<!-- A14->A13 -->
|
||||
<g id="edge15" class="edge">
|
||||
<title>A14->A13</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M188.2401,-1673.8004C192.8037,-1641.3079 198.7631,-1598.8764 204.747,-1556.2713"/>
|
||||
<polygon fill="none" stroke="#000000" points="184.7342,-1673.5986 186.8092,-1683.9883 191.6661,-1674.5723 184.7342,-1673.5986"/>
|
||||
</g>
|
||||
<!-- A15 -->
|
||||
<g id="node16" class="node">
|
||||
<title>A15</title>
|
||||
<polygon fill="none" stroke="#000000" points="244.066,-1826 244.066,-1858 319.066,-1858 319.066,-1826 244.066,-1826"/>
|
||||
<text text-anchor="start" x="263.7835" y="-1839" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Modbus</text>
|
||||
<polygon fill="none" stroke="#000000" points="244.066,-1674 244.066,-1826 319.066,-1826 319.066,-1674 244.066,-1674"/>
|
||||
<text text-anchor="start" x="273.2275" y="-1807" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">que</text>
|
||||
<text text-anchor="start" x="254.056" y="-1783" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">snd_handler</text>
|
||||
<text text-anchor="start" x="255.171" y="-1771" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rsp_handler</text>
|
||||
<text text-anchor="start" x="265.1745" y="-1759" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">timeout</text>
|
||||
<text text-anchor="start" x="255.4555" y="-1747" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">max_retires</text>
|
||||
<text text-anchor="start" x="263.508" y="-1735" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">last_xxx</text>
|
||||
<text text-anchor="start" x="275.4575" y="-1723" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">err</text>
|
||||
<text text-anchor="start" x="262.1195" y="-1711" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">retry_cnt</text>
|
||||
<text text-anchor="start" x="260.445" y="-1699" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">req_pend</text>
|
||||
<text text-anchor="start" x="274.9025" y="-1687" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tim</text>
|
||||
<polygon fill="none" stroke="#000000" points="244.066,-1606 244.066,-1674 319.066,-1674 319.066,-1606 244.066,-1606"/>
|
||||
<text text-anchor="start" x="255.456" y="-1655" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">build_msg()</text>
|
||||
<text text-anchor="start" x="258.79" y="-1643" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">recv_req()</text>
|
||||
<text text-anchor="start" x="256.29" y="-1631" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">recv_resp()</text>
|
||||
<text text-anchor="start" x="266.5685" y="-1619" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A15->A13 -->
|
||||
<g id="edge17" class="edge">
|
||||
<title>A15->A13</title>
|
||||
<path fill="none" stroke="#000000" d="M261.5887,-1596.041C259.7128,-1582.9463 257.7908,-1569.5297 255.8664,-1556.0971"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="263.0135,-1605.9867 257.1408,-1596.726 262.3044,-1601.0373 261.5953,-1596.0878 261.5953,-1596.0878 261.5953,-1596.0878 262.3044,-1601.0373 266.0499,-1595.4496 263.0135,-1605.9867 263.0135,-1605.9867"/>
|
||||
<text text-anchor="middle" x="266.8039" y="-1569.8414" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">has</text>
|
||||
<text text-anchor="middle" x="252.0761" y="-1586.2424" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">0..1</text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 33 KiB |
43
app/proxy_2.yuml
Normal file
43
app/proxy_2.yuml
Normal file
@@ -0,0 +1,43 @@
|
||||
// {type:class}
|
||||
// {direction:topDown}
|
||||
// {generate:true}
|
||||
|
||||
[note: Example of instantiation for a GEN3 inverter!{bg:cornsilk}]
|
||||
[<<AbstractIterMeta>>||__iter__()]
|
||||
|
||||
[InverterG3|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()]
|
||||
[InverterG3]++->[local:StreamPtr]
|
||||
[InverterG3]++->[remote:StreamPtr]
|
||||
|
||||
[<<AsyncIfc>>||set_node_id();get_conn_no();;tx_add();tx_flush();tx_get();tx_peek();tx_log();tx_clear();tx_len();;fwd_add();fwd_log();rx_get();rx_peek();rx_log();rx_clear();rx_len();rx_set_cb();;prot_set_timeout_cb()]
|
||||
[AsyncIfcImpl|fwd_fifo:ByteFifo;tx_fifo:ByteFifo;rx_fifo:ByteFifo;conn_no:Count;node_id;timeout_cb]
|
||||
[AsyncStream|reader;writer;addr;r_addr;l_addr|;<async>loop;disc();close();healthy();;__async_read();__async_write();__async_forward()]
|
||||
[AsyncStreamServer|create_remote|<async>server_loop();<async>_async_forward();<async>publish_outstanding_mqtt();close()]
|
||||
[AsyncStreamClient||<async>client_loop();<async>_async_forward())]
|
||||
[<<AsyncIfc>>]^-.-[AsyncIfcImpl]
|
||||
[AsyncIfcImpl]^[AsyncStream]
|
||||
[AsyncStream]^[AsyncStreamServer]
|
||||
[AsyncStream]^[AsyncStreamClient]
|
||||
|
||||
|
||||
[Talent|conn_no;addr;;await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;healthy();close()]
|
||||
[Talent]<-++[local:StreamPtr]
|
||||
[local:StreamPtr]++->[AsyncStreamServer]
|
||||
[Talent]<-0..1[remote:StreamPtr]
|
||||
[remote:StreamPtr]0..1->[AsyncStreamClient]
|
||||
|
||||
[Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device]
|
||||
[Infos]^[InfosG3||ha_confs();parse()]
|
||||
|
||||
[Talent]->[InfosG3]
|
||||
|
||||
[Message|server_side:bool;mb:Modbus;ifc:AsyncIfc;node_id;header_valid:bool;header_len;data_len;unique_id;sug_area:str;new_data:dict;state:State;shutdown_started:bool;modbus_elms;mb_timer:Timer;mb_timeout;mb_first_timeout;modbus_polling:bool|_set_mqtt_timestamp();_timeout();_send_modbus_cmd();<async> end_modbus_cmd();close();inc_counter();dec_counter()]
|
||||
[Message]use->[<<AsyncIfc>>]
|
||||
|
||||
[<<ProtocolIfc>>|_registry|close()]
|
||||
[<<AbstractIterMeta>>]^-.-[<<ProtocolIfc>>]
|
||||
[<<ProtocolIfc>>]^-.-[Message]
|
||||
[Message]^[Talent]
|
||||
|
||||
[Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()]
|
||||
[Modbus]<0..1-has[Message]
|
||||
364
app/proxy_3.svg
Normal file
364
app/proxy_3.svg
Normal file
@@ -0,0 +1,364 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<!-- Generated by graphviz version 2.40.1 (20161225.0304)
|
||||
-->
|
||||
<!-- Title: G Pages: 1 -->
|
||||
<svg width="539pt" height="1940pt"
|
||||
viewBox="0.00 0.00 538.62 1940.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1936)">
|
||||
<title>G</title>
|
||||
<polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-1936 534.6165,-1936 534.6165,4 -4,4"/>
|
||||
<!-- A0 -->
|
||||
<g id="node1" class="node">
|
||||
<title>A0</title>
|
||||
<polygon fill="#fff8dc" stroke="#000000" points="114.3497,-1912 -.1167,-1912 -.1167,-1868 120.3497,-1868 120.3497,-1906 114.3497,-1912"/>
|
||||
<polyline fill="none" stroke="#000000" points="114.3497,-1912 114.3497,-1906 "/>
|
||||
<polyline fill="none" stroke="#000000" points="120.3497,-1906 114.3497,-1906 "/>
|
||||
<text text-anchor="middle" x="60.1165" y="-1899" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Example of</text>
|
||||
<text text-anchor="middle" x="60.1165" y="-1887" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">instantiation for a</text>
|
||||
<text text-anchor="middle" x="60.1165" y="-1875" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">GEN3PLUS inverter!</text>
|
||||
</g>
|
||||
<!-- A1 -->
|
||||
<g id="node2" class="node">
|
||||
<title>A1</title>
|
||||
<polygon fill="none" stroke="#000000" points="138.1165,-1900 138.1165,-1932 254.1165,-1932 254.1165,-1900 138.1165,-1900"/>
|
||||
<text text-anchor="start" x="147.7655" y="-1913" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<AbstractIterMeta>></text>
|
||||
<polygon fill="none" stroke="#000000" points="138.1165,-1880 138.1165,-1900 254.1165,-1900 254.1165,-1880 138.1165,-1880"/>
|
||||
<polygon fill="none" stroke="#000000" points="138.1165,-1848 138.1165,-1880 254.1165,-1880 254.1165,-1848 138.1165,-1848"/>
|
||||
<text text-anchor="start" x="174.7265" y="-1861" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__iter__()</text>
|
||||
</g>
|
||||
<!-- A14 -->
|
||||
<g id="node15" class="node">
|
||||
<title>A14</title>
|
||||
<polygon fill="none" stroke="#000000" points="151.1165,-1688 151.1165,-1720 241.1165,-1720 241.1165,-1688 151.1165,-1688"/>
|
||||
<text text-anchor="start" x="160.823" y="-1701" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<ProtocolIfc>></text>
|
||||
<polygon fill="none" stroke="#000000" points="151.1165,-1656 151.1165,-1688 241.1165,-1688 241.1165,-1656 151.1165,-1656"/>
|
||||
<text text-anchor="start" x="176.95" y="-1669" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_registry</text>
|
||||
<polygon fill="none" stroke="#000000" points="151.1165,-1624 151.1165,-1656 241.1165,-1656 241.1165,-1624 151.1165,-1624"/>
|
||||
<text text-anchor="start" x="181.119" y="-1637" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A1->A14 -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>A1->A14</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M196.1165,-1837.756C196.1165,-1802.0883 196.1165,-1755.1755 196.1165,-1720.3644"/>
|
||||
<polygon fill="none" stroke="#000000" points="192.6166,-1837.9674 196.1165,-1847.9674 199.6166,-1837.9674 192.6166,-1837.9674"/>
|
||||
</g>
|
||||
<!-- A2 -->
|
||||
<g id="node3" class="node">
|
||||
<title>A2</title>
|
||||
<polygon fill="none" stroke="#000000" points="202.1165,-632 202.1165,-664 300.1165,-664 300.1165,-632 202.1165,-632"/>
|
||||
<text text-anchor="start" x="224.1665" y="-645" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InverterG3P</text>
|
||||
<polygon fill="none" stroke="#000000" points="202.1165,-576 202.1165,-632 300.1165,-632 300.1165,-576 202.1165,-576"/>
|
||||
<text text-anchor="start" x="241.1135" y="-613" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="211.6695" y="-601" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remote:StreamPtr</text>
|
||||
<text text-anchor="start" x="216.9485" y="-589" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">local:StreamPtr</text>
|
||||
<polygon fill="none" stroke="#000000" points="202.1165,-520 202.1165,-576 300.1165,-576 300.1165,-520 202.1165,-520"/>
|
||||
<text text-anchor="start" x="215.5585" y="-557" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">create_remote()</text>
|
||||
<text text-anchor="start" x="236.119" y="-533" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A3 -->
|
||||
<g id="node4" class="node">
|
||||
<title>A3</title>
|
||||
<polygon fill="none" stroke="#000000" points="419.4531,-320 322.7799,-320 322.7799,-284 419.4531,-284 419.4531,-320"/>
|
||||
<text text-anchor="middle" x="371.1165" y="-299" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">local:StreamPtr</text>
|
||||
</g>
|
||||
<!-- A2->A3 -->
|
||||
<g id="edge1" class="edge">
|
||||
<title>A2->A3</title>
|
||||
<path fill="none" stroke="#000000" d="M285.5402,-508.8093C310.5478,-448.3743 342.848,-370.3156 359.7149,-329.5539"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="285.5219,-508.8538 286.9238,-515.9273 280.9336,-519.942 279.5317,-512.8685 285.5219,-508.8538"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="363.5595,-320.2627 363.894,-331.2235 361.6477,-324.8828 359.7359,-329.5029 359.7359,-329.5029 359.7359,-329.5029 361.6477,-324.8828 355.5779,-327.7823 363.5595,-320.2627 363.5595,-320.2627"/>
|
||||
</g>
|
||||
<!-- A4 -->
|
||||
<g id="node5" class="node">
|
||||
<title>A4</title>
|
||||
<polygon fill="none" stroke="#000000" points="304.5106,-320 197.7224,-320 197.7224,-284 304.5106,-284 304.5106,-320"/>
|
||||
<text text-anchor="middle" x="251.1165" y="-299" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remote:StreamPtr</text>
|
||||
</g>
|
||||
<!-- A2->A4 -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>A2->A4</title>
|
||||
<path fill="none" stroke="#000000" d="M251.1165,-507.5905C251.1165,-447.68 251.1165,-370.9429 251.1165,-330.266"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="251.1166,-507.942 255.1165,-513.942 251.1165,-519.942 247.1165,-513.942 251.1166,-507.942"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="251.1165,-320.2627 255.6166,-330.2626 251.1165,-325.2627 251.1166,-330.2627 251.1166,-330.2627 251.1166,-330.2627 251.1165,-325.2627 246.6166,-330.2627 251.1165,-320.2627 251.1165,-320.2627"/>
|
||||
</g>
|
||||
<!-- A8 -->
|
||||
<g id="node9" class="node">
|
||||
<title>A8</title>
|
||||
<polygon fill="none" stroke="#000000" points="265.1165,-100 265.1165,-132 443.1165,-132 443.1165,-100 265.1165,-100"/>
|
||||
<text text-anchor="start" x="309.668" y="-113" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncStreamServer</text>
|
||||
<polygon fill="none" stroke="#000000" points="265.1165,-68 265.1165,-100 443.1165,-100 443.1165,-68 265.1165,-68"/>
|
||||
<text text-anchor="start" x="321.8875" y="-81" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">create_remote</text>
|
||||
<polygon fill="none" stroke="#000000" points="265.1165,0 265.1165,-68 443.1165,-68 443.1165,0 265.1165,0"/>
|
||||
<text text-anchor="start" x="305.774" y="-49" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>server_loop()</text>
|
||||
<text text-anchor="start" x="296.605" y="-37" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>_async_forward()</text>
|
||||
<text text-anchor="start" x="274.9255" y="-25" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>publish_outstanding_mqtt()</text>
|
||||
<text text-anchor="start" x="339.119" y="-13" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A3->A8 -->
|
||||
<g id="edge8" class="edge">
|
||||
<title>A3->A8</title>
|
||||
<path fill="none" stroke="#000000" d="M368.9314,-271.6651C366.5869,-239.1181 362.7725,-186.1658 359.6014,-142.1431"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="368.9485,-271.9044 373.3693,-277.6014 369.8108,-283.8733 365.39,-278.1763 368.9485,-271.9044"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="358.8731,-132.0321 364.08,-141.6829 359.2323,-137.0192 359.5916,-142.0063 359.5916,-142.0063 359.5916,-142.0063 359.2323,-137.0192 355.1032,-142.3296 358.8731,-132.0321 358.8731,-132.0321"/>
|
||||
</g>
|
||||
<!-- A9 -->
|
||||
<g id="node10" class="node">
|
||||
<title>A9</title>
|
||||
<polygon fill="none" stroke="#000000" points="93.1165,-82 93.1165,-114 231.1165,-114 231.1165,-82 93.1165,-82"/>
|
||||
<text text-anchor="start" x="119.6135" y="-95" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncStreamClient</text>
|
||||
<polygon fill="none" stroke="#000000" points="93.1165,-62 93.1165,-82 231.1165,-82 231.1165,-62 93.1165,-62"/>
|
||||
<polygon fill="none" stroke="#000000" points="93.1165,-18 93.1165,-62 231.1165,-62 231.1165,-18 93.1165,-18"/>
|
||||
<text text-anchor="start" x="115.9945" y="-43" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>client_loop()</text>
|
||||
<text text-anchor="start" x="102.9405" y="-31" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>_async_forward())</text>
|
||||
</g>
|
||||
<!-- A4->A9 -->
|
||||
<g id="edge10" class="edge">
|
||||
<title>A4->A9</title>
|
||||
<path fill="none" stroke="#000000" d="M244.2806,-283.8733C231.5204,-250.0372 203.5834,-175.9573 183.8383,-123.5994"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="180.2523,-114.0904 187.9915,-121.8593 182.0166,-118.7688 183.781,-123.4472 183.781,-123.4472 183.781,-123.4472 182.0166,-118.7688 179.5704,-125.0351 180.2523,-114.0904 180.2523,-114.0904"/>
|
||||
<text text-anchor="middle" x="229.9759" y="-266.8956" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">0..1</text>
|
||||
</g>
|
||||
<!-- A5 -->
|
||||
<g id="node6" class="node">
|
||||
<title>A5</title>
|
||||
<polygon fill="none" stroke="#000000" points="145.1165,-1054 145.1165,-1086 262.1165,-1086 262.1165,-1054 145.1165,-1054"/>
|
||||
<text text-anchor="start" x="173.0455" y="-1067" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><<AsyncIfc>></text>
|
||||
<polygon fill="none" stroke="#000000" points="145.1165,-1034 145.1165,-1054 262.1165,-1054 262.1165,-1034 145.1165,-1034"/>
|
||||
<polygon fill="none" stroke="#000000" points="145.1165,-762 145.1165,-1034 262.1165,-1034 262.1165,-762 145.1165,-762"/>
|
||||
<text text-anchor="start" x="173.0525" y="-1015" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">set_node_id()</text>
|
||||
<text text-anchor="start" x="171.3825" y="-1003" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">get_conn_no()</text>
|
||||
<text text-anchor="start" x="185.28" y="-979" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_add()</text>
|
||||
<text text-anchor="start" x="183.0605" y="-967" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_flush()</text>
|
||||
<text text-anchor="start" x="186.67" y="-955" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_get()</text>
|
||||
<text text-anchor="start" x="182.78" y="-943" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_peek()</text>
|
||||
<text text-anchor="start" x="186.95" y="-931" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_log()</text>
|
||||
<text text-anchor="start" x="182.7855" y="-919" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_clear()</text>
|
||||
<text text-anchor="start" x="186.95" y="-907" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_len()</text>
|
||||
<text text-anchor="start" x="181.391" y="-883" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fwd_add()</text>
|
||||
<text text-anchor="start" x="183.061" y="-871" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fwd_log()</text>
|
||||
<text text-anchor="start" x="186.395" y="-859" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_get()</text>
|
||||
<text text-anchor="start" x="182.505" y="-847" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_peek()</text>
|
||||
<text text-anchor="start" x="186.675" y="-835" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_log()</text>
|
||||
<text text-anchor="start" x="182.5105" y="-823" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_clear()</text>
|
||||
<text text-anchor="start" x="186.675" y="-811" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_len()</text>
|
||||
<text text-anchor="start" x="178.6155" y="-799" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_set_cb()</text>
|
||||
<text text-anchor="start" x="154.996" y="-775" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">prot_set_timeout_cb()</text>
|
||||
</g>
|
||||
<!-- A6 -->
|
||||
<g id="node7" class="node">
|
||||
<title>A6</title>
|
||||
<polygon fill="none" stroke="#000000" points="87.1165,-622 87.1165,-654 180.1165,-654 180.1165,-622 87.1165,-622"/>
|
||||
<text text-anchor="start" x="105.2805" y="-635" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncIfcImpl</text>
|
||||
<polygon fill="none" stroke="#000000" points="87.1165,-530 87.1165,-622 180.1165,-622 180.1165,-530 87.1165,-530"/>
|
||||
<text text-anchor="start" x="96.6645" y="-603" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">fwd_fifo:ByteFifo</text>
|
||||
<text text-anchor="start" x="100.5535" y="-591" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tx_fifo:ByteFifo</text>
|
||||
<text text-anchor="start" x="100.2785" y="-579" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rx_fifo:ByteFifo</text>
|
||||
<text text-anchor="start" x="99.7125" y="-567" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">conn_no:Count</text>
|
||||
<text text-anchor="start" x="115.83" y="-555" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">node_id</text>
|
||||
<text text-anchor="start" x="109.166" y="-543" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">timeout_cb</text>
|
||||
</g>
|
||||
<!-- A5->A6 -->
|
||||
<g id="edge3" class="edge">
|
||||
<title>A5->A6</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M166.8518,-752.0017C159.4629,-716.9571 152.1492,-682.2694 146.2303,-654.1971"/>
|
||||
<polygon fill="none" stroke="#000000" points="163.4489,-752.8275 168.9367,-761.8903 170.2983,-751.3833 163.4489,-752.8275"/>
|
||||
</g>
|
||||
<!-- A7 -->
|
||||
<g id="node8" class="node">
|
||||
<title>A7</title>
|
||||
<polygon fill="none" stroke="#000000" points="78.1165,-390 78.1165,-422 180.1165,-422 180.1165,-390 78.1165,-390"/>
|
||||
<text text-anchor="start" x="99.3905" y="-403" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">AsyncStream</text>
|
||||
<polygon fill="none" stroke="#000000" points="78.1165,-310 78.1165,-390 180.1165,-390 180.1165,-310 78.1165,-310"/>
|
||||
<text text-anchor="start" x="114.6695" y="-371" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">reader</text>
|
||||
<text text-anchor="start" x="116.8995" y="-359" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">writer</text>
|
||||
<text text-anchor="start" x="119.1135" y="-347" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="114.6695" y="-335" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">r_addr</text>
|
||||
<text text-anchor="start" x="115.2245" y="-323" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">l_addr</text>
|
||||
<polygon fill="none" stroke="#000000" points="78.1165,-182 78.1165,-310 180.1165,-310 180.1165,-182 78.1165,-182"/>
|
||||
<text text-anchor="start" x="100.7705" y="-279" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async>loop</text>
|
||||
<text text-anchor="start" x="116.8985" y="-267" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">disc()</text>
|
||||
<text text-anchor="start" x="114.119" y="-255" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
<text text-anchor="start" x="109.6705" y="-243" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()</text>
|
||||
<text text-anchor="start" x="94.387" y="-219" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_read()</text>
|
||||
<text text-anchor="start" x="93.8375" y="-207" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_write()</text>
|
||||
<text text-anchor="start" x="87.7235" y="-195" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">__async_forward()</text>
|
||||
</g>
|
||||
<!-- A6->A7 -->
|
||||
<g id="edge4" class="edge">
|
||||
<title>A6->A7</title>
|
||||
<path fill="none" stroke="#000000" d="M132.1177,-519.5861C131.7106,-490.0737 131.229,-455.1552 130.7721,-422.0295"/>
|
||||
<polygon fill="none" stroke="#000000" points="128.6207,-519.837 132.2584,-529.7877 135.6201,-519.7404 128.6207,-519.837"/>
|
||||
</g>
|
||||
<!-- A7->A8 -->
|
||||
<g id="edge5" class="edge">
|
||||
<title>A7->A8</title>
|
||||
<path fill="none" stroke="#000000" d="M186.5777,-185.0204C187.4154,-184.0001 188.2616,-182.9929 189.1165,-182 210.4788,-157.1889 238.2469,-135.0276 264.8921,-116.8901"/>
|
||||
<polygon fill="none" stroke="#000000" points="183.6875,-183.0361 180.3256,-193.0834 189.2193,-187.3255 183.6875,-183.0361"/>
|
||||
</g>
|
||||
<!-- A7->A9 -->
|
||||
<g id="edge6" class="edge">
|
||||
<title>A7->A9</title>
|
||||
<path fill="none" stroke="#000000" d="M147.3214,-171.8077C150.1952,-151.2556 152.9992,-131.2022 155.3799,-114.1772"/>
|
||||
<polygon fill="none" stroke="#000000" points="143.8252,-171.5375 145.9065,-181.9259 150.7577,-172.5069 143.8252,-171.5375"/>
|
||||
</g>
|
||||
<!-- A10 -->
|
||||
<g id="node11" class="node">
|
||||
<title>A10</title>
|
||||
<polygon fill="none" stroke="#000000" points="319.1165,-668 319.1165,-700 410.1165,-700 410.1165,-668 319.1165,-668"/>
|
||||
<text text-anchor="start" x="337.1115" y="-681" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">SolarmanV5</text>
|
||||
<polygon fill="none" stroke="#000000" points="319.1165,-552 319.1165,-668 410.1165,-668 410.1165,-552 319.1165,-552"/>
|
||||
<text text-anchor="start" x="345.4395" y="-649" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">conn_no</text>
|
||||
<text text-anchor="start" x="354.6135" y="-637" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">addr</text>
|
||||
<text text-anchor="start" x="349.6145" y="-613" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">control</text>
|
||||
<text text-anchor="start" x="352.674" y="-601" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">serial</text>
|
||||
<text text-anchor="start" x="357.6725" y="-589" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">snr</text>
|
||||
<text text-anchor="start" x="336.8265" y="-577" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">db:InfosG3P</text>
|
||||
<text text-anchor="start" x="350.7285" y="-565" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">switch</text>
|
||||
<polygon fill="none" stroke="#000000" points="319.1165,-484 319.1165,-552 410.1165,-552 410.1165,-484 319.1165,-484"/>
|
||||
<text text-anchor="start" x="329.057" y="-533" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">msg_unknown()</text>
|
||||
<text text-anchor="start" x="345.1705" y="-509" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()</text>
|
||||
<text text-anchor="start" x="349.619" y="-497" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A10->A3 -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>A10->A3</title>
|
||||
<path fill="none" stroke="#000000" d="M366.9763,-473.5237C368.222,-421.9136 369.5798,-365.6622 370.389,-332.138"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="366.733,-483.6023 362.4757,-473.4966 366.8537,-478.6038 366.9744,-473.6052 366.9744,-473.6052 366.9744,-473.6052 366.8537,-478.6038 371.4731,-473.7139 366.733,-483.6023 366.733,-483.6023"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="370.3911,-332.0495 366.5371,-325.9547 370.6807,-320.053 374.5347,-326.1478 370.3911,-332.0495"/>
|
||||
</g>
|
||||
<!-- A10->A4 -->
|
||||
<g id="edge9" class="edge">
|
||||
<title>A10->A4</title>
|
||||
<path fill="none" stroke="#000000" d="M318.2339,-474.2481C295.3796,-415.5956 270.1211,-350.7729 258.151,-320.053"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="321.8788,-483.6023 314.0551,-475.9185 320.0634,-478.9435 318.2481,-474.2847 318.2481,-474.2847 318.2481,-474.2847 320.0634,-478.9435 322.441,-472.6508 321.8788,-483.6023 321.8788,-483.6023"/>
|
||||
<text text-anchor="middle" x="272.6076" y="-330.8736" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">0..1</text>
|
||||
</g>
|
||||
<!-- A12 -->
|
||||
<g id="node13" class="node">
|
||||
<title>A12</title>
|
||||
<polygon fill="none" stroke="#000000" points="442.1165,-318 442.1165,-350 509.1165,-350 509.1165,-318 442.1165,-318"/>
|
||||
<text text-anchor="start" x="454.775" y="-331" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">InfosG3P</text>
|
||||
<polygon fill="none" stroke="#000000" points="442.1165,-298 442.1165,-318 509.1165,-318 509.1165,-298 442.1165,-298"/>
|
||||
<polygon fill="none" stroke="#000000" points="442.1165,-254 442.1165,-298 509.1165,-298 509.1165,-254 442.1165,-254"/>
|
||||
<text text-anchor="start" x="452.0005" y="-279" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_confs()</text>
|
||||
<text text-anchor="start" x="459.7845" y="-267" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">parse()</text>
|
||||
</g>
|
||||
<!-- A10->A12 -->
|
||||
<g id="edge12" class="edge">
|
||||
<title>A10->A12</title>
|
||||
<path fill="none" stroke="#000000" d="M405.6067,-483.6023C421.7045,-441.5449 439.4849,-395.0916 453.0329,-359.6958"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="456.737,-350.0185 457.3649,-360.9664 454.9496,-354.6881 453.1623,-359.3577 453.1623,-359.3577 453.1623,-359.3577 454.9496,-354.6881 448.9596,-357.7491 456.737,-350.0185 456.737,-350.0185"/>
|
||||
</g>
|
||||
<!-- A11 -->
|
||||
<g id="node12" class="node">
|
||||
<title>A11</title>
|
||||
<polygon fill="none" stroke="#000000" points="428.1165,-680 428.1165,-712 531.1165,-712 531.1165,-680 428.1165,-680"/>
|
||||
<text text-anchor="start" x="468.7785" y="-693" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Infos</text>
|
||||
<polygon fill="none" stroke="#000000" points="428.1165,-624 428.1165,-680 531.1165,-680 531.1165,-624 428.1165,-624"/>
|
||||
<text text-anchor="start" x="471.558" y="-661" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">stat</text>
|
||||
<text text-anchor="start" x="447.1025" y="-649" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">new_stat_data</text>
|
||||
<text text-anchor="start" x="460.72" y="-637" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">info_dev</text>
|
||||
<polygon fill="none" stroke="#000000" points="428.1165,-472 428.1165,-624 531.1165,-624 531.1165,-472 428.1165,-472"/>
|
||||
<text text-anchor="start" x="455.452" y="-605" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">static_init()</text>
|
||||
<text text-anchor="start" x="453.501" y="-593" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">dev_value()</text>
|
||||
<text text-anchor="start" x="450.447" y="-581" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">inc_counter()</text>
|
||||
<text text-anchor="start" x="448.777" y="-569" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">dec_counter()</text>
|
||||
<text text-anchor="start" x="446.8265" y="-557" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_proxy_conf</text>
|
||||
<text text-anchor="start" x="461.8295" y="-545" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_conf</text>
|
||||
<text text-anchor="start" x="454.6105" y="-533" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ha_remove</text>
|
||||
<text text-anchor="start" x="455.991" y="-521" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">update_db</text>
|
||||
<text text-anchor="start" x="440.1535" y="-509" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">set_db_def_value</text>
|
||||
<text text-anchor="start" x="449.602" y="-497" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">get_db_value</text>
|
||||
<text text-anchor="start" x="437.939" y="-485" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ignore_this_device</text>
|
||||
</g>
|
||||
<!-- A11->A12 -->
|
||||
<g id="edge11" class="edge">
|
||||
<title>A11->A12</title>
|
||||
<path fill="none" stroke="#000000" d="M477.322,-461.8987C476.7744,-422.1971 476.206,-380.9898 475.7834,-350.352"/>
|
||||
<polygon fill="none" stroke="#000000" points="473.823,-462.0018 477.4607,-471.9525 480.8223,-461.9052 473.823,-462.0018"/>
|
||||
</g>
|
||||
<!-- A13 -->
|
||||
<g id="node14" class="node">
|
||||
<title>A13</title>
|
||||
<polygon fill="none" stroke="#000000" points="172.1165,-1464 172.1165,-1496 321.1165,-1496 321.1165,-1464 172.1165,-1464"/>
|
||||
<text text-anchor="start" x="226.334" y="-1477" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Message</text>
|
||||
<polygon fill="none" stroke="#000000" points="172.1165,-1240 172.1165,-1464 321.1165,-1464 321.1165,-1240 172.1165,-1240"/>
|
||||
<text text-anchor="start" x="209.943" y="-1445" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">server_side:bool</text>
|
||||
<text text-anchor="start" x="220.5005" y="-1433" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb:Modbus</text>
|
||||
<text text-anchor="start" x="221.335" y="-1421" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ifc:AsyncIfc</text>
|
||||
<text text-anchor="start" x="228.83" y="-1409" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">node_id</text>
|
||||
<text text-anchor="start" x="207.1595" y="-1397" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">header_valid:bool</text>
|
||||
<text text-anchor="start" x="221.6065" y="-1385" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">header_len</text>
|
||||
<text text-anchor="start" x="227.4405" y="-1373" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">data_len</text>
|
||||
<text text-anchor="start" x="224.941" y="-1361" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">unique_id</text>
|
||||
<text text-anchor="start" x="218.8315" y="-1349" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">sug_area:str</text>
|
||||
<text text-anchor="start" x="215.7725" y="-1337" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">new_data:dict</text>
|
||||
<text text-anchor="start" x="222.7165" y="-1325" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">state:State</text>
|
||||
<text text-anchor="start" x="196.321" y="-1313" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">shutdown_started:bool</text>
|
||||
<text text-anchor="start" x="215.501" y="-1301" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">modbus_elms</text>
|
||||
<text text-anchor="start" x="211.6235" y="-1289" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb_timer:Timer</text>
|
||||
<text text-anchor="start" x="220.5015" y="-1277" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb_timeout</text>
|
||||
<text text-anchor="start" x="209.669" y="-1265" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">mb_first_timeout</text>
|
||||
<text text-anchor="start" x="200.7705" y="-1253" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">modbus_polling:bool</text>
|
||||
<polygon fill="none" stroke="#000000" points="172.1165,-1136 172.1165,-1240 321.1165,-1240 321.1165,-1136 172.1165,-1136"/>
|
||||
<text text-anchor="start" x="195.501" y="-1221" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_set_mqtt_timestamp()</text>
|
||||
<text text-anchor="start" x="224.1165" y="-1209" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_timeout()</text>
|
||||
<text text-anchor="start" x="196.884" y="-1197" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">_send_modbus_cmd()</text>
|
||||
<text text-anchor="start" x="181.876" y="-1185" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000"><async> end_modbus_cmd()</text>
|
||||
<text text-anchor="start" x="231.619" y="-1173" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
<text text-anchor="start" x="217.447" y="-1161" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">inc_counter()</text>
|
||||
<text text-anchor="start" x="215.777" y="-1149" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">dec_counter()</text>
|
||||
</g>
|
||||
<!-- A13->A5 -->
|
||||
<g id="edge13" class="edge">
|
||||
<title>A13->A5</title>
|
||||
<path fill="none" stroke="#000000" d="M226.347,-1135.7758C224.8967,-1122.5547 223.4359,-1109.2373 221.9911,-1096.0662"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="220.8898,-1086.0268 226.4535,-1095.4764 221.4351,-1090.997 221.9803,-1095.9672 221.9803,-1095.9672 221.9803,-1095.9672 221.4351,-1090.997 217.5072,-1096.4579 220.8898,-1086.0268 220.8898,-1086.0268"/>
|
||||
<text text-anchor="middle" x="215.9686" y="-1115.6794" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">use</text>
|
||||
</g>
|
||||
<!-- A13->A10 -->
|
||||
<g id="edge16" class="edge">
|
||||
<title>A13->A10</title>
|
||||
<path fill="none" stroke="#000000" d="M277.1595,-1125.5329C299.2708,-989.8666 328.1962,-812.3923 346.4719,-700.2604"/>
|
||||
<polygon fill="none" stroke="#000000" points="273.6668,-1125.205 275.5125,-1135.6378 280.5757,-1126.3311 273.6668,-1125.205"/>
|
||||
</g>
|
||||
<!-- A14->A13 -->
|
||||
<g id="edge15" class="edge">
|
||||
<title>A14->A13</title>
|
||||
<path fill="none" stroke="#000000" stroke-dasharray="5,2" d="M204.2906,-1613.8004C208.8542,-1581.3079 214.8136,-1538.8764 220.7975,-1496.2713"/>
|
||||
<polygon fill="none" stroke="#000000" points="200.7847,-1613.5986 202.8597,-1623.9883 207.7166,-1614.5723 200.7847,-1613.5986"/>
|
||||
</g>
|
||||
<!-- A15 -->
|
||||
<g id="node16" class="node">
|
||||
<title>A15</title>
|
||||
<polygon fill="none" stroke="#000000" points="260.1165,-1766 260.1165,-1798 335.1165,-1798 335.1165,-1766 260.1165,-1766"/>
|
||||
<text text-anchor="start" x="279.834" y="-1779" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">Modbus</text>
|
||||
<polygon fill="none" stroke="#000000" points="260.1165,-1614 260.1165,-1766 335.1165,-1766 335.1165,-1614 260.1165,-1614"/>
|
||||
<text text-anchor="start" x="289.278" y="-1747" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">que</text>
|
||||
<text text-anchor="start" x="270.1065" y="-1723" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">snd_handler</text>
|
||||
<text text-anchor="start" x="271.2215" y="-1711" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">rsp_handler</text>
|
||||
<text text-anchor="start" x="281.225" y="-1699" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">timeout</text>
|
||||
<text text-anchor="start" x="271.506" y="-1687" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">max_retires</text>
|
||||
<text text-anchor="start" x="279.5585" y="-1675" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">last_xxx</text>
|
||||
<text text-anchor="start" x="291.508" y="-1663" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">err</text>
|
||||
<text text-anchor="start" x="278.17" y="-1651" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">retry_cnt</text>
|
||||
<text text-anchor="start" x="276.4955" y="-1639" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">req_pend</text>
|
||||
<text text-anchor="start" x="290.953" y="-1627" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">tim</text>
|
||||
<polygon fill="none" stroke="#000000" points="260.1165,-1546 260.1165,-1614 335.1165,-1614 335.1165,-1546 260.1165,-1546"/>
|
||||
<text text-anchor="start" x="271.5065" y="-1595" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">build_msg()</text>
|
||||
<text text-anchor="start" x="274.8405" y="-1583" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">recv_req()</text>
|
||||
<text text-anchor="start" x="272.3405" y="-1571" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">recv_resp()</text>
|
||||
<text text-anchor="start" x="282.619" y="-1559" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
|
||||
</g>
|
||||
<!-- A15->A13 -->
|
||||
<g id="edge17" class="edge">
|
||||
<title>A15->A13</title>
|
||||
<path fill="none" stroke="#000000" d="M277.6392,-1536.041C275.7633,-1522.9463 273.8413,-1509.5297 271.9169,-1496.0971"/>
|
||||
<polygon fill="#000000" stroke="#000000" points="279.064,-1545.9867 273.1913,-1536.726 278.3549,-1541.0373 277.6458,-1536.0878 277.6458,-1536.0878 277.6458,-1536.0878 278.3549,-1541.0373 282.1004,-1535.4496 279.064,-1545.9867 279.064,-1545.9867"/>
|
||||
<text text-anchor="middle" x="282.8544" y="-1509.8414" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">has</text>
|
||||
<text text-anchor="middle" x="268.1266" y="-1526.2424" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">0..1</text>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 32 KiB |
42
app/proxy_3.yuml
Normal file
42
app/proxy_3.yuml
Normal file
@@ -0,0 +1,42 @@
|
||||
// {type:class}
|
||||
// {direction:topDown}
|
||||
// {generate:true}
|
||||
|
||||
[note: Example of instantiation for a GEN3PLUS inverter!{bg:cornsilk}]
|
||||
[<<AbstractIterMeta>>||__iter__()]
|
||||
|
||||
[InverterG3P|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()]
|
||||
[InverterG3P]++->[local:StreamPtr]
|
||||
[InverterG3P]++->[remote:StreamPtr]
|
||||
|
||||
[<<AsyncIfc>>||set_node_id();get_conn_no();;tx_add();tx_flush();tx_get();tx_peek();tx_log();tx_clear();tx_len();;fwd_add();fwd_log();rx_get();rx_peek();rx_log();rx_clear();rx_len();rx_set_cb();;prot_set_timeout_cb()]
|
||||
[AsyncIfcImpl|fwd_fifo:ByteFifo;tx_fifo:ByteFifo;rx_fifo:ByteFifo;conn_no:Count;node_id;timeout_cb]
|
||||
[AsyncStream|reader;writer;addr;r_addr;l_addr|;<async>loop;disc();close();healthy();;__async_read();__async_write();__async_forward()]
|
||||
[AsyncStreamServer|create_remote|<async>server_loop();<async>_async_forward();<async>publish_outstanding_mqtt();close()]
|
||||
[AsyncStreamClient||<async>client_loop();<async>_async_forward())]
|
||||
[<<AsyncIfc>>]^-.-[AsyncIfcImpl]
|
||||
[AsyncIfcImpl]^[AsyncStream]
|
||||
[AsyncStream]^[AsyncStreamServer]
|
||||
[AsyncStream]^[AsyncStreamClient]
|
||||
|
||||
[SolarmanV5|conn_no;addr;;control;serial;snr;db:InfosG3P;switch|msg_unknown();;healthy();close()]
|
||||
[SolarmanV5]<-++[local:StreamPtr]
|
||||
[local:StreamPtr]++->[AsyncStreamServer]
|
||||
[SolarmanV5]<-0..1[remote:StreamPtr]
|
||||
[remote:StreamPtr]0..1->[AsyncStreamClient]
|
||||
|
||||
[Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device]
|
||||
[Infos]^[InfosG3P||ha_confs();parse()]
|
||||
|
||||
[SolarmanV5]->[InfosG3P]
|
||||
|
||||
[Message|server_side:bool;mb:Modbus;ifc:AsyncIfc;node_id;header_valid:bool;header_len;data_len;unique_id;sug_area:str;new_data:dict;state:State;shutdown_started:bool;modbus_elms;mb_timer:Timer;mb_timeout;mb_first_timeout;modbus_polling:bool|_set_mqtt_timestamp();_timeout();_send_modbus_cmd();<async> end_modbus_cmd();close();inc_counter();dec_counter()]
|
||||
[Message]use->[<<AsyncIfc>>]
|
||||
|
||||
[<<ProtocolIfc>>|_registry|close()]
|
||||
[<<AbstractIterMeta>>]^-.-[<<ProtocolIfc>>]
|
||||
[<<ProtocolIfc>>]^-.-[Message]
|
||||
[Message]^[SolarmanV5]
|
||||
|
||||
[Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()]
|
||||
[Modbus]<0..1-has[Message]
|
||||
8
app/requirements-test.txt
Normal file
8
app/requirements-test.txt
Normal file
@@ -0,0 +1,8 @@
|
||||
flake8
|
||||
pytest
|
||||
pytest-asyncio
|
||||
pytest-cov
|
||||
python-dotenv
|
||||
mock
|
||||
coverage
|
||||
jinja2-cli
|
||||
@@ -1,2 +1,4 @@
|
||||
aiomqtt==1.2.1
|
||||
schema==0.7.5
|
||||
aiomqtt==2.3.0
|
||||
schema==0.7.7
|
||||
aiocron==1.8
|
||||
aiohttp==3.11.11
|
||||
104
app/src/async_ifc.py
Normal file
104
app/src/async_ifc.py
Normal file
@@ -0,0 +1,104 @@
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
|
||||
class AsyncIfc(ABC):
|
||||
@abstractmethod
|
||||
def get_conn_no(self):
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def set_node_id(self, value: str):
|
||||
pass # pragma: no cover
|
||||
|
||||
#
|
||||
# TX - QUEUE
|
||||
#
|
||||
@abstractmethod
|
||||
def tx_add(self, data: bytearray):
|
||||
''' add data to transmit queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def tx_flush(self):
|
||||
''' send transmit queue and clears it'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def tx_peek(self, size: int = None) -> bytearray:
|
||||
'''returns size numbers of byte without removing them'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def tx_log(self, level, info):
|
||||
''' log the transmit queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def tx_clear(self):
|
||||
''' clear transmit queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def tx_len(self):
|
||||
''' get numner of bytes in the transmit queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
#
|
||||
# FORWARD - QUEUE
|
||||
#
|
||||
@abstractmethod
|
||||
def fwd_add(self, data: bytearray):
|
||||
''' add data to forward queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def fwd_log(self, level, info):
|
||||
''' log the forward queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
#
|
||||
# RX - QUEUE
|
||||
#
|
||||
@abstractmethod
|
||||
def rx_get(self, size: int = None) -> bytearray:
|
||||
'''removes size numbers of bytes and return them'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def rx_peek(self, size: int = None) -> bytearray:
|
||||
'''returns size numbers of byte without removing them'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def rx_log(self, level, info):
|
||||
''' logs the receive queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def rx_clear(self):
|
||||
''' clear receive queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def rx_len(self):
|
||||
''' get numner of bytes in the receive queue'''
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def rx_set_cb(self, callback):
|
||||
pass # pragma: no cover
|
||||
|
||||
#
|
||||
# Protocol Callbacks
|
||||
#
|
||||
@abstractmethod
|
||||
def prot_set_timeout_cb(self, callback):
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def prot_set_init_new_client_conn_cb(self, callback):
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def prot_set_update_header_cb(self, callback):
|
||||
pass # pragma: no cover
|
||||
@@ -1,104 +1,397 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import traceback
|
||||
# from config import Config
|
||||
# import gc
|
||||
from messages import Message, hex_dump_memory
|
||||
import time
|
||||
from asyncio import StreamReader, StreamWriter
|
||||
from typing import Self
|
||||
from itertools import count
|
||||
|
||||
from proxy import Proxy
|
||||
from byte_fifo import ByteFifo
|
||||
from async_ifc import AsyncIfc
|
||||
from infos import Infos
|
||||
|
||||
|
||||
import gc
|
||||
logger = logging.getLogger('conn')
|
||||
|
||||
|
||||
class AsyncStream(Message):
|
||||
class AsyncIfcImpl(AsyncIfc):
|
||||
_ids = count(0)
|
||||
|
||||
def __init__(self, reader, writer, addr, remote_stream, server_side: bool
|
||||
) -> None:
|
||||
super().__init__(server_side)
|
||||
self.reader = reader
|
||||
self.writer = writer
|
||||
self.remoteStream = remote_stream
|
||||
self.addr = addr
|
||||
self.r_addr = ''
|
||||
self.l_addr = ''
|
||||
|
||||
'''
|
||||
Our puplic methods
|
||||
'''
|
||||
async def loop(self):
|
||||
self.r_addr = self.writer.get_extra_info('peername')
|
||||
self.l_addr = self.writer.get_extra_info('sockname')
|
||||
|
||||
while True:
|
||||
try:
|
||||
await self.__async_read()
|
||||
|
||||
if self.unique_id:
|
||||
await self.__async_write()
|
||||
await self.__async_forward()
|
||||
await self.async_publ_mqtt()
|
||||
|
||||
except (ConnectionResetError,
|
||||
ConnectionAbortedError,
|
||||
BrokenPipeError,
|
||||
RuntimeError) as error:
|
||||
logger.warning(f'In loop for l{self.l_addr} | '
|
||||
f'r{self.r_addr}: {error}')
|
||||
self.close()
|
||||
return self
|
||||
except Exception:
|
||||
logger.error(
|
||||
f"Exception for {self.addr}:\n"
|
||||
f"{traceback.format_exc()}")
|
||||
self.close()
|
||||
return self
|
||||
|
||||
def disc(self) -> None:
|
||||
logger.debug(f'in AsyncStream.disc() l{self.l_addr} | r{self.r_addr}')
|
||||
self.writer.close()
|
||||
def __init__(self) -> None:
|
||||
logger.debug('AsyncIfcImpl.__init__')
|
||||
self.fwd_fifo = ByteFifo()
|
||||
self.tx_fifo = ByteFifo()
|
||||
self.rx_fifo = ByteFifo()
|
||||
self.conn_no = next(self._ids)
|
||||
self.node_id = ''
|
||||
self.timeout_cb = None
|
||||
self.init_new_client_conn_cb = None
|
||||
self.update_header_cb = None
|
||||
|
||||
def close(self):
|
||||
logger.debug(f'in AsyncStream.close() l{self.l_addr} | r{self.r_addr}')
|
||||
self.writer.close()
|
||||
super().close() # call close handler in the parent class
|
||||
self.timeout_cb = None
|
||||
self.fwd_fifo.reg_trigger(None)
|
||||
self.tx_fifo.reg_trigger(None)
|
||||
self.rx_fifo.reg_trigger(None)
|
||||
|
||||
# logger.info(f'AsyncStream refs: {gc.get_referrers(self)}')
|
||||
def set_node_id(self, value: str):
|
||||
self.node_id = value
|
||||
|
||||
def get_conn_no(self):
|
||||
return self.conn_no
|
||||
|
||||
def tx_add(self, data: bytearray):
|
||||
''' add data to transmit queue'''
|
||||
self.tx_fifo += data
|
||||
|
||||
def tx_flush(self):
|
||||
''' send transmit queue and clears it'''
|
||||
self.tx_fifo()
|
||||
|
||||
def tx_peek(self, size: int = None) -> bytearray:
|
||||
'''returns size numbers of byte without removing them'''
|
||||
return self.tx_fifo.peek(size)
|
||||
|
||||
def tx_log(self, level, info):
|
||||
''' log the transmit queue'''
|
||||
self.tx_fifo.logging(level, info)
|
||||
|
||||
def tx_clear(self):
|
||||
''' clear transmit queue'''
|
||||
self.tx_fifo.clear()
|
||||
|
||||
def tx_len(self):
|
||||
''' get numner of bytes in the transmit queue'''
|
||||
return len(self.tx_fifo)
|
||||
|
||||
def fwd_add(self, data: bytearray):
|
||||
''' add data to forward queue'''
|
||||
self.fwd_fifo += data
|
||||
|
||||
def fwd_log(self, level, info):
|
||||
''' log the forward queue'''
|
||||
self.fwd_fifo.logging(level, info)
|
||||
|
||||
def rx_get(self, size: int = None) -> bytearray:
|
||||
'''removes size numbers of bytes and return them'''
|
||||
return self.rx_fifo.get(size)
|
||||
|
||||
def rx_peek(self, size: int = None) -> bytearray:
|
||||
'''returns size numbers of byte without removing them'''
|
||||
return self.rx_fifo.peek(size)
|
||||
|
||||
def rx_log(self, level, info):
|
||||
''' logs the receive queue'''
|
||||
self.rx_fifo.logging(level, info)
|
||||
|
||||
def rx_clear(self):
|
||||
''' clear receive queue'''
|
||||
self.rx_fifo.clear()
|
||||
|
||||
def rx_len(self):
|
||||
''' get numner of bytes in the receive queue'''
|
||||
return len(self.rx_fifo)
|
||||
|
||||
def rx_set_cb(self, callback):
|
||||
self.rx_fifo.reg_trigger(callback)
|
||||
|
||||
def prot_set_timeout_cb(self, callback):
|
||||
self.timeout_cb = callback
|
||||
|
||||
def prot_set_init_new_client_conn_cb(self, callback):
|
||||
self.init_new_client_conn_cb = callback
|
||||
|
||||
def prot_set_update_header_cb(self, callback):
|
||||
self.update_header_cb = callback
|
||||
|
||||
|
||||
class StreamPtr():
|
||||
'''Descr StreamPtr'''
|
||||
def __init__(self, _stream, _ifc=None):
|
||||
self.stream = _stream
|
||||
self.ifc = _ifc
|
||||
|
||||
@property
|
||||
def ifc(self):
|
||||
return self._ifc
|
||||
|
||||
@ifc.setter
|
||||
def ifc(self, value):
|
||||
self._ifc = value
|
||||
|
||||
@property
|
||||
def stream(self):
|
||||
return self._stream
|
||||
|
||||
@stream.setter
|
||||
def stream(self, value):
|
||||
self._stream = value
|
||||
|
||||
|
||||
class AsyncStream(AsyncIfcImpl):
|
||||
MAX_PROC_TIME = 2
|
||||
'''maximum processing time for a received msg in sec'''
|
||||
MAX_START_TIME = 400
|
||||
'''maximum time without a received msg in sec'''
|
||||
MAX_INV_IDLE_TIME = 120
|
||||
'''maximum time without a received msg from the inverter in sec'''
|
||||
MAX_DEF_IDLE_TIME = 360
|
||||
'''maximum default time without a received msg in sec'''
|
||||
|
||||
def __init__(self, reader: StreamReader, writer: StreamWriter,
|
||||
rstream: "StreamPtr") -> None:
|
||||
AsyncIfcImpl.__init__(self)
|
||||
|
||||
logger.debug('AsyncStream.__init__')
|
||||
|
||||
self.remote = rstream
|
||||
self.tx_fifo.reg_trigger(self.__write_cb)
|
||||
self._reader = reader
|
||||
self._writer = writer
|
||||
self.r_addr = writer.get_extra_info('peername')
|
||||
self.l_addr = writer.get_extra_info('sockname')
|
||||
self.proc_start = None # start processing start timestamp
|
||||
self.proc_max = 0
|
||||
self.async_publ_mqtt = None # will be set AsyncStreamServer only
|
||||
|
||||
def __write_cb(self):
|
||||
self._writer.write(self.tx_fifo.get())
|
||||
|
||||
def __timeout(self) -> int:
|
||||
if self.timeout_cb:
|
||||
return self.timeout_cb()
|
||||
return 360
|
||||
|
||||
async def loop(self) -> Self:
|
||||
"""Async loop handler for precessing all received messages"""
|
||||
self.proc_start = time.time()
|
||||
while True:
|
||||
try:
|
||||
self.__calc_proc_time()
|
||||
dead_conn_to = self.__timeout()
|
||||
await asyncio.wait_for(self.__async_read(),
|
||||
dead_conn_to)
|
||||
|
||||
await self.__async_write()
|
||||
await self.__async_forward()
|
||||
if self.async_publ_mqtt:
|
||||
await self.async_publ_mqtt()
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
logger.warning(f'[{self.node_id}:{self.conn_no}] Dead '
|
||||
f'connection timeout ({dead_conn_to}s) '
|
||||
f'for {self.l_addr}')
|
||||
await self.disc()
|
||||
return self
|
||||
|
||||
except OSError as error:
|
||||
logger.error(f'[{self.node_id}:{self.conn_no}] '
|
||||
f'{error} for l{self.l_addr} | '
|
||||
f'r{self.r_addr}')
|
||||
await self.disc()
|
||||
return self
|
||||
|
||||
except RuntimeError as error:
|
||||
logger.info(f'[{self.node_id}:{self.conn_no}] '
|
||||
f'{error} for {self.l_addr}')
|
||||
await self.disc()
|
||||
return self
|
||||
|
||||
except Exception:
|
||||
Infos.inc_counter('SW_Exception')
|
||||
logger.error(
|
||||
f"Exception for {self.r_addr}:\n"
|
||||
f"{traceback.format_exc()}")
|
||||
await asyncio.sleep(0) # be cooperative to other task
|
||||
|
||||
def __calc_proc_time(self):
|
||||
if self.proc_start:
|
||||
proc = time.time() - self.proc_start
|
||||
if proc > self.proc_max:
|
||||
self.proc_max = proc
|
||||
self.proc_start = None
|
||||
|
||||
async def disc(self) -> None:
|
||||
"""Async disc handler for graceful disconnect"""
|
||||
if self._writer.is_closing():
|
||||
return
|
||||
logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}')
|
||||
self._writer.close()
|
||||
await self._writer.wait_closed()
|
||||
|
||||
def close(self) -> None:
|
||||
logging.debug(f'AsyncStream.close() l{self.l_addr} | r{self.r_addr}')
|
||||
"""close handler for a no waiting disconnect
|
||||
|
||||
hint: must be called before releasing the connection instance
|
||||
"""
|
||||
super().close()
|
||||
self._reader.feed_eof() # abort awaited read
|
||||
if self._writer.is_closing():
|
||||
return
|
||||
self._writer.close()
|
||||
|
||||
def healthy(self) -> bool:
|
||||
elapsed = 0
|
||||
if self.proc_start is not None:
|
||||
elapsed = time.time() - self.proc_start
|
||||
if elapsed > self.MAX_PROC_TIME:
|
||||
logging.debug(f'[{self.node_id}:{self.conn_no}:'
|
||||
f'{type(self).__name__}]'
|
||||
f' act:{round(1000*elapsed)}ms'
|
||||
f' max:{round(1000*self.proc_max)}ms')
|
||||
logging.debug(f'Healthy()) refs: {gc.get_referrers(self)}')
|
||||
return elapsed < 5
|
||||
|
||||
'''
|
||||
Our private methods
|
||||
'''
|
||||
async def __async_read(self) -> None:
|
||||
data = await self.reader.read(4096)
|
||||
"""Async read handler to read received data from TCP stream"""
|
||||
data = await self._reader.read(4096)
|
||||
if data:
|
||||
self._recv_buffer += data
|
||||
self.read() # call read in parent class
|
||||
self.proc_start = time.time()
|
||||
self.rx_fifo += data
|
||||
wait = self.rx_fifo() # call read in parent class
|
||||
if wait and wait > 0:
|
||||
await asyncio.sleep(wait)
|
||||
else:
|
||||
raise RuntimeError("Peer closed.")
|
||||
|
||||
async def __async_write(self) -> None:
|
||||
if self._send_buffer:
|
||||
hex_dump_memory(logging.INFO, f'Transmit to {self.addr}:',
|
||||
self._send_buffer, len(self._send_buffer))
|
||||
self.writer.write(self._send_buffer)
|
||||
await self.writer.drain()
|
||||
self._send_buffer = bytearray(0) # self._send_buffer[sent:]
|
||||
async def __async_write(self, headline: str = 'Transmit to ') -> None:
|
||||
"""Async write handler to transmit the send_buffer"""
|
||||
if len(self.tx_fifo) > 0:
|
||||
self.tx_fifo.logging(logging.INFO, f'{headline}{self.r_addr}:')
|
||||
self._writer.write(self.tx_fifo.get())
|
||||
await self._writer.drain()
|
||||
|
||||
async def __async_forward(self) -> None:
|
||||
if self._forward_buffer:
|
||||
if not self.remoteStream:
|
||||
await self.async_create_remote()
|
||||
"""forward handler transmits data over the remote connection"""
|
||||
if len(self.fwd_fifo) == 0:
|
||||
return
|
||||
try:
|
||||
await self._async_forward()
|
||||
|
||||
if self.remoteStream:
|
||||
hex_dump_memory(logging.INFO,
|
||||
f'Forward to {self.remoteStream.addr}:',
|
||||
self._forward_buffer,
|
||||
len(self._forward_buffer))
|
||||
self.remoteStream.writer.write(self._forward_buffer)
|
||||
await self.remoteStream.writer.drain()
|
||||
self._forward_buffer = bytearray(0)
|
||||
except OSError as error:
|
||||
if self.remote.stream:
|
||||
rmt = self.remote
|
||||
logger.error(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] '
|
||||
f'Fwd: {error} for '
|
||||
f'l{rmt.ifc.l_addr} | r{rmt.ifc.r_addr}')
|
||||
await rmt.ifc.disc()
|
||||
if rmt.ifc.close_cb:
|
||||
rmt.ifc.close_cb()
|
||||
|
||||
async def async_create_remote(self) -> None:
|
||||
pass
|
||||
except RuntimeError as error:
|
||||
if self.remote.stream:
|
||||
rmt = self.remote
|
||||
logger.info(f'[{rmt.stream.node_id}:{rmt.stream.conn_no}] '
|
||||
f'Fwd: {error} for {rmt.ifc.l_addr}')
|
||||
await rmt.ifc.disc()
|
||||
if rmt.ifc.close_cb:
|
||||
rmt.ifc.close_cb()
|
||||
|
||||
async def async_publ_mqtt(self) -> None:
|
||||
pass
|
||||
except Exception:
|
||||
Infos.inc_counter('SW_Exception')
|
||||
logger.error(
|
||||
f"Fwd Exception for {self.r_addr}:\n"
|
||||
f"{traceback.format_exc()}")
|
||||
|
||||
def __del__(self):
|
||||
logging.debug(f"AsyncStream.__del__ l{self.l_addr} | r{self.r_addr}")
|
||||
async def publish_outstanding_mqtt(self):
|
||||
'''Publish all outstanding MQTT topics'''
|
||||
try:
|
||||
await self.async_publ_mqtt()
|
||||
await Proxy._async_publ_mqtt_proxy_stat('proxy')
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
class AsyncStreamServer(AsyncStream):
|
||||
def __init__(self, reader: StreamReader, writer: StreamWriter,
|
||||
async_publ_mqtt, create_remote,
|
||||
rstream: "StreamPtr") -> None:
|
||||
AsyncStream.__init__(self, reader, writer, rstream)
|
||||
self.create_remote = create_remote
|
||||
self.async_publ_mqtt = async_publ_mqtt
|
||||
|
||||
def close(self) -> None:
|
||||
logging.debug('AsyncStreamServer.close()')
|
||||
self.create_remote = None
|
||||
self.async_publ_mqtt = None
|
||||
super().close()
|
||||
|
||||
async def server_loop(self) -> None:
|
||||
'''Loop for receiving messages from the inverter (server-side)'''
|
||||
logger.info(f'[{self.node_id}:{self.conn_no}] '
|
||||
f'Accept connection from {self.r_addr}')
|
||||
Infos.inc_counter('Inverter_Cnt')
|
||||
await self.publish_outstanding_mqtt()
|
||||
await self.loop()
|
||||
Infos.dec_counter('Inverter_Cnt')
|
||||
await self.publish_outstanding_mqtt()
|
||||
logger.info(f'[{self.node_id}:{self.conn_no}] Server loop stopped for'
|
||||
f' r{self.r_addr}')
|
||||
|
||||
# if the server connection closes, we also have to disconnect
|
||||
# the connection to te TSUN cloud
|
||||
if self.remote and self.remote.stream:
|
||||
logger.info(f'[{self.node_id}:{self.conn_no}] disc client '
|
||||
f'connection: [{self.remote.ifc.node_id}:'
|
||||
f'{self.remote.ifc.conn_no}]')
|
||||
await self.remote.ifc.disc()
|
||||
|
||||
async def _async_forward(self) -> None:
|
||||
"""forward handler transmits data over the remote connection"""
|
||||
if not self.remote.stream:
|
||||
await self.create_remote()
|
||||
if self.remote.stream and \
|
||||
self.remote.ifc.init_new_client_conn_cb():
|
||||
await self.remote.ifc._AsyncStream__async_write()
|
||||
if self.remote.stream:
|
||||
self.remote.ifc.update_header_cb(self.fwd_fifo.peek())
|
||||
self.fwd_fifo.logging(logging.INFO, 'Forward to '
|
||||
f'{self.remote.ifc.r_addr}:')
|
||||
self.remote.ifc._writer.write(self.fwd_fifo.get())
|
||||
await self.remote.ifc._writer.drain()
|
||||
|
||||
|
||||
class AsyncStreamClient(AsyncStream):
|
||||
def __init__(self, reader: StreamReader, writer: StreamWriter,
|
||||
rstream: "StreamPtr", close_cb) -> None:
|
||||
AsyncStream.__init__(self, reader, writer, rstream)
|
||||
self.close_cb = close_cb
|
||||
|
||||
async def disc(self) -> None:
|
||||
logging.debug('AsyncStreamClient.disc()')
|
||||
self.remote = None
|
||||
await super().disc()
|
||||
|
||||
def close(self) -> None:
|
||||
logging.debug('AsyncStreamClient.close()')
|
||||
self.close_cb = None
|
||||
super().close()
|
||||
|
||||
async def client_loop(self, _: str) -> None:
|
||||
'''Loop for receiving messages from the TSUN cloud (client-side)'''
|
||||
Infos.inc_counter('Cloud_Conn_Cnt')
|
||||
await self.publish_outstanding_mqtt()
|
||||
await self.loop()
|
||||
Infos.dec_counter('Cloud_Conn_Cnt')
|
||||
await self.publish_outstanding_mqtt()
|
||||
logger.info(f'[{self.node_id}:{self.conn_no}] '
|
||||
'Client loop stopped for'
|
||||
f' l{self.l_addr}')
|
||||
|
||||
if self.close_cb:
|
||||
self.close_cb()
|
||||
|
||||
async def _async_forward(self) -> None:
|
||||
"""forward handler transmits data over the remote connection"""
|
||||
if self.remote.stream:
|
||||
self.remote.ifc.update_header_cb(self.fwd_fifo.peek())
|
||||
self.fwd_fifo.logging(logging.INFO, 'Forward to '
|
||||
f'{self.remote.ifc.r_addr}:')
|
||||
self.remote.ifc._writer.write(self.fwd_fifo.get())
|
||||
await self.remote.ifc._writer.drain()
|
||||
|
||||
52
app/src/byte_fifo.py
Normal file
52
app/src/byte_fifo.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from messages import hex_dump_str, hex_dump_memory
|
||||
|
||||
|
||||
class ByteFifo:
    """Byte FIFO buffer with an optional trigger callback.

    Bytes are appended with ``fifo += data`` and consumed with
    :meth:`get`; calling the instance itself fires the registered
    trigger callback.
    """
    __slots__ = ('_data', '_observer')

    def __init__(self):
        self._data = bytearray()   # backing byte store
        self._observer = None      # trigger callback, set via reg_trigger()

    def reg_trigger(self, cb) -> None:
        '''Register the observer callback fired by calling the instance.'''
        self._observer = cb

    def __iadd__(self, data):
        '''Append the given bytes to the end of the FIFO.'''
        self._data.extend(data)
        return self

    def __call__(self):
        '''Trigger the observer; returns its result, or None if unset.'''
        cb = self._observer
        return cb() if callable(cb) else None

    def get(self, size: int = None) -> bytearray:
        '''Remove and return the first size bytes (everything if size
        is falsy).'''
        if size:
            head = self._data[:size]
            # fast in-place removal of the consumed prefix
            del self._data[:size]
            return head
        head = self._data
        self.clear()
        return head

    def peek(self, size: int = None) -> bytearray:
        '''Return the first size bytes (everything if size is falsy)
        without consuming them.'''
        return self._data[:size] if size else self._data

    def clear(self):
        '''Drop all buffered bytes.'''
        self._data = bytearray()

    def __len__(self) -> int:
        return len(self._data)

    def __str__(self) -> str:
        return hex_dump_str(self._data, len(self._data))

    def logging(self, level, info):
        '''Hex-dump the whole buffer to the log at the given level.'''
        hex_dump_memory(level, info, self._data, len(self._data))
|
||||
211
app/src/cnf/config.py
Normal file
211
app/src/cnf/config.py
Normal file
@@ -0,0 +1,211 @@
|
||||
'''Config module handles the proxy configuration'''
|
||||
|
||||
import shutil
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from schema import Schema, And, Or, Use, Optional
|
||||
|
||||
|
||||
class ConfigIfc(ABC):
    '''Abstract base class for configuration readers.

    Instantiating a concrete reader registers it with ``Config.add``,
    which then pulls and merges the reader's settings.
    '''
    def __init__(self):
        # registration triggers Config to merge this reader's settings
        Config.add(self)

    @abstractmethod
    def get_config(self) -> dict:  # pragma: no cover
        '''Return the raw (unvalidated) config from this reader.'''
        pass

    @abstractmethod
    def descr(self) -> str:  # pragma: no cover
        '''Return a description of the source, e.g. the file name.'''
        pass

    def _extend_key(self, conf, key, val):
        '''Expand a dotted key like ``'a.b.c'`` into nested dicts
        inside conf and store val at the leaf.'''
        parts = key.split('.')
        node = conf
        for part in parts[:-1]:  # pragma: no branch
            node = node.setdefault(part, {})
        node[parts[-1]] = val
|
||||
|
||||
|
||||
class Config():
    '''Static class Config builds and sanitizes the internal config
    dictionary.

    Using config readers, a partial configuration is added to the config.
    Config readers are derivations of the abstract ConfigIfc reader.
    When a config reader is instantiated, its `get_config` method is
    called automatically and afterwards the config will be merged.
    '''

    # Schema used to validate and coerce every merged configuration;
    # unknown keys are tolerated (ignore_extra_keys=True).
    conf_schema = Schema({
        'tsun': {
            'enabled': Use(bool),
            'host': Use(str),
            # only non-privileged ports are accepted
            'port': And(Use(int), lambda n: 1024 <= n <= 65535)
        },
        'solarman': {
            'enabled': Use(bool),
            'host': Use(str),
            'port': And(Use(int), lambda n: 1024 <= n <= 65535)
        },
        'mqtt': {
            'host': Use(str),
            'port': And(Use(int), lambda n: 1024 <= n <= 65535),
            # empty user/passwd strings are normalized to None
            'user': Or(None, And(Use(str),
                                 Use(lambda s: s if len(s) > 0 else None))),
            'passwd': Or(None, And(Use(str),
                                   Use(lambda s: s if len(s) > 0 else None)))
        },
        'ha': {
            'auto_conf_prefix': Use(str),
            'discovery_prefix': Use(str),
            'entity_prefix': Use(str),
            'proxy_node_id': Use(str),
            'proxy_unique_id': Use(str)
        },
        'gen3plus': {
            # AT-command access control lists per backend
            'at_acl': {
                Or('mqtt', 'tsun'): {
                    'allow': [str],
                    Optional('block', default=[]): [str]
                }
            }
        },
        'inverters': {
            # inverter entries are keyed by a 16-character serial number
            'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): {
                Optional('monitor_sn', default=0): Use(int),
                # non-empty node_id is normalized to end with '/'
                Optional('node_id', default=""): And(Use(str),
                                                     Use(lambda s: s + '/'
                                                         if len(s) > 0
                                                         and s[-1] != '/'
                                                         else s)),
                Optional('client_mode'): {
                    'host': Use(str),
                    Optional('port', default=8899):
                        And(Use(int), lambda n: 1024 <= n <= 65535),
                    Optional('forward', default=False): Use(bool),
                },
                Optional('modbus_polling', default=True): Use(bool),
                Optional('suggested_area', default=""): Use(str),
                Optional('sensor_list', default=0x2b0): Use(int),
                Optional('pv1'): {
                    Optional('type'): Use(str),
                    Optional('manufacturer'): Use(str),
                },
                Optional('pv2'): {
                    Optional('type'): Use(str),
                    Optional('manufacturer'): Use(str),
                },
                Optional('pv3'): {
                    Optional('type'): Use(str),
                    Optional('manufacturer'): Use(str),
                },
                Optional('pv4'): {
                    Optional('type'): Use(str),
                    Optional('manufacturer'): Use(str),
                },
                Optional('pv5'): {
                    Optional('type'): Use(str),
                    Optional('manufacturer'): Use(str),
                },
                Optional('pv6'): {
                    Optional('type'): Use(str),
                    Optional('manufacturer'): Use(str),
                }
            }
        }
    }, ignore_extra_keys=True
    )

    @classmethod
    def init(cls, def_reader: ConfigIfc) -> None | str:
        '''Initialise the Proxy-Config

        Copy the internal default config file into the config directory
        and initialise the Config with the default configuration '''
        cls.err = None
        cls.def_config = {}
        try:
            # make the default config transparent by copying it
            # in the config.example file
            logging.debug('Copy Default Config to config.example.toml')

            shutil.copy2("default_config.toml",
                         "config/config.example.toml")
        except Exception:
            # best effort only -- e.g. a read-only config dir is not fatal
            pass

        # read example config file as default configuration
        try:
            def_config = def_reader.get_config()
            cls.def_config = cls.conf_schema.validate(def_config)
            logging.info(f'Read from {def_reader.descr()} => ok')
        except Exception as error:
            cls.err = f'Config.read: {error}'
            logging.error(
                f"Can't read from {def_reader.descr()} => error\n {error}")

        # the active config starts as a (shallow) copy of the defaults
        cls.act_config = cls.def_config.copy()

    @classmethod
    def add(cls, reader: ConfigIfc):
        '''Merge the config from the Config Reader into the config

        Checks if a default config exists. If no default configuration
        exists, the Config.init method has not yet been called. This is
        normal for the very first Config Reader which creates the default
        config and must be ignored here. The default config reader is
        handled in the Config.init method'''
        if hasattr(cls, 'def_config'):
            cls.__parse(reader)

    @classmethod
    def get_error(cls) -> None | str:
        '''return the last error as a string or None if there is no error'''
        return cls.err

    @classmethod
    def __parse(cls, reader) -> None | str:
        '''Read config from the reader, merge it with the default config
        and sanitize the result'''
        res = 'ok'
        try:
            rd_config = reader.get_config()
            config = cls.act_config.copy()
            # shallow merge per known top-level section; reader values win
            for key in ['tsun', 'solarman', 'mqtt', 'ha', 'inverters',
                        'gen3plus']:
                if key in rd_config:
                    config[key] = config[key] | rd_config[key]

            cls.act_config = cls.conf_schema.validate(config)
        except FileNotFoundError:
            # a missing (optional) config source is not an error
            res = 'n/a'
        except Exception as error:
            cls.err = f'error: {error}'
            logging.error(
                f"Can't read from {reader.descr()} => error\n {error}")
            return cls.err

        logging.info(f'Read from {reader.descr()} => {res}')
        return cls.err

    @classmethod
    def get(cls, member: str = None):
        '''Get a named attribute from the proxy config. If member ==
        None it returns the complete config dict'''

        if member:
            return cls.act_config.get(member, {})
        else:
            return cls.act_config

    @classmethod
    def is_default(cls, member: str) -> bool:
        '''Check if the member is the default value'''

        return cls.act_config.get(member) == cls.def_config.get(member)
|
||||
25
app/src/cnf/config_read_env.py
Normal file
25
app/src/cnf/config_read_env.py
Normal file
@@ -0,0 +1,25 @@
|
||||
'''Config Reader module which handles config values from the environment'''
|
||||
|
||||
import os
|
||||
from cnf.config import ConfigIfc
|
||||
|
||||
|
||||
class ConfigReadEnv(ConfigIfc):
    '''Reader for environment values of the configuration'''

    def get_config(self) -> dict:
        '''Build a partial config dict from well-known environment
        variables; unset or empty variables are skipped.'''
        mapping = (
            ('mqtt.host', 'MQTT_HOST'),
            ('mqtt.port', 'MQTT_PORT'),
            ('mqtt.user', 'MQTT_USER'),
            ('mqtt.passwd', 'MQTT_PASSWORD'),
        )
        conf = {}
        for cnf_key, env_name in mapping:
            value = os.getenv(env_name)
            if value:
                self._extend_key(conf, cnf_key, value)
        return conf

    def descr(self):
        '''Name of this config source.'''
        return "environment"
|
||||
46
app/src/cnf/config_read_json.py
Normal file
46
app/src/cnf/config_read_json.py
Normal file
@@ -0,0 +1,46 @@
|
||||
'''Config Reader module which handles *.json config files'''
|
||||
|
||||
import json
|
||||
from cnf.config import ConfigIfc
|
||||
|
||||
|
||||
class ConfigReadJson(ConfigIfc):
    '''Reader for json config files'''
    def __init__(self, cnf_file='/data/options.json'):
        '''Read a json file and add the settings to the config'''
        # a non-string argument (e.g. None) disables this reader silently
        if not isinstance(cnf_file, str):
            return
        self.cnf_file = cnf_file
        super().__init__()

    def convert_inv(self, conf, inv):
        # move the 'serial' value up to become the dict key;
        # NOTE(review): entries without a 'serial' key appear to be
        # ignored -- confirm against the json schema of the callers
        if 'serial' in inv:
            snr = inv['serial']
            del inv['serial']
            conf[snr] = {}

            for key, val in inv.items():
                self._extend_key(conf[snr], key, val)

    def convert_inv_arr(self, conf, key, val: list):
        # convert a json array of inverter objects into a dict keyed
        # by serial number, as expected by the config schema
        if key not in conf:
            conf[key] = {}
        for elm in val:
            self.convert_inv(conf[key], elm)

    def convert_to_obj(self, data):
        # expand dotted keys into nested dicts and normalize the
        # 'inverters' array into the schema's mapping form
        conf = {}
        for key, val in data.items():
            if key == 'inverters' and isinstance(val, list):
                self.convert_inv_arr(conf, key, val)
            else:
                self._extend_key(conf, key, val)
        return conf

    def get_config(self) -> dict:
        '''Parse the json file and return its converted settings.'''
        with open(self.cnf_file) as f:
            data = json.load(f)
        return self.convert_to_obj(data)

    def descr(self):
        '''Name of this config source (the file path).'''
        return self.cnf_file
|
||||
21
app/src/cnf/config_read_toml.py
Normal file
21
app/src/cnf/config_read_toml.py
Normal file
@@ -0,0 +1,21 @@
|
||||
'''Config Reader module which handles *.toml config files'''
|
||||
|
||||
import tomllib
|
||||
from cnf.config import ConfigIfc
|
||||
|
||||
|
||||
class ConfigReadToml(ConfigIfc):
    '''Reader for toml config files'''

    def __init__(self, cnf_file):
        '''Register the given *.toml file as a config source.'''
        # a non-string argument (e.g. None) disables this reader silently
        if not isinstance(cnf_file, str):
            return
        self.cnf_file = cnf_file
        super().__init__()

    def get_config(self) -> dict:
        '''Parse the toml file and return its raw settings.'''
        with open(self.cnf_file, "rb") as file:
            return tomllib.load(file)

    def descr(self):
        '''Name of this config source (the file path).'''
        return self.cnf_file
|
||||
@@ -1,90 +0,0 @@
|
||||
'''Config module handles the proxy configuration in the config.toml file'''
|
||||
|
||||
import shutil
|
||||
import tomllib
|
||||
import logging
|
||||
from schema import Schema, And, Use, Optional
|
||||
|
||||
|
||||
class Config():
|
||||
'''Static class Config is reads and sanitize the config.
|
||||
|
||||
Read config.toml file and sanitize it with read().
|
||||
Get named parts of the config with get()'''
|
||||
|
||||
config = {}
|
||||
conf_schema = Schema({
|
||||
'tsun': {
|
||||
'enabled': Use(bool),
|
||||
'host': Use(str),
|
||||
'port': And(Use(int), lambda n: 1024 <= n <= 65535)
|
||||
},
|
||||
'mqtt': {
|
||||
'host': Use(str),
|
||||
'port': And(Use(int), lambda n: 1024 <= n <= 65535),
|
||||
'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)),
|
||||
'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None))
|
||||
},
|
||||
'ha': {
|
||||
'auto_conf_prefix': Use(str),
|
||||
'discovery_prefix': Use(str),
|
||||
'entity_prefix': Use(str),
|
||||
'proxy_node_id': Use(str),
|
||||
'proxy_unique_id': Use(str)
|
||||
},
|
||||
'inverters': {
|
||||
'allow_all': Use(bool), And(Use(str), lambda s: len(s) == 16): {
|
||||
Optional('node_id', default=""): And(Use(str),
|
||||
Use(lambda s: s + '/'
|
||||
if len(s) > 0 and
|
||||
s[-1] != '/' else s)),
|
||||
|
||||
Optional('suggested_area', default=""): Use(str)
|
||||
}}
|
||||
}, ignore_extra_keys=True
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def read(cls) -> None:
|
||||
'''Read config file, merge it with the default config
|
||||
and sanitize the result'''
|
||||
|
||||
config = {}
|
||||
logger = logging.getLogger('data')
|
||||
|
||||
try:
|
||||
# make the default config transparaent by copying it
|
||||
# in the config.example file
|
||||
shutil.copy2("default_config.toml", "config/config.example.toml")
|
||||
|
||||
# read example config file as default configuration
|
||||
with open("default_config.toml", "rb") as f:
|
||||
def_config = tomllib.load(f)
|
||||
|
||||
# overwrite the default values, with values from
|
||||
# the config.toml file
|
||||
with open("config/config.toml", "rb") as f:
|
||||
usr_config = tomllib.load(f)
|
||||
|
||||
config['tsun'] = def_config['tsun'] | usr_config['tsun']
|
||||
config['mqtt'] = def_config['mqtt'] | usr_config['mqtt']
|
||||
config['ha'] = def_config['ha'] | usr_config['ha']
|
||||
config['inverters'] = def_config['inverters'] | \
|
||||
usr_config['inverters']
|
||||
|
||||
cls.config = cls.conf_schema.validate(config)
|
||||
# logging.debug(f'Readed config: "{cls.config}" ')
|
||||
|
||||
except Exception as error:
|
||||
logger.error(f'Config.read: {error}')
|
||||
cls.config = {}
|
||||
|
||||
@classmethod
|
||||
def get(cls, member: str = None):
|
||||
'''Get a named attribute from the proxy config. If member ==
|
||||
None it returns the complete config dict'''
|
||||
|
||||
if member:
|
||||
return cls.config.get(member, {})
|
||||
else:
|
||||
return cls.config
|
||||
194
app/src/gen3/infos_g3.py
Normal file
194
app/src/gen3/infos_g3.py
Normal file
@@ -0,0 +1,194 @@
|
||||
|
||||
import struct
|
||||
import logging
|
||||
from typing import Generator
|
||||
|
||||
from infos import Infos, Register
|
||||
|
||||
|
||||
class RegisterMap:
    '''Maps GEN3 info addresses to generic Register ids.

    An optional 'ratio' entry scales the received raw value before
    it is stored.
    '''
    __slots__ = ()

    map = {
        # collector / data logger identification
        0x00092ba8: {'reg': Register.COLLECTOR_FW_VERSION},
        0x000927c0: {'reg': Register.CHIP_TYPE},
        0x00092f90: {'reg': Register.CHIP_MODEL},
        0x00094ae8: {'reg': Register.MAC_ADDR},
        0x00095a88: {'reg': Register.TRACE_URL},
        0x00095aec: {'reg': Register.LOGGER_URL},
        # inverter identification
        0x0000000a: {'reg': Register.PRODUCT_NAME},
        0x00000014: {'reg': Register.MANUFACTURER},
        0x0000001e: {'reg': Register.VERSION},
        0x00000028: {'reg': Register.SERIAL_NUMBER},
        0x00000032: {'reg': Register.EQUIPMENT_MODEL},
        0x00013880: {'reg': Register.NO_INPUTS},
        # proxy-internal pseudo registers (counters, diagnostics)
        0xffffff00: {'reg': Register.INVERTER_CNT},
        0xffffff01: {'reg': Register.UNKNOWN_SNR},
        0xffffff02: {'reg': Register.UNKNOWN_MSG},
        0xffffff03: {'reg': Register.INVALID_DATA_TYPE},
        0xffffff04: {'reg': Register.INTERNAL_ERROR},
        0xffffff05: {'reg': Register.UNKNOWN_CTRL},
        0xffffff06: {'reg': Register.OTA_START_MSG},
        0xffffff07: {'reg': Register.SW_EXCEPTION},
        0xffffff08: {'reg': Register.POLLING_INTERVAL},
        0xfffffffe: {'reg': Register.TEST_REG1},
        0xffffffff: {'reg': Register.TEST_REG2},
        # live telemetry: power, temperature, per-string values
        0x00000640: {'reg': Register.OUTPUT_POWER},
        0x000005dc: {'reg': Register.RATED_POWER},
        0x00000514: {'reg': Register.INVERTER_TEMP},
        0x000006a4: {'reg': Register.PV1_VOLTAGE},
        0x00000708: {'reg': Register.PV1_CURRENT},
        0x0000076c: {'reg': Register.PV1_POWER},
        0x000007d0: {'reg': Register.PV2_VOLTAGE},
        0x00000834: {'reg': Register.PV2_CURRENT},
        0x00000898: {'reg': Register.PV2_POWER},
        0x000008fc: {'reg': Register.PV3_VOLTAGE},
        0x00000960: {'reg': Register.PV3_CURRENT},
        0x000009c4: {'reg': Register.PV3_POWER},
        0x00000a28: {'reg': Register.PV4_VOLTAGE},
        0x00000a8c: {'reg': Register.PV4_CURRENT},
        0x00000af0: {'reg': Register.PV4_POWER},
        # energy counters, per string and total
        0x00000c1c: {'reg': Register.PV1_DAILY_GENERATION},
        0x00000c80: {'reg': Register.PV1_TOTAL_GENERATION},
        0x00000ce4: {'reg': Register.PV2_DAILY_GENERATION},
        0x00000d48: {'reg': Register.PV2_TOTAL_GENERATION},
        0x00000dac: {'reg': Register.PV3_DAILY_GENERATION},
        0x00000e10: {'reg': Register.PV3_TOTAL_GENERATION},
        0x00000e74: {'reg': Register.PV4_DAILY_GENERATION},
        0x00000ed8: {'reg': Register.PV4_TOTAL_GENERATION},
        0x00000b54: {'reg': Register.DAILY_GENERATION},
        0x00000bb8: {'reg': Register.TOTAL_GENERATION},
        # grid-side measurements
        0x000003e8: {'reg': Register.GRID_VOLTAGE},
        0x0000044c: {'reg': Register.GRID_CURRENT},
        0x000004b0: {'reg': Register.GRID_FREQUENCY},
        # connection / communication statistics
        0x000cfc38: {'reg': Register.CONNECT_COUNT},
        0x000c3500: {'reg': Register.SIGNAL_STRENGTH},
        0x000c96a8: {'reg': Register.POWER_ON_TIME},
        0x000d0020: {'reg': Register.COLLECT_INTERVAL},
        0x000cf850: {'reg': Register.DATA_UP_INTERVAL},
        0x000c7f38: {'reg': Register.COMMUNICATION_TYPE},
        # event / alarm registers
        0x00000190: {'reg': Register.EVENT_ALARM},
        0x000001f4: {'reg': Register.EVENT_FAULT},
        0x00000258: {'reg': Register.EVENT_BF1},
        0x000002bc: {'reg': Register.EVENT_BF2},
        0x00000064: {'reg': Register.INVERTER_STATUS},

        # controller / configuration registers; 'ratio' converts the
        # raw 0..1024 coefficient values into percent
        0x00000fa0: {'reg': Register.BOOT_STATUS},
        0x00001004: {'reg': Register.DSP_STATUS},
        0x000010cc: {'reg': Register.WORK_MODE},
        0x000011f8: {'reg': Register.OUTPUT_SHUTDOWN},
        0x0000125c: {'reg': Register.MAX_DESIGNED_POWER},
        0x000012c0: {'reg': Register.RATED_LEVEL},
        0x00001324: {'reg': Register.INPUT_COEFFICIENT, 'ratio': 100/1024},
        0x00001388: {'reg': Register.GRID_VOLT_CAL_COEF},
        0x00002710: {'reg': Register.PROD_COMPL_TYPE},
        0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024},
    }
|
||||
|
||||
|
||||
class InfosG3(Infos):
    '''Info database for GEN3 inverters.

    Decodes the typed key/value telemetry format of the GEN3 protocol
    and maps the received addresses via RegisterMap into Infos.db.
    '''
    __slots__ = ()

    def ha_confs(self, ha_prfx: str, node_id: str, snr: str,
                 sug_area: str = '') \
            -> Generator[tuple[dict, str], None, None]:
        '''Generator function yields a json register struct for home-assistant
        auto configuration and a unique entity string

        arguments:
        prfx:str ==> MQTT prefix for the home assistant 'stat_t string
        snr:str ==> serial number of the inverter, used to build unique
        entity strings
        sug_area:str ==> suggested area string from the config file'''
        # iterate over RegisterMap.map and get the register values
        for row in RegisterMap.map.values():
            reg = row['reg']
            res = self.ha_conf(reg, ha_prfx, node_id, snr, False, sug_area)  # noqa: E501
            if res:
                yield res

    def parse(self, buf, ind=0, node_id: str = '') -> \
            Generator[tuple[str, bool], None, None]:
        '''parse a data sequence received from the inverter and
        stores the values in Infos.db

        buf: buffer of the sequence to parse'''
        # the sequence starts with a big-endian int32 element count
        result = struct.unpack_from('!l', buf, ind)
        elms = result[0]
        i = 0
        ind += 4
        while i < elms:
            # each element: 4-byte address followed by a 1-byte type tag
            result = struct.unpack_from('!lB', buf, ind)
            addr = result[0]
            if addr not in RegisterMap.map:
                # unknown address: still decode (to advance ind), but
                # store under the pseudo id -1
                row = None
                info_id = -1
            else:
                row = RegisterMap.map[addr]
                info_id = row['reg']
            data_type = result[1]
            ind += 5

            if data_type == 0x54:    # 'T' -> Pascal-String
                str_len = buf[ind]
                result = struct.unpack_from(f'!{str_len+1}p', buf,
                                            ind)[0].decode(encoding='ascii',
                                                           errors='replace')
                ind += str_len+1

            elif data_type == 0x00:  # 'Nul' -> end
                i = elms  # abort the loop

            elif data_type == 0x41:  # 'A' -> Nop ??
                ind += 0
                i += 1
                continue

            elif data_type == 0x42:  # 'B' -> byte, int8
                result = struct.unpack_from('!B', buf, ind)[0]
                ind += 1

            elif data_type == 0x49:  # 'I' -> int32
                result = struct.unpack_from('!l', buf, ind)[0]
                ind += 4

            elif data_type == 0x53:  # 'S' -> short, int16
                result = struct.unpack_from('!h', buf, ind)[0]
                ind += 2

            elif data_type == 0x46:  # 'F' -> float32
                result = round(struct.unpack_from('!f', buf, ind)[0], 2)
                ind += 4

            elif data_type == 0x4c:  # 'L' -> long, int64
                result = struct.unpack_from('!q', buf, ind)[0]
                ind += 8

            else:
                # unknown type tag: we cannot know its length, so the
                # rest of the sequence is undecodable -> give up
                self.inc_counter('Invalid_Data_Type')
                logging.error(f"Infos.parse: data_type: {data_type}"
                              f" @0x{addr:04x} No:{i}"
                              " not supported")
                return

            # apply the optional per-register scale factor
            result = self.__modify_val(row, result)

            yield from self.__store_result(addr, result, info_id, node_id)
            i += 1

    def __modify_val(self, row, result):
        '''Scale the raw value when the register row defines a 'ratio'.'''
        if row and 'ratio' in row:
            result = round(result * row['ratio'], 2)
        return result

    def __store_result(self, addr, result, info_id, node_id):
        '''Store a decoded value in the db; yields (key, updated) for
        known registers and traces the change.'''
        keys, level, unit, must_incr = self._key_obj(info_id)
        if keys:
            name, update = self.update_db(keys, must_incr, result)
            yield keys[0], update
        else:
            update = False
            name = str(f'info-id.0x{addr:x}')
        if update:
            self.tracer.log(level, f'[{node_id}] GEN3: {name} :'
                            f' {result}{unit}')
|
||||
9
app/src/gen3/inverter_g3.py
Normal file
9
app/src/gen3/inverter_g3.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from asyncio import StreamReader, StreamWriter
|
||||
|
||||
from inverter_base import InverterBase
|
||||
from gen3.talent import Talent
|
||||
|
||||
|
||||
class InverterG3(InverterBase):
    '''GEN3 inverter connection: binds the Talent protocol to the
    generic inverter stream handling.'''
    def __init__(self, reader: StreamReader, writer: StreamWriter):
        # 'tsun' selects the cloud config section used for forwarding
        super().__init__(reader, writer, 'tsun', Talent)
|
||||
575
app/src/gen3/talent.py
Normal file
575
app/src/gen3/talent.py
Normal file
@@ -0,0 +1,575 @@
|
||||
import struct
|
||||
import logging
|
||||
from zoneinfo import ZoneInfo
|
||||
from datetime import datetime
|
||||
from tzlocal import get_localzone
|
||||
|
||||
from async_ifc import AsyncIfc
|
||||
from messages import Message, State
|
||||
from modbus import Modbus
|
||||
from cnf.config import Config
|
||||
from gen3.infos_g3 import InfosG3
|
||||
from infos import Register
|
||||
|
||||
logger = logging.getLogger('msg')
|
||||
|
||||
|
||||
class Control:
    '''Wrapper around the one-byte control field of a Talent message.

    Known values: 0x70 request, 0x91 indication, 0x99 response.
    '''
    IND = 0x91   # indication
    REQ = 0x70   # request
    RESP = 0x99  # response

    def __init__(self, ctrl: int):
        self.ctrl = ctrl

    def __int__(self) -> int:
        return self.ctrl

    def is_ind(self) -> bool:
        '''True for an indication message (0x91).'''
        return self.ctrl == Control.IND

    def is_req(self) -> bool:
        '''True for a request message (0x70).'''
        return self.ctrl == Control.REQ

    def is_resp(self) -> bool:
        '''True for a response message (0x99).'''
        return self.ctrl == Control.RESP
|
||||
|
||||
|
||||
class Talent(Message):
|
||||
TXT_UNKNOWN_CTRL = 'Unknown Ctrl'
|
||||
|
||||
def __init__(self, addr, ifc: "AsyncIfc", server_side: bool,
|
||||
client_mode: bool = False, id_str=b''):
|
||||
super().__init__('G3', ifc, server_side, self.send_modbus_cb,
|
||||
mb_timeout=15)
|
||||
ifc.rx_set_cb(self.read)
|
||||
ifc.prot_set_timeout_cb(self._timeout)
|
||||
ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn)
|
||||
ifc.prot_set_update_header_cb(self._update_header)
|
||||
|
||||
self.addr = addr
|
||||
self.conn_no = ifc.get_conn_no()
|
||||
self.await_conn_resp_cnt = 0
|
||||
self.id_str = id_str
|
||||
self.contact_name = b''
|
||||
self.contact_mail = b''
|
||||
self.ts_offset = 0 # time offset between tsun cloud and local
|
||||
self.db = InfosG3()
|
||||
self.switch = {
|
||||
0x00: self.msg_contact_info,
|
||||
0x13: self.msg_ota_update,
|
||||
0x22: self.msg_get_time,
|
||||
0x99: self.msg_heartbeat,
|
||||
0x71: self.msg_collector_data,
|
||||
# 0x76:
|
||||
0x77: self.msg_modbus,
|
||||
# 0x78:
|
||||
0x87: self.msg_modbus2,
|
||||
0x04: self.msg_inverter_data,
|
||||
}
|
||||
self.log_lvl = {
|
||||
0x00: logging.INFO,
|
||||
0x13: logging.INFO,
|
||||
0x22: logging.INFO,
|
||||
0x99: logging.INFO,
|
||||
0x71: logging.INFO,
|
||||
# 0x76:
|
||||
0x77: self.get_modbus_log_lvl,
|
||||
# 0x78:
|
||||
0x87: self.get_modbus_log_lvl,
|
||||
0x04: logging.INFO,
|
||||
}
|
||||
|
||||
'''
|
||||
Our puplic methods
|
||||
'''
|
||||
def close(self) -> None:
|
||||
logging.debug('Talent.close()')
|
||||
# we have references to methods of this class in self.switch
|
||||
# so we have to erase self.switch, otherwise this instance can't be
|
||||
# deallocated by the garbage collector ==> we get a memory leak
|
||||
self.switch.clear()
|
||||
self.log_lvl.clear()
|
||||
super().close()
|
||||
|
||||
def __set_serial_no(self, serial_no: str):
|
||||
|
||||
if self.unique_id == serial_no:
|
||||
logger.debug(f'SerialNo: {serial_no}')
|
||||
else:
|
||||
inverters = Config.get('inverters')
|
||||
# logger.debug(f'Inverters: {inverters}')
|
||||
|
||||
if serial_no in inverters:
|
||||
inv = inverters[serial_no]
|
||||
self.node_id = inv['node_id']
|
||||
self.sug_area = inv['suggested_area']
|
||||
self.modbus_polling = inv['modbus_polling']
|
||||
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501
|
||||
self.db.set_pv_module_details(inv)
|
||||
if self.mb:
|
||||
self.mb.set_node_id(self.node_id)
|
||||
else:
|
||||
self.node_id = ''
|
||||
self.sug_area = ''
|
||||
if 'allow_all' not in inverters or not inverters['allow_all']:
|
||||
self.inc_counter('Unknown_SNR')
|
||||
self.unique_id = None
|
||||
logger.warning(f'ignore message from unknow inverter! (SerialNo: {serial_no})') # noqa: E501
|
||||
return
|
||||
logger.debug(f'SerialNo {serial_no} not known but accepted!')
|
||||
|
||||
self.unique_id = serial_no
|
||||
self.db.set_db_def_value(Register.COLLECTOR_SNR, serial_no)
|
||||
|
||||
def read(self) -> float:
|
||||
'''process all received messages in the _recv_buffer'''
|
||||
self._read()
|
||||
while True:
|
||||
if not self.header_valid:
|
||||
self.__parse_header(self.ifc.rx_peek(), self.ifc.rx_len())
|
||||
|
||||
if self.header_valid and \
|
||||
self.ifc.rx_len() >= (self.header_len + self.data_len):
|
||||
if self.state == State.init:
|
||||
self.state = State.received # received 1st package
|
||||
|
||||
log_lvl = self.log_lvl.get(self.msg_id, logging.WARNING)
|
||||
if callable(log_lvl):
|
||||
log_lvl = log_lvl()
|
||||
|
||||
self.ifc.rx_log(log_lvl, f'Received from {self.addr}:'
|
||||
f' BufLen: {self.ifc.rx_len()}'
|
||||
f' HdrLen: {self.header_len}'
|
||||
f' DtaLen: {self.data_len}')
|
||||
|
||||
self.__set_serial_no(self.id_str.decode("utf-8"))
|
||||
self.__dispatch_msg()
|
||||
self.__flush_recv_msg()
|
||||
else:
|
||||
return 0 # don not wait before sending a response
|
||||
|
||||
def forward(self) -> None:
|
||||
'''add the actual receive msg to the forwarding queue'''
|
||||
tsun = Config.get('tsun')
|
||||
if tsun['enabled']:
|
||||
buflen = self.header_len+self.data_len
|
||||
buffer = self.ifc.rx_peek(buflen)
|
||||
self.ifc.fwd_add(buffer)
|
||||
self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:')
|
||||
|
||||
fnc = self.switch.get(self.msg_id, self.msg_unknown)
|
||||
logger.info(self.__flow_str(self.server_side, 'forwrd') +
|
||||
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
|
||||
|
||||
def send_modbus_cb(self, modbus_pdu: bytearray, log_lvl: int, state: str):
|
||||
if self.state != State.up:
|
||||
logger.warning(f'[{self.node_id}] ignore MODBUS cmd,'
|
||||
' cause the state is not UP anymore')
|
||||
return
|
||||
|
||||
self.__build_header(0x70, 0x77)
|
||||
self.ifc.tx_add(b'\x00\x01\xa3\x28') # magic ?
|
||||
self.ifc.tx_add(struct.pack('!B', len(modbus_pdu)))
|
||||
self.ifc.tx_add(modbus_pdu)
|
||||
self.__finish_send_msg()
|
||||
|
||||
self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:')
|
||||
self.ifc.tx_flush()
|
||||
|
||||
def mb_timout_cb(self, exp_cnt):
|
||||
self.mb_timer.start(self.mb_timeout)
|
||||
|
||||
if 2 == (exp_cnt % 30):
|
||||
# logging.info("Regular Modbus Status request")
|
||||
self._send_modbus_cmd(Modbus.READ_REGS, 0x2000, 96, logging.DEBUG)
|
||||
else:
|
||||
self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG)
|
||||
|
||||
def _init_new_client_conn(self) -> bool:
|
||||
contact_name = self.contact_name
|
||||
contact_mail = self.contact_mail
|
||||
logger.info(f'name: {contact_name} mail: {contact_mail}')
|
||||
self.msg_id = 0
|
||||
self.await_conn_resp_cnt += 1
|
||||
self.__build_header(0x91)
|
||||
self.ifc.tx_add(struct.pack(f'!{len(contact_name)+1}p'
|
||||
f'{len(contact_mail)+1}p',
|
||||
contact_name, contact_mail))
|
||||
|
||||
self.__finish_send_msg()
|
||||
return True
|
||||
|
||||
'''
|
||||
Our private methods
|
||||
'''
|
||||
def __flow_str(self, server_side: bool, type: str): # noqa: F821
|
||||
switch = {
|
||||
'rx': ' <',
|
||||
'tx': ' >',
|
||||
'forwrd': '<< ',
|
||||
'drop': ' xx',
|
||||
'rxS': '> ',
|
||||
'txS': '< ',
|
||||
'forwrdS': ' >>',
|
||||
'dropS': 'xx ',
|
||||
}
|
||||
if server_side:
|
||||
type += 'S'
|
||||
return switch.get(type, '???')
|
||||
|
||||
def _timestamp(self): # pragma: no cover
|
||||
'''returns timestamp fo the inverter as localtime
|
||||
since 1.1.1970 in msec'''
|
||||
# convert localtime in epoche
|
||||
ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
|
||||
return round(ts*1000)
|
||||
|
||||
def _utcfromts(self, ts: float):
|
||||
'''converts inverter timestamp into unix time (epoche)'''
|
||||
dt = datetime.fromtimestamp(ts/1000, tz=ZoneInfo("UTC")). \
|
||||
replace(tzinfo=get_localzone())
|
||||
return dt.timestamp()
|
||||
|
||||
def _utc(self): # pragma: no cover
|
||||
'''returns unix time (epoche)'''
|
||||
return datetime.now().timestamp()
|
||||
|
||||
def _update_header(self, _forward_buffer):
|
||||
'''update header for message before forwarding,
|
||||
add time offset to timestamp'''
|
||||
_len = len(_forward_buffer)
|
||||
ofs = 0
|
||||
while ofs < _len:
|
||||
result = struct.unpack_from('!lB', _forward_buffer, 0)
|
||||
msg_len = 4 + result[0]
|
||||
id_len = result[1] # len of variable id string
|
||||
if _len < 2*id_len + 21:
|
||||
return
|
||||
|
||||
result = struct.unpack_from('!B', _forward_buffer, id_len+6)
|
||||
msg_code = result[0]
|
||||
if msg_code == 0x71 or msg_code == 0x04:
|
||||
result = struct.unpack_from('!q', _forward_buffer, 13+2*id_len)
|
||||
ts = result[0] + self.ts_offset
|
||||
logger.debug(f'offset: {self.ts_offset:08x}'
|
||||
f' proxy-time: {ts:08x}')
|
||||
struct.pack_into('!q', _forward_buffer, 13+2*id_len, ts)
|
||||
ofs += msg_len
|
||||
|
||||
# check if there is a complete header in the buffer, parse it
|
||||
# and set
|
||||
# self.header_len
|
||||
# self.data_len
|
||||
# self.id_str
|
||||
# self.ctrl
|
||||
# self.msg_id
|
||||
#
|
||||
# if the header is incomplete, than self.header_len is still 0
|
||||
#
|
||||
def __parse_header(self, buf: bytes, buf_len: int) -> None:
|
||||
|
||||
if (buf_len < 5): # enough bytes to read len and id_len?
|
||||
return
|
||||
result = struct.unpack_from('!lB', buf, 0)
|
||||
msg_len = result[0] # len of complete message
|
||||
id_len = result[1] # len of variable id string
|
||||
if id_len > 17:
|
||||
logger.warning(f'len of ID string must == 16 but is {id_len}')
|
||||
self.inc_counter('Invalid_Msg_Format')
|
||||
|
||||
# erase broken recv buffer
|
||||
self.ifc.rx_clear()
|
||||
return
|
||||
|
||||
hdr_len = 5+id_len+2
|
||||
|
||||
if (buf_len < hdr_len): # enough bytes for complete header?
|
||||
return
|
||||
|
||||
result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4)
|
||||
|
||||
# store parsed header values in the class
|
||||
self.id_str = result[0]
|
||||
self.ctrl = Control(result[1])
|
||||
self.msg_id = result[2]
|
||||
self.data_len = msg_len-id_len-3
|
||||
self.header_len = hdr_len
|
||||
self.header_valid = True
|
||||
|
||||
def __build_header(self, ctrl, msg_id=None) -> None:
    '''Append a new message header to the transmit buffer.

    ctrl:   control byte for the outgoing message
    msg_id: message id; defaults to the id of the message currently
            being handled (self.msg_id).
    The 4-byte length field is written as 0 here and patched later by
    __finish_send_msg().
    '''
    # NOTE(review): `not msg_id` also triggers for msg_id == 0 — confirm
    # that 0 is never a valid message id
    if not msg_id:
        msg_id = self.msg_id
    # remember where this message starts so the length can be patched in
    self.send_msg_ofs = self.ifc.tx_len()
    self.ifc.tx_add(struct.pack(f'!l{len(self.id_str)+1}pBB',
                                0, self.id_str, ctrl, msg_id))
    fnc = self.switch.get(msg_id, self.msg_unknown)
    logger.info(self.__flow_str(self.server_side, 'tx') +
                f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}')
|
||||
|
||||
def __finish_send_msg(self) -> None:
    '''Patch the length field of the message started by __build_header().

    The stored length excludes the 4-byte length field itself.
    '''
    _len = self.ifc.tx_len() - self.send_msg_ofs
    struct.pack_into('!l', self.ifc.tx_peek(), self.send_msg_ofs,
                     _len-4)
|
||||
|
||||
def __dispatch_msg(self) -> None:
    '''Dispatch the received message to its handler via self.switch.

    Messages are only dispatched once the connection has a unique_id;
    before that they are logged and dropped.
    '''
    fnc = self.switch.get(self.msg_id, self.msg_unknown)
    if self.unique_id:
        logger.info(self.__flow_str(self.server_side, 'rx') +
                    f' Ctl: {int(self.ctrl):#02x} ({self.state}) '
                    f'Msg: {fnc.__name__!r}')
        fnc()
    else:
        logger.info(self.__flow_str(self.server_side, 'drop') +
                    f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
|
||||
|
||||
def __flush_recv_msg(self) -> None:
    '''Remove the fully processed message from the receive buffer and
    invalidate the parsed header so the next one gets parsed.'''
    self.ifc.rx_get(self.header_len+self.data_len)
    self.header_valid = False
|
||||
|
||||
'''
|
||||
Message handler methods
|
||||
'''
|
||||
def msg_contact_info(self):
    '''Handle a contact-info message (name + mail of the installation).

    On the server side a valid indication is acknowledged locally and
    NOT forwarded; otherwise the message is forwarded unless we are
    still waiting for pending connection responses.
    '''
    if self.ctrl.is_ind():
        if self.server_side and self.__process_contact_info():
            # acknowledge with status byte 0x01
            self.__build_header(0x91)
            self.ifc.tx_add(b'\x01')
            self.__finish_send_msg()
            # don't forward this contact info here, we will build one
            # when the remote connection is established
        elif self.await_conn_resp_cnt > 0:
            # swallow one expected response for a connection we initiated
            self.await_conn_resp_cnt -= 1
        else:
            self.forward()
    else:
        logger.warning(self.TXT_UNKNOWN_CTRL)
        self.inc_counter('Unknown_Ctrl')
        self.forward()
|
||||
|
||||
def __process_contact_info(self) -> bool:
    '''Parse contact name and mail from the payload.

    Returns True when both fields were parsed and stored in
    self.contact_name / self.contact_mail, False for a one-byte
    response payload.
    NOTE(review): when data_len < name_len+2 the function falls through
    and implicitly returns None (falsy) — confirm this is intended.
    '''
    buf = self.ifc.rx_peek()
    result = struct.unpack_from('!B', buf, self.header_len)
    name_len = result[0]
    if self.data_len == 1:  # this is a response with one status byte
        return False
    if self.data_len >= name_len+2:
        # Pascal-string name followed by the mail length byte
        result = struct.unpack_from(f'!{name_len+1}pB', buf,
                                    self.header_len)
        self.contact_name = result[0]
        mail_len = result[1]
        logger.info(f'name: {self.contact_name}')

        result = struct.unpack_from(f'!{mail_len+1}p', buf,
                                    self.header_len+name_len+1)
        self.contact_mail = result[0]
        logger.info(f'mail: {self.contact_mail}')
        return True
|
||||
|
||||
def msg_get_time(self):
    '''Handle a get-time message.

    Empty indication: answer with our own epoch timestamp (0x91).
    Indication with >= 8 bytes payload: take it as the peer's time,
    compute self.ts_offset and propagate it to the remote stream;
    such a message is consumed and not forwarded.
    '''
    if self.ctrl.is_ind():
        if self.data_len == 0:
            if self.state == State.up:
                self.state = State.pend  # block MODBUS cmds

            ts = self._timestamp()
            logger.debug(f'time: {ts:08x}')
            self.__build_header(0x91)
            self.ifc.tx_add(struct.pack('!q', ts))
            self.__finish_send_msg()

        elif self.data_len >= 8:
            ts = self._timestamp()
            result = struct.unpack_from('!q', self.ifc.rx_peek(),
                                        self.header_len)
            # offset between the peer's clock and ours
            self.ts_offset = result[0]-ts
            if self.ifc.remote.stream:
                self.ifc.remote.stream.ts_offset = self.ts_offset
            logger.debug(f'tsun-time: {int(result[0]):08x}'
                         f' proxy-time: {ts:08x}'
                         f' offset: {self.ts_offset}')
            return  # ignore received response
    else:
        logger.warning(self.TXT_UNKNOWN_CTRL)
        self.inc_counter('Unknown_Ctrl')

    self.forward()
|
||||
|
||||
def msg_heartbeat(self):
    '''Handle a heartbeat message.

    A 9-byte indication marks the link as up, (re)starts MODBUS polling,
    is acknowledged locally (0x99) and its embedded timestamp is rebased
    by self.ts_offset in-place before forwarding. Responses are consumed.
    '''
    if self.ctrl.is_ind():
        if self.data_len == 9:
            self.state = State.up  # allow MODBUS cmds
            if (self.modbus_polling):
                self.mb_timer.start(self.mb_first_timeout)
                self.db.set_db_def_value(Register.POLLING_INTERVAL,
                                         self.mb_timeout)
            # acknowledge the heartbeat with status byte 0x02
            self.__build_header(0x99)
            self.ifc.tx_add(b'\x02')
            self.__finish_send_msg()

            result = struct.unpack_from('!Bq', self.ifc.rx_peek(),
                                        self.header_len)
            resp_code = result[0]
            # rewrite the inverter time into the peer's time domain
            ts = result[1]+self.ts_offset
            logger.debug(f'inv-time: {int(result[1]):08x}'
                         f' tsun-time: {ts:08x}'
                         f' offset: {self.ts_offset}')
            struct.pack_into('!Bq', self.ifc.rx_peek(),
                             self.header_len, resp_code, ts)
    elif self.ctrl.is_resp():
        result = struct.unpack_from('!B', self.ifc.rx_peek(),
                                    self.header_len)
        resp_code = result[0]
        # fix: was logging.debug() which bypasses the module's 'msg' logger
        logger.debug(f'Heartbeat-RespCode: {resp_code}')
        return
    else:
        logger.warning(self.TXT_UNKNOWN_CTRL)
        self.inc_counter('Unknown_Ctrl')

    self.forward()
|
||||
|
||||
def parse_msg_header(self):
    '''Parse the data-message sub-header that follows the common header.

    Returns (msg_hdr_len, timestamp) where msg_hdr_len is the length of
    this sub-header and timestamp the raw 64-bit time value from it.
    '''
    result = struct.unpack_from('!lB', self.ifc.rx_peek(),
                                self.header_len)

    data_id = result[0]  # 32-bit data id of this payload
    id_len = result[1]  # len of variable id string
    logger.debug(f'Data_ID: 0x{data_id:08x} id_len: {id_len}')

    # 4 bytes data_id + 1 byte id_len + id + 1 byte + 8 byte timestamp
    msg_hdr_len = 5+id_len+9

    result = struct.unpack_from(f'!{id_len+1}pBq', self.ifc.rx_peek(),
                                self.header_len + 4)

    timestamp = result[2]
    logger.debug(f'ID: {result[0]} B: {result[1]}')
    logger.debug(f'time: {timestamp:08x}')
    # logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime(
    #  "%Y-%m-%d %H:%M:%S")}')
    return msg_hdr_len, timestamp
|
||||
|
||||
def msg_collector_data(self):
    '''Handle a collector (logger) data indication: acknowledge it
    (0x99, status 0x01), parse the values into the db and forward it.
    Responses are consumed.'''
    if self.ctrl.is_ind():
        self.__build_header(0x99)
        self.ifc.tx_add(b'\x01')
        self.__finish_send_msg()
        # collector data: replayed messages are accepted (False)
        self.__process_data(False)

    elif self.ctrl.is_resp():
        return  # ignore received response
    else:
        logger.warning(self.TXT_UNKNOWN_CTRL)
        self.inc_counter('Unknown_Ctrl')

    self.forward()
|
||||
|
||||
def msg_inverter_data(self):
    '''Handle an inverter data indication: acknowledge it (0x99),
    parse the values (ignoring day-old replays), mark the link up and
    (re)start MODBUS polling. Responses are consumed.'''
    if self.ctrl.is_ind():
        self.__build_header(0x99)
        self.ifc.tx_add(b'\x01')
        self.__finish_send_msg()
        # inverter data: drop replayed messages older than one day (True)
        self.__process_data(True)
        self.state = State.up  # allow MODBUS cmds
        if (self.modbus_polling):
            self.mb_timer.start(self.mb_first_timeout)
            self.db.set_db_def_value(Register.POLLING_INTERVAL,
                                     self.mb_timeout)

    elif self.ctrl.is_resp():
        return  # ignore received response
    else:
        logger.warning(self.TXT_UNKNOWN_CTRL)
        self.inc_counter('Unknown_Ctrl')

    self.forward()
|
||||
|
||||
def __process_data(self, ignore_replay: bool):
    '''Parse the payload values into the database.

    ignore_replay: when True, payloads whose timestamp is more than one
    day old are discarded (replay protection for inverter data).
    Updated keys get an MQTT timestamp and are flagged in self.new_data.
    '''
    msg_hdr_len, ts = self.parse_msg_header()
    if ignore_replay:
        age = self._utc() - self._utcfromts(ts)
        age = age/(3600*24)  # seconds -> days
        logger.debug(f"Age: {age} days")
        if age > 1:
            return

    for key, update in self.db.parse(self.ifc.rx_peek(), self.header_len
                                     + msg_hdr_len, self.node_id):
        if update:
            self._set_mqtt_timestamp(key, self._utcfromts(ts))
            self.new_data[key] = True
|
||||
|
||||
def msg_ota_update(self):
    '''Handle an OTA firmware-update message.

    Requests are only counted; indications need no local action.
    Any other control byte is logged as unknown. Every variant is
    forwarded unchanged.
    '''
    ctrl = self.ctrl
    if ctrl.is_req():
        # count OTA start requests for the statistics
        self.inc_counter('OTA_Start_Msg')
    elif not ctrl.is_ind():
        # neither request nor indication -> unexpected control byte
        logger.warning(self.TXT_UNKNOWN_CTRL)
        self.inc_counter('Unknown_Ctrl')
    self.forward()
|
||||
|
||||
def parse_modbus_header(self):
    '''Parse the 5-byte MODBUS sub-header following the common header.

    Returns (sub_header_len, modbus_frame_len).
    '''
    buf = self.ifc.rx_peek()
    _, modbus_len, _ = struct.unpack_from('!lBB', buf, self.header_len)
    return 5, modbus_len
|
||||
|
||||
def parse_modbus_header2(self):
    '''Parse the 6-byte (variant 2) MODBUS sub-header.

    Returns (sub_header_len, modbus_frame_len).
    '''
    buf = self.ifc.rx_peek()
    fields = struct.unpack_from('!lBBB', buf, self.header_len)
    return 6, fields[2]
|
||||
|
||||
def get_modbus_log_lvl(self) -> int:
    '''Pick the log level for a MODBUS message.

    Requests log at INFO; server-side indications repeat the level of
    the last logged response; everything else logs at WARNING.
    '''
    ctrl = self.ctrl
    if ctrl.is_req():
        return logging.INFO
    if not (ctrl.is_ind() and self.server_side):
        return logging.WARNING
    return self.mb.last_log_lvl
|
||||
|
||||
def msg_modbus(self):
    '''Handle a MODBUS message with the 5-byte sub-header variant.'''
    hdr_len, _ = self.parse_modbus_header()
    self.__msg_modbus(hdr_len)
|
||||
|
||||
def msg_modbus2(self):
    '''Handle a MODBUS message with the 6-byte sub-header variant.'''
    hdr_len, _ = self.parse_modbus_header2()
    self.__msg_modbus(hdr_len)
|
||||
|
||||
def __msg_modbus(self, hdr_len):
    '''Common MODBUS handling for both sub-header variants.

    Requests are handed to the remote stream's modbus instance;
    server-side indications are parsed as responses into the db.
    The message is forwarded in every case except a non-server-side
    indication, which is dropped as unknown.
    '''
    # payload without the common header
    data = self.ifc.rx_peek()[self.header_len:
                              self.header_len+self.data_len]

    if self.ctrl.is_req():
        rstream = self.ifc.remote.stream
        if rstream.mb.recv_req(data[hdr_len:], rstream.msg_forward):
            self.inc_counter('Modbus_Command')
        else:
            self.inc_counter('Invalid_Msg_Format')
    elif self.ctrl.is_ind():
        self.modbus_elms = 0
        # logger.debug(f'Modbus Ind MsgLen: {modbus_len}')
        if not self.server_side:
            logger.warning('Unknown Message')
            self.inc_counter('Unknown_Msg')
            return

        for key, update, _ in self.mb.recv_resp(self.db, data[
                hdr_len:]):
            if update:
                self._set_mqtt_timestamp(key, self._utc())
                self.new_data[key] = True
            self.modbus_elms += 1  # count for unit tests
    else:
        logger.warning(self.TXT_UNKNOWN_CTRL)
        self.inc_counter('Unknown_Ctrl')
    self.forward()
|
||||
|
||||
def msg_forward(self):
    '''Handler for messages that are forwarded unchanged.'''
    self.forward()
|
||||
|
||||
def msg_unknown(self):
    '''Fallback handler for message ids without a registered handler:
    log it, count it and forward it unchanged.'''
    # fix: log message typo 'Unknow' -> 'Unknown'
    logger.warning(f"Unknown Msg: ID:{self.msg_id}")
    self.inc_counter('Unknown_Msg')
    self.forward()
|
||||
204
app/src/gen3plus/infos_g3p.py
Normal file
204
app/src/gen3plus/infos_g3p.py
Normal file
@@ -0,0 +1,204 @@
|
||||
|
||||
from typing import Generator
|
||||
|
||||
from infos import Infos, Register, ProxyMode, Fmt
|
||||
|
||||
|
||||
class RegisterMap:
    '''Static mapping from GEN3PLUS data addresses to proxy registers.

    Key layout: 0xMMFFAAAA with MM = message type (0x41 collector /
    0x42 inverter data), FF = frame type, AAAA = 16-bit address —
    presumably; see the masking in InfosG3P.parse(). Row keys:
    'reg' target register (None = value is only validated/ignored),
    'fmt' struct format, 'ratio'/'offset' scaling, 'func' formatter,
    'const' fixed value for building frames, 'dep' proxy-mode visibility.
    '''
    # make the class read/only by using __slots__
    __slots__ = ()

    FMT_2_16BIT_VAL = '!HH'
    FMT_3_16BIT_VAL = '!HHH'
    FMT_4_16BIT_VAL = '!HHHH'

    map = {
        # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': '<L'},  # noqa: E501
        0x41020018: {'reg': Register.DATA_UP_INTERVAL, 'fmt': '<B', 'ratio': 60, 'dep': ProxyMode.SERVER},  # noqa: E501
        # NOTE(review): lone 'quotient' key — every other scaled row uses
        # 'ratio'; confirm Fmt supports 'quotient' or whether this is a typo
        0x41020019: {'reg': Register.COLLECT_INTERVAL, 'fmt': '<B', 'quotient': 60, 'dep': ProxyMode.SERVER},  # noqa: E501
        0x4102001a: {'reg': Register.HEARTBEAT_INTERVAL, 'fmt': '<B', 'ratio': 1},  # noqa: E501
        0x4102001b: {'reg': None, 'fmt': '<B', 'const': 1},  # noqa: E501 Max No Of Connected Devices
        0x4102001c: {'reg': Register.SIGNAL_STRENGTH, 'fmt': '<B', 'ratio': 1, 'dep': ProxyMode.SERVER},  # noqa: E501
        0x4102001d: {'reg': None, 'fmt': '<B', 'const': 1},  # noqa: E501
        0x4102001e: {'reg': Register.CHIP_MODEL, 'fmt': '!40s'},  # noqa: E501
        0x41020046: {'reg': Register.MAC_ADDR, 'fmt': '!6B', 'func': Fmt.mac},  # noqa: E501
        0x4102004c: {'reg': Register.IP_ADDRESS, 'fmt': '!16s'},  # noqa: E501
        0x4102005c: {'reg': None, 'fmt': '<B', 'const': 15},  # noqa: E501
        0x4102005e: {'reg': None, 'fmt': '<B', 'const': 1},  # noqa: E501 No Of Sensors (ListLen)
        0x4102005f: {'reg': Register.SENSOR_LIST, 'fmt': '<H', 'func': Fmt.hex4},  # noqa: E501
        0x41020061: {'reg': None, 'fmt': '<HB', 'const': (15, 255)},  # noqa: E501
        0x41020064: {'reg': Register.COLLECTOR_FW_VERSION, 'fmt': '!40s'},  # noqa: E501
        0x4102008c: {'reg': None, 'fmt': '<BB', 'const': (254, 254)},  # noqa: E501
        0x4102008e: {'reg': None, 'fmt': '<B'},  # noqa: E501 Encryption Certificate File Status
        0x4102008f: {'reg': None, 'fmt': '!40s'},  # noqa: E501
        0x410200b7: {'reg': Register.SSID, 'fmt': '!40s'},  # noqa: E501

        0x4201000c: {'reg': Register.SENSOR_LIST, 'fmt': '<H', 'func': Fmt.hex4},  # noqa: E501
        0x4201001c: {'reg': Register.POWER_ON_TIME, 'fmt': '<H', 'ratio': 1, 'dep': ProxyMode.SERVER},  # noqa: E501, or packet number
        0x42010020: {'reg': Register.SERIAL_NUMBER, 'fmt': '!16s'},  # noqa: E501

        # Start MODBUS Block: 0x3000 (R/O Measurements)
        0x420100c0: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'},  # noqa: E501
        0x420100c2: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'},  # noqa: E501
        0x420100c4: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'},  # noqa: E501
        0x420100c6: {'reg': Register.EVENT_ALARM, 'fmt': '!H'},  # noqa: E501
        0x420100c8: {'reg': Register.EVENT_FAULT, 'fmt': '!H'},  # noqa: E501
        0x420100ca: {'reg': Register.EVENT_BF1, 'fmt': '!H'},  # noqa: E501
        0x420100cc: {'reg': Register.EVENT_BF2, 'fmt': '!H'},  # noqa: E501
        # 0x420100ce
        0x420100d0: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version},  # noqa: E501
        0x420100d2: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100d4: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x420100d6: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x420100d8: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40},  # noqa: E501
        # 0x420100da
        0x420100dc: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1},  # noqa: E501
        0x420100de: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100e0: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100e2: {'reg': Register.PV1_CURRENT, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x420100e4: {'reg': Register.PV1_POWER, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100e6: {'reg': Register.PV2_VOLTAGE, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100e8: {'reg': Register.PV2_CURRENT, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x420100ea: {'reg': Register.PV2_POWER, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100ec: {'reg': Register.PV3_VOLTAGE, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100ee: {'reg': Register.PV3_CURRENT, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x420100f0: {'reg': Register.PV3_POWER, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100f2: {'reg': Register.PV4_VOLTAGE, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100f4: {'reg': Register.PV4_CURRENT, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x420100f6: {'reg': Register.PV4_POWER, 'fmt': '!H', 'ratio': 0.1},  # noqa: E501
        0x420100f8: {'reg': Register.DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x420100fa: {'reg': Register.TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01},  # noqa: E501
        0x420100fe: {'reg': Register.PV1_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x42010100: {'reg': Register.PV1_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01},  # noqa: E501
        0x42010104: {'reg': Register.PV2_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x42010106: {'reg': Register.PV2_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01},  # noqa: E501
        0x4201010a: {'reg': Register.PV3_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x4201010c: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01},  # noqa: E501
        0x42010110: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01},  # noqa: E501
        0x42010112: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01},  # noqa: E501
        0x42010116: {'reg': Register.INV_UNKNOWN_1, 'fmt': '!H'},  # noqa: E501

        # Start MODBUS Block: 0x2000 (R/W Config Paramaneters)
        0x42010118: {'reg': Register.BOOT_STATUS, 'fmt': '!H'},
        0x4201011a: {'reg': Register.DSP_STATUS, 'fmt': '!H'},
        0x4201011c: {'reg': None, 'fmt': '!H', 'const': 1},  # noqa: E501
        0x4201011e: {'reg': Register.WORK_MODE, 'fmt': '!H'},
        0x42010124: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'},
        0x42010126: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H'},
        0x42010128: {'reg': Register.RATED_LEVEL, 'fmt': '!H'},
        0x4201012a: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024},  # noqa: E501
        0x4201012c: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'},
        0x4201012e: {'reg': None, 'fmt': '!H', 'const': 1024},  # noqa: E501
        0x42010130: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1024, 1, 0xffff, 1)},  # noqa: E501
        0x42010138: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'},
        0x4201013a: {'reg': None, 'fmt': FMT_3_16BIT_VAL, 'const': (0x68, 0x68, 0x500)},  # noqa: E501
        0x42010140: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x9cd, 0x7b6, 0x139c, 0x1324)},  # noqa: E501
        0x42010148: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1, 0x7ae, 0x40f, 0x41)},  # noqa: E501
        0x42010150: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0xf, 0xa64, 0xa64, 0x6)},  # noqa: E501
        0x42010158: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x6, 0x9f6, 0x128c, 0x128c)},  # noqa: E501
        0x42010160: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x10, 0x10, 0x1452, 0x1452)},  # noqa: E501
        0x42010168: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x10, 0x10, 0x151, 0x5)},  # noqa: E501
        0x42010170: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024},  # noqa: E501
        0x42010172: {'reg': None, 'fmt': FMT_3_16BIT_VAL, 'const': (0x1, 0x139c, 0xfa0)},  # noqa: E501
        0x42010178: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x4e, 0x66, 0x3e8, 0x400)},  # noqa: E501
        0x42010180: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x9ce, 0x7a8, 0x139c, 0x1326)},  # noqa: E501
        0x42010188: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x0, 0x0, 0x0, 0)},  # noqa: E501
        0x42010190: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x0, 0x0, 1024, 1024)},  # noqa: E501
        0x42010198: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0, 0, 0xffff, 0)},  # noqa: E501
        0x420101a0: {'reg': None, 'fmt': FMT_2_16BIT_VAL, 'const': (0x0, 0x0)},  # noqa: E501

        0xffffff02: {'reg': Register.POLLING_INTERVAL},
        # 0x4281001c: {'reg': Register.POWER_ON_TIME, 'fmt': '<H', 'ratio': 1},  # noqa: E501
    }
|
||||
|
||||
|
||||
class InfosG3P(Infos):
    '''Register database for GEN3PLUS devices, driven by RegisterMap.'''
    __slots__ = ('client_mode', )

    def __init__(self, client_mode: bool):
        '''client_mode: True when the proxy acts as client towards the
        TSUN cloud; controls which topics are hidden (see __hide_topic).'''
        super().__init__()
        self.client_mode = client_mode
        # static device defaults for this inverter family
        self.set_db_def_value(Register.MANUFACTURER, 'TSUN')
        self.set_db_def_value(Register.EQUIPMENT_MODEL, 'TSOL-MSxx00')
        self.set_db_def_value(Register.CHIP_TYPE, 'IGEN TECH')
        self.set_db_def_value(Register.NO_INPUTS, 4)

    def __hide_topic(self, row: dict) -> bool:
        '''Return True when the row's 'dep' proxy-mode restriction does
        not match the current mode (topic must be removed from HA).'''
        if 'dep' in row:
            mode = row['dep']
            if self.client_mode:
                return mode != ProxyMode.CLIENT
            else:
                return mode != ProxyMode.SERVER
        return False

    def ha_confs(self, ha_prfx: str, node_id: str, snr: str,
                 sug_area: str = '') \
            -> Generator[tuple[dict, str], None, None]:
        '''Generator function yields a json register struct for home-assistant
        auto configuration and a unique entity string

        arguments:
        prfx:str ==> MQTT prefix for the home assistant 'stat_t string
        snr:str ==> serial number of the inverter, used to build unique
        entity strings
        sug_area:str ==> suggested area string from the config file'''
        # iterate over RegisterMap.map and get the register values
        for row in RegisterMap.map.values():
            info_id = row['reg']
            if self.__hide_topic(row):
                res = self.ha_remove(info_id, node_id, snr)  # noqa: E501
            else:
                res = self.ha_conf(info_id, ha_prfx, node_id, snr, False, sug_area)  # noqa: E501
            if res:
                yield res

    def parse(self, buf, msg_type: int, rcv_ftype: int, node_id: str = '') \
            -> Generator[tuple[str, bool], None, None]:
        '''parse a data sequence received from the inverter and
        stores the values in Infos.db

        buf: buffer of the sequence to parse'''
        for idx, row in RegisterMap.map.items():
            # key layout: msg type (high byte), frame type, 16-bit address
            addr = idx & 0xffff
            ftype = (idx >> 16) & 0xff
            mtype = (idx >> 24) & 0xff
            if ftype != rcv_ftype or mtype != msg_type:
                continue
            if not isinstance(row, dict):
                continue
            info_id = row['reg']
            result = Fmt.get_value(buf, addr, row)

            keys, level, unit, must_incr = self._key_obj(info_id)

            if keys:
                name, update = self.update_db(keys, must_incr, result)
                yield keys[0], update
            else:
                # unmapped register: only used for the trace output below
                name = str(f'info-id.0x{addr:x}')
                update = False

            if update:
                self.tracer.log(level, f'[{node_id}] GEN3PLUS: {name}'
                                f' : {result}{unit}')

    def build(self, len, msg_type: int, rcv_ftype: int):
        '''Build a data frame of `len` bytes for the given message/frame
        type from constants and current db values.
        NOTE(review): parameter `len` shadows the builtin; renaming would
        break keyword callers, so it is kept.'''
        buf = bytearray(len)
        for idx, row in RegisterMap.map.items():
            addr = idx & 0xffff
            ftype = (idx >> 16) & 0xff
            mtype = (idx >> 24) & 0xff
            if ftype != rcv_ftype or mtype != msg_type:
                continue
            if not isinstance(row, dict):
                continue
            if 'const' in row:
                val = row['const']
            else:
                info_id = row['reg']
                val = self.get_db_value(info_id)
                if not val:
                    continue
            Fmt.set_value(buf, addr, row, val)
        return buf
|
||||
15
app/src/gen3plus/inverter_g3p.py
Normal file
15
app/src/gen3plus/inverter_g3p.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from asyncio import StreamReader, StreamWriter
|
||||
|
||||
from inverter_base import InverterBase
|
||||
from gen3plus.solarman_v5 import SolarmanV5
|
||||
from gen3plus.solarman_emu import SolarmanEmu
|
||||
|
||||
|
||||
class InverterG3P(InverterBase):
    '''GEN3PLUS inverter connection speaking the Solarman V5 protocol.'''

    def __init__(self, reader: StreamReader, writer: StreamWriter,
                 client_mode: bool = False):
        # in client mode the proxy emulates a logger towards the TSUN
        # cloud, so the remote side speaks the EMU protocol
        remote_prot = SolarmanEmu if client_mode else None
        super().__init__(reader, writer, 'solarman',
                         SolarmanV5, client_mode, remote_prot)
|
||||
138
app/src/gen3plus/solarman_emu.py
Normal file
138
app/src/gen3plus/solarman_emu.py
Normal file
@@ -0,0 +1,138 @@
|
||||
import logging
|
||||
import struct
|
||||
|
||||
from async_ifc import AsyncIfc
|
||||
from gen3plus.solarman_v5 import SolarmanBase
|
||||
from my_timer import Timer
|
||||
from infos import Register
|
||||
|
||||
logger = logging.getLogger('msg')
|
||||
|
||||
|
||||
class SolarmanEmu(SolarmanBase):
    '''Emulates a Solarman V5 logger towards the TSUN cloud.

    Periodically sends heartbeat (0x4710) and inverter-data (0x4210)
    messages built from the mirrored database of the real connection
    and consumes the cloud's responses.
    '''

    def __init__(self, addr, ifc: "AsyncIfc",
                 server_side: bool, client_mode: bool):
        super().__init__(addr, ifc, server_side=False,
                         _send_modbus_cb=None,
                         mb_timeout=8)
        # fix: use the module's 'msg' logger instead of the root logger
        logger.debug('SolarmanEmu.init()')
        # mirror db and serial number of the real inverter connection
        self.db = ifc.remote.stream.db
        self.snr = ifc.remote.stream.snr
        # actual heartbeat timeout from the last response message
        self.hb_timeout = 60
        # time interval for getting new MQTT data messages
        self.data_up_inv = self.db.get_db_value(Register.DATA_UP_INTERVAL)
        self.hb_timer = Timer(self.send_heartbeat_cb, self.node_id)
        self.data_timer = Timer(self.send_data_cb, self.node_id)
        # timestamp when we send the last sync message (4110)
        self.last_sync = self._emu_timestamp()
        # last sent packet number
        self.pkt_cnt = 0

        self.switch = {
            0x4210: 'msg_data_ind',  # real time data
            0x1210: self.msg_response,  # at least every 5 minutes

            0x4710: 'msg_hbeat_ind',  # heartbeat
            0x1710: self.msg_response,  # every 2 minutes

            0x4110: 'msg_dev_ind',  # device data, sync start
            0x1110: self.msg_response,  # every 3 hours
        }

        self.log_lvl = {
            0x4110: logging.INFO,  # device data, sync start
            0x1110: logging.INFO,  # every 3 hours

            0x4210: logging.INFO,  # real time data
            0x1210: logging.INFO,  # at least every 5 minutes

            0x4710: logging.DEBUG,  # heartbeat
            0x1710: logging.DEBUG,  # every 2 minutes
        }

    '''
    Our public methods
    '''
    def close(self) -> None:
        logger.info('SolarmanEmu.close()')
        # we have references to methods of this class in self.switch
        # so we have to erase self.switch, otherwise this instance can't be
        # deallocated by the garbage collector ==> we get a memory leak
        self.switch.clear()
        self.log_lvl.clear()
        self.hb_timer.close()
        self.data_timer.close()
        self.db = None
        super().close()

    def _set_serial_no(self, snr: int):
        '''store the logger serial number as the unique connection id'''
        logger.debug(f'SolarmanEmu._set_serial_no, snr: {snr}')
        self.unique_id = str(snr)

    def _init_new_client_conn(self) -> bool:
        '''start the data timer on a new cloud connection; no greeting
        message is sent (returns False)'''
        logger.debug('SolarmanEmu.init_new()')
        self.data_timer.start(self.data_up_inv)
        return False

    def next_pkt_cnt(self):
        '''get the next packet number'''
        self.pkt_cnt = (self.pkt_cnt + 1) & 0xffffffff
        return self.pkt_cnt

    def seconds_since_last_sync(self):
        '''get seconds since last 0x4110 message was sent'''
        return self._emu_timestamp() - self.last_sync

    def send_heartbeat_cb(self, exp_cnt):
        '''send a heartbeat to the TSUN cloud'''
        self._build_header(0x4710)
        self.ifc.tx_add(struct.pack('<B', 0))
        self._finish_send_msg()
        log_lvl = self.log_lvl.get(0x4710, logging.WARNING)
        self.ifc.tx_log(log_lvl, 'Send heartbeat:')
        self.ifc.tx_flush()

    def send_data_cb(self, exp_cnt):
        '''send an inverter data message to the TSUN cloud'''
        # keep both timers running for the next cycle
        self.hb_timer.start(self.hb_timeout)
        self.data_timer.start(self.data_up_inv)
        _len = 420
        ftype = 1
        build_msg = self.db.build(_len, 0x42, ftype)

        self._build_header(0x4210)
        self.ifc.tx_add(
            struct.pack(
                '<BHLLLHL', ftype, 0x02b0,
                self._emu_timestamp(),
                self.seconds_since_last_sync(),
                self.time_ofs,
                1,  # offset 0x1a
                self.next_pkt_cnt()))
        self.ifc.tx_add(build_msg[0x20:])
        self._finish_send_msg()
        log_lvl = self.log_lvl.get(0x4210, logging.WARNING)
        self.ifc.tx_log(log_lvl, 'Send inv-data:')
        self.ifc.tx_flush()

    '''
    Message handler methods
    '''
    def msg_response(self):
        '''handle a received response from the TSUN cloud'''
        logger.debug("EMU received rsp:")
        _, _, ts, hb = super().msg_response()
        logger.debug(f"EMU ts:{ts} hb:{hb}")
        self.hb_timeout = hb
        # remember the offset between cloud time and our emulated time
        self.time_ofs = ts - self._emu_timestamp()
        self.hb_timer.start(self.hb_timeout)

    def msg_unknown(self):
        '''counts an unknown or unexpected message from the TSUN cloud'''
        # fix: log message typo 'Unknow' -> 'Unknown'
        logger.warning(f"EMU Unknown Msg: ID:{int(self.control):#04x}")
        self.inc_counter('Unknown_Msg')
|
||||
706
app/src/gen3plus/solarman_v5.py
Normal file
706
app/src/gen3plus/solarman_v5.py
Normal file
@@ -0,0 +1,706 @@
|
||||
import struct
|
||||
import logging
|
||||
import time
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
|
||||
from async_ifc import AsyncIfc
|
||||
from messages import hex_dump_memory, Message, State
|
||||
from cnf.config import Config
|
||||
from modbus import Modbus
|
||||
from gen3plus.infos_g3p import InfosG3P
|
||||
from infos import Register, Fmt
|
||||
|
||||
logger = logging.getLogger('msg')
|
||||
|
||||
|
||||
class Sequence():
    '''16-bit sequence word split into a receive and a send counter.

    The wire format packs both 8-bit counters into one word; which byte
    carries which counter depends on the connection direction
    (server_side).
    '''

    def __init__(self, server_side: bool):
        self.server_side = server_side
        self.rcv_idx = 0
        self.snd_idx = 0

    def set_recv(self, val: int):
        '''split a received sequence word into both counters'''
        high, low = val >> 8, val & 0xff
        if self.server_side:
            self.rcv_idx, self.snd_idx = high, low
        else:
            self.rcv_idx, self.snd_idx = low, high

    def get_send(self):
        '''increment the send counter (8-bit wraparound) and return the
        combined sequence word for the next transmit message'''
        self.snd_idx = (self.snd_idx + 1) & 0xff
        if self.server_side:
            return (self.rcv_idx << 8) | self.snd_idx
        return (self.snd_idx << 8) | self.rcv_idx

    def __str__(self):
        return f'{self.rcv_idx:02x}:{self.snd_idx:02x}'
|
||||
|
||||
|
||||
class SolarmanBase(Message):
|
||||
def __init__(self, addr, ifc: "AsyncIfc", server_side: bool,
|
||||
_send_modbus_cb, mb_timeout: int):
|
||||
super().__init__('G3P', ifc, server_side, _send_modbus_cb,
|
||||
mb_timeout)
|
||||
ifc.rx_set_cb(self.read)
|
||||
ifc.prot_set_timeout_cb(self._timeout)
|
||||
ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn)
|
||||
ifc.prot_set_update_header_cb(self.__update_header)
|
||||
self.addr = addr
|
||||
self.conn_no = ifc.get_conn_no()
|
||||
self.header_len = 11 # overwrite construcor in class Message
|
||||
self.control = 0
|
||||
self.seq = Sequence(server_side)
|
||||
self.snr = 0
|
||||
self.time_ofs = 0
|
||||
|
||||
def read(self) -> float:
    '''process all received messages in the _recv_buffer

    Parses headers and dispatches every complete message (header +
    payload + 2 trailer bytes) until only a partial message remains.
    Returns 0 (seconds to wait before sending a response).
    '''
    self._read()
    while True:
        if not self.header_valid:
            self.__parse_header(self.ifc.rx_peek(),
                                self.ifc.rx_len())

        # +2 for the crc and stop byte trailer
        if self.header_valid and self.ifc.rx_len() >= \
                (self.header_len + self.data_len+2):
            self.__process_complete_received_msg()
            self.__flush_recv_msg()
        else:
            return 0  # wait 0s before sending a response
|
||||
'''
|
||||
Our public methods
|
||||
'''
|
||||
def _flow_str(self, server_side: bool, type: str): # noqa: F821
|
||||
switch = {
|
||||
'rx': ' <',
|
||||
'tx': ' >',
|
||||
'forwrd': '<< ',
|
||||
'drop': ' xx',
|
||||
'rxS': '> ',
|
||||
'txS': '< ',
|
||||
'forwrdS': ' >>',
|
||||
'dropS': 'xx ',
|
||||
}
|
||||
if server_side:
|
||||
type += 'S'
|
||||
return switch.get(type, '???')
|
||||
|
||||
def get_fnc_handler(self, ctrl):
    '''Resolve the handler for a control word from self.switch.

    Returns (callable, printable name). Table entries that are plain
    strings (documented but unhandled messages) fall back to
    msg_unknown with the string as the printable name.
    '''
    fnc = self.switch.get(ctrl, self.msg_unknown)
    if not callable(fnc):
        # entry is only a descriptive string -> no real handler
        return self.msg_unknown, repr(fnc)
    return fnc, repr(fnc.__name__)
|
||||
|
||||
def _build_header(self, ctrl) -> None:
    '''build header for new transmit message

    Writes the V5 frame prefix (start byte 0xA5, zero length placeholder,
    control word, next sequence word, serial number); the length is
    patched later by _finish_send_msg().
    '''
    self.send_msg_ofs = self.ifc.tx_len()

    self.ifc.tx_add(struct.pack(
        '<BHHHL', 0xA5, 0, ctrl, self.seq.get_send(), self.snr))
    _fnc, _str = self.get_fnc_handler(ctrl)
    logger.info(self._flow_str(self.server_side, 'tx') +
                f' Ctl: {int(ctrl):#04x} Msg: {_str}')
|
||||
|
||||
def _finish_send_msg(self) -> None:
    '''finish the transmit message, set length and checksum

    Patches the payload length (total minus 11-byte header), then
    appends the 8-bit sum checksum and the stop byte 0x15.
    '''
    _len = self.ifc.tx_len() - self.send_msg_ofs
    struct.pack_into('<H', self.ifc.tx_peek(), self.send_msg_ofs+1,
                     _len-11)
    # checksum covers everything after the start byte
    check = sum(self.ifc.tx_peek()[
        self.send_msg_ofs+1:self.send_msg_ofs + _len]) & 0xff
    self.ifc.tx_add(struct.pack('<BB', check, 0x15))  # crc & stop
|
||||
|
||||
def _timestamp(self):
    '''current UTC time as integer epoch seconds'''
    return int(time.time())  # pragma: no cover
|
||||
|
||||
def _emu_timestamp(self):
|
||||
'''timestamp for an emulated inverter (realtime - 1 day)'''
|
||||
one_day = 24*60*60
|
||||
return self._timestamp()-one_day
|
||||
|
||||
'''
|
||||
Our private methods
|
||||
'''
|
||||
def __update_header(self, _forward_buffer):
    '''update header for message before forwarding,
    set sequence and checksum

    Walks every V5 frame in the buffer: rewrites the sequence word at
    offset +5 and recomputes the 8-bit sum checksum in place.
    '''
    _len = len(_forward_buffer)
    ofs = 0
    while ofs < _len:
        result = struct.unpack_from('<BH', _forward_buffer, ofs)
        data_len = result[1]  # payload length of this frame

        struct.pack_into('<H', _forward_buffer, ofs+5,
                         self.seq.get_send())

        # checksum over everything after the start byte, up to the crc
        check = sum(_forward_buffer[ofs+1:ofs+data_len+11]) & 0xff
        struct.pack_into('<B', _forward_buffer, ofs+data_len+11, check)
        # advance past header (11) + payload + crc + stop byte
        ofs += (13 + data_len)
|
||||
|
||||
def __process_complete_received_msg(self):
    '''Log, validate and dispatch one complete received message.

    The trailer (crc + stop byte) is checked first; on the very first
    valid message the connection leaves the init state and the serial
    number is registered.
    '''
    log_lvl = self.log_lvl.get(self.control, logging.WARNING)
    # table entries may be callables that compute the level lazily
    if callable(log_lvl):
        log_lvl = log_lvl()
    self.ifc.rx_log(log_lvl, f'Received from {self.addr}:')
    # self._recv_buffer, self.header_len +
    # self.data_len+2)
    if self.__trailer_is_ok(self.ifc.rx_peek(), self.header_len
                            + self.data_len + 2):
        if self.state == State.init:
            self.state = State.received
            self._set_serial_no(self.snr)
        self.__dispatch_msg()
|
||||
|
||||
    def __parse_header(self, buf: bytes, buf_len: int) -> None:
        '''Parse the fixed-size frame header into instance attributes.

        Sets self.header_valid on success; on an invalid start byte the
        whole receive buffer is discarded.
        '''
        if (buf_len < self.header_len):  # enough bytes for complete header?
            return

        result = struct.unpack_from('<BHHHL', buf, 0)

        # store parsed header values in the class
        start = result[0]          # start byte, must be 0xA5
        self.data_len = result[1]  # len of variable id string
        self.control = result[2]   # control / message code
        self.seq.set_recv(result[3])
        self.snr = result[4]       # collector serial number

        if start != 0xA5:
            hex_dump_memory(logging.ERROR,
                            'Drop packet w invalid start byte from'
                            f' {self.addr}:', buf, buf_len)

            self.inc_counter('Invalid_Msg_Format')
            # erase broken recv buffer
            self.ifc.rx_clear()
            return
        self.header_valid = True

    def __trailer_is_ok(self, buf: bytes, buf_len: int) -> bool:
        '''Validate checksum and stop byte of a received frame.

        Returns False for a broken trailer; the receive buffer is only
        cleared when resynchronisation on the next start byte fails.
        '''
        crc = buf[self.data_len+11]
        stop = buf[self.data_len+12]
        if stop != 0x15:
            hex_dump_memory(logging.ERROR,
                            'Drop packet w invalid stop byte from '
                            f'{self.addr}:', buf, buf_len)
            self.inc_counter('Invalid_Msg_Format')
            if self.ifc.rx_len() > (self.data_len+13):
                next_start = buf[self.data_len+13]
                if next_start != 0xa5:
                    # the next byte is no start byte either -> frame sync
                    # is lost; erase broken recv buffer
                    self.ifc.rx_clear()

            return False

        check = sum(buf[1:buf_len-2]) & 0xff
        if check != crc:
            self.inc_counter('Invalid_Msg_Format')
            logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}'
                         f' Stop:{int(stop):#02x}')
            # start & stop byte are valid, discard only this message
            return False

        return True
    def __flush_recv_msg(self) -> None:
        '''Consume the current frame from the receive buffer.'''
        # header + payload + 2 trailer bytes (checksum, stop)
        self.ifc.rx_get(self.header_len + self.data_len+2)
        self.header_valid = False
def __dispatch_msg(self) -> None:
|
||||
_fnc, _str = self.get_fnc_handler(self.control)
|
||||
if self.unique_id:
|
||||
logger.info(self._flow_str(self.server_side, 'rx') +
|
||||
f' Ctl: {int(self.control):#04x}' +
|
||||
f' Msg: {_str}')
|
||||
_fnc()
|
||||
else:
|
||||
logger.info(self._flow_str(self.server_side, 'drop') +
|
||||
f' Ctl: {int(self.control):#04x}' +
|
||||
f' Msg: {_str}')
|
||||
|
||||
'''
|
||||
Message handler methods
|
||||
'''
|
||||
    def msg_response(self):
        '''Parse a generic acknowledge/response frame.

        Returns the tuple (ftype, valid, ts, set_hb).
        '''
        data = self.ifc.rx_peek()[self.header_len:]
        result = struct.unpack_from('<BBLL', data, 0)
        ftype = result[0]        # always 2
        valid = result[1] == 1   # status
        ts = result[2]           # peer timestamp (epoch seconds)
        set_hb = result[3]       # always 60 or 120
        logger.debug(f'ftype:{ftype} accepted:{valid}'
                     f' ts:{ts:08x} nextHeartbeat: {set_hb}s')

        # NOTE(review): fromtimestamp() renders local time — confirm the
        # peer timestamp is meant to be displayed in the local zone
        dt = datetime.fromtimestamp(ts)
        logger.debug(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')
        return ftype, valid, ts, set_hb
|
||||
class SolarmanV5(SolarmanBase):
    '''Protocol handler for the Solarman V5 (GEN3PLUS) connection.'''
    AT_CMD = 1
    MB_RTU_CMD = 2
    MB_CLIENT_DATA_UP = 30
    '''Data up time in client mode'''
    HDR_FMT = '<BLLL'
    '''format string for packing of the header'''

    def __init__(self, addr, ifc: "AsyncIfc",
                 server_side: bool, client_mode: bool):
        super().__init__(addr, ifc, server_side, self.send_modbus_cb,
                         mb_timeout=8)

        self.db = InfosG3P(client_mode)
        self.forward_at_cmd_resp = False
        self.no_forwarding = False
        '''not allowed to connect to TSUN cloud by connection type'''
        self.establish_inv_emu = False
        '''create an Solarman EMU instance to send data to the TSUN cloud'''
        # dispatch table: control code -> message handler
        self.switch = {

            0x4210: self.msg_data_ind,    # real time data
            0x1210: self.msg_response,    # at least every 5 minutes

            0x4710: self.msg_hbeat_ind,   # heartbeat
            0x1710: self.msg_response,    # every 2 minutes

            # every 3 hours comes a sync sequence:
            # 00:00:00  0x4110  device data      ftype: 0x02
            # 00:00:02  0x4210  real time data   ftype: 0x01
            # 00:00:03  0x4210  real time data   ftype: 0x81
            # 00:00:05  0x4310  wifi data        ftype: 0x81  sub-id 0x0018: 0c  # noqa: E501
            # 00:00:06  0x4310  wifi data        ftype: 0x81  sub-id 0x0018: 1c  # noqa: E501
            # 00:00:07  0x4310  wifi data        ftype: 0x01  sub-id 0x0018: 0c  # noqa: E501
            # 00:00:08  0x4810  options?         ftype: 0x01

            0x4110: self.msg_dev_ind,     # device data, sync start
            0x1110: self.msg_response,    # every 3 hours

            0x4310: self.msg_sync_start,  # regularly after 3-6 hours
            0x1310: self.msg_response,
            0x4810: self.msg_sync_end,    # sync end
            0x1810: self.msg_response,

            #
            # MODbus or AT cmd
            0x4510: self.msg_command_req,  # from server
            0x1510: self.msg_command_rsp,  # from inverter
            # 0x0510: self.msg_command_rsp,  # from inverter
        }

        # per-control-code log level; a value may be a callable that is
        # evaluated per message (see get_cmd_rsp_log_lvl)
        self.log_lvl = {

            0x4210: logging.INFO,    # real time data
            0x1210: logging.INFO,    # at least every 5 minutes

            0x4710: logging.DEBUG,   # heartbeat
            0x1710: logging.DEBUG,   # every 2 minutes

            0x4110: logging.INFO,    # device data, sync start
            0x1110: logging.INFO,    # every 3 hours

            0x4310: logging.INFO,    # regularly after 3-6 hours
            0x1310: logging.INFO,

            0x4810: logging.INFO,    # sync end
            0x1810: logging.INFO,

            #
            # MODbus or AT cmd
            0x4510: logging.INFO,    # from server
            0x1510: self.get_cmd_rsp_log_lvl,
        }
        g3p_cnf = Config.get('gen3plus')

        # access control lists for AT commands, if configured
        if 'at_acl' in g3p_cnf:  # pragma: no cover
            self.at_acl = g3p_cnf['at_acl']

        # sensor list of the collector; learned from the 0x4110 frame
        self.sensor_list = 0

    '''
    Our public methods
    '''
def close(self) -> None:
|
||||
logging.debug('Solarman.close()')
|
||||
# we have references to methods of this class in self.switch
|
||||
# so we have to erase self.switch, otherwise this instance can't be
|
||||
# deallocated by the garbage collector ==> we get a memory leak
|
||||
self.switch.clear()
|
||||
self.log_lvl.clear()
|
||||
super().close()
|
||||
|
||||
    async def send_start_cmd(self, snr: int, host: str,
                             forward: bool,
                             start_timeout=MB_CLIENT_DATA_UP):
        '''Start client-mode operation for the given collector.

        Seeds the local DB with connection defaults, sends an initial
        MODBUS read and starts the periodic MODBUS timer.

        snr:     collector serial number
        host:    inverter IP address, stored in the DB
        forward: create an EMU connection to forward data to TSUN
        '''
        self.no_forwarding = True
        self.establish_inv_emu = forward
        self.snr = snr
        self._set_serial_no(snr)
        self.mb_timeout = start_timeout
        self.db.set_db_def_value(Register.IP_ADDRESS, host)
        self.db.set_db_def_value(Register.POLLING_INTERVAL,
                                 self.mb_timeout)
        self.db.set_db_def_value(Register.DATA_UP_INTERVAL,
                                 300)
        self.db.set_db_def_value(Register.COLLECT_INTERVAL,
                                 1)
        self.db.set_db_def_value(Register.HEARTBEAT_INTERVAL,
                                 120)
        self.db.set_db_def_value(Register.SENSOR_LIST,
                                 Fmt.hex4((self.sensor_list, )))
        self.new_data['controller'] = True

        self.state = State.up
        # initial read of the status register block
        self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG)
        self.mb_timer.start(self.mb_timeout)

    def new_state_up(self):
        '''Enter State.up once and start MODBUS polling if configured.'''
        if self.state is not State.up:
            self.state = State.up
            if (self.modbus_polling):
                self.mb_timer.start(self.mb_first_timeout)
                self.db.set_db_def_value(Register.POLLING_INTERVAL,
                                         self.mb_timeout)

    def establish_emu(self):
        '''Queue an emulated 0x4110 device frame for the TSUN cloud.'''
        _len = 223
        build_msg = self.db.build(_len, 0x41, 2)
        struct.pack_into(
            '<BHHHLBL', build_msg, 0, 0xA5, _len-11, 0x4110,
            0, self.snr, 2, self._emu_timestamp())
        self.ifc.fwd_add(build_msg)
        # NOTE(review): checksum is sent as 0 here — confirm the receiver
        # ignores the crc of this emulated frame
        self.ifc.fwd_add(struct.pack('<BB', 0, 0x15))  # crc & stop

    def __set_config_parms(self, inv: dict):
        '''init connection with params from the configuration'''
        self.node_id = inv['node_id']
        self.sug_area = inv['suggested_area']
        self.modbus_polling = inv['modbus_polling']
        self.sensor_list = inv['sensor_list']
        if self.mb:
            self.mb.set_node_id(self.node_id)
    def _set_serial_no(self, snr: int):
        '''check the serial number and configure the inverter connection'''
        serial_no = str(snr)
        if self.unique_id == serial_no:
            # already configured for this collector
            logger.debug(f'SerialNo: {serial_no}')
        else:
            inverters = Config.get('inverters')

            # look up the collector by its 'monitor_sn' config entry
            for key, inv in inverters.items():
                if (type(inv) is dict and 'monitor_sn' in inv
                        and inv['monitor_sn'] == snr):
                    self.__set_config_parms(inv)
                    self.db.set_pv_module_details(inv)
                    logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}')  # noqa: E501

                    self.db.set_db_def_value(Register.COLLECTOR_SNR, snr)
                    self.db.set_db_def_value(Register.SERIAL_NUMBER, key)
                    break
            else:
                # no config entry matched this serial number
                self.node_id = ''
                self.sug_area = ''
                if 'allow_all' not in inverters or not inverters['allow_all']:
                    self.inc_counter('Unknown_SNR')
                    self.unique_id = None
                    logger.warning(f'ignore message from unknow inverter! (SerialNo: {serial_no})')  # noqa: E501
                    return
                logger.warning(f'SerialNo {serial_no} not known but accepted!')

            self.unique_id = serial_no

    def forward(self, buffer, buflen) -> None:
        '''add the actual receive msg to the forwarding queue'''
        if self.no_forwarding:
            # client-mode connections never talk to the TSUN cloud
            return
        tsun = Config.get('solarman')
        if tsun['enabled']:
            self.ifc.fwd_add(buffer[:buflen])
            self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:')

            _, _str = self.get_fnc_handler(self.control)
            logger.info(self._flow_str(self.server_side, 'forwrd') +
                        f' Ctl: {int(self.control):#04x}'
                        f' Msg: {_str}')

    def _init_new_client_conn(self) -> bool:
        '''No login handshake is needed on a new client connection.'''
        return False

    def _heartbeat(self) -> int:
        '''Heartbeat interval in seconds offered to the peer.'''
        return 60  # pragma: no cover
    def __send_ack_rsp(self, msgtype, ftype, ack=1):
        '''Send an acknowledge frame: ftype, ack, timestamp, heartbeat.'''
        self._build_header(msgtype)
        self.ifc.tx_add(struct.pack('<BBLL', ftype, ack,
                                    self._timestamp(),
                                    self._heartbeat()))
        self._finish_send_msg()

    def send_modbus_cb(self, pdu: bytearray, log_lvl: int, state: str):
        '''Wrap a MODBUS RTU pdu into a 0x4510 frame and transmit it.'''
        if self.state != State.up:
            # the collector only accepts MODBUS commands while up
            logger.warning(f'[{self.node_id}] ignore MODBUS cmd,'
                           ' cause the state is not UP anymore')
            return
        self._build_header(0x4510)
        self.ifc.tx_add(struct.pack('<BHLLL', self.MB_RTU_CMD,
                                    self.sensor_list, 0, 0, 0))
        self.ifc.tx_add(pdu)
        self._finish_send_msg()
        self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:')
        self.ifc.tx_flush()

    def mb_timout_cb(self, exp_cnt):
        '''Periodic MODBUS poll; every 30th expiry also reads config regs.

        NOTE(review): the method name keeps the historic 'timout'
        spelling since callers register it under this name.
        '''
        self.mb_timer.start(self.mb_timeout)

        self._send_modbus_cmd(Modbus.READ_REGS, 0x3000, 48, logging.DEBUG)

        if 1 == (exp_cnt % 30):
            # regular MODBUS status request
            self._send_modbus_cmd(Modbus.READ_REGS, 0x2000, 96, logging.DEBUG)
def at_cmd_forbidden(self, cmd: str, connection: str) -> bool:
|
||||
return not cmd.startswith(tuple(self.at_acl[connection]['allow'])) or \
|
||||
cmd.startswith(tuple(self.at_acl[connection]['block']))
|
||||
|
||||
    async def send_at_cmd(self, at_cmd: str) -> None:
        '''Send an AT command to the collector (requested via MQTT).

        Forbidden commands are answered on the 'at_resp' topic instead
        of being transmitted.
        '''
        if self.state != State.up:
            logger.warning(f'[{self.node_id}] ignore AT+ cmd,'
                           ' as the state is not UP')
            return
        at_cmd = at_cmd.strip()

        if self.at_cmd_forbidden(cmd=at_cmd, connection='mqtt'):
            data_json = f'\'{at_cmd}\' is forbidden'
            node_id = self.node_id
            key = 'at_resp'
            logger.info(f'{key}: {data_json}')
            await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json)  # noqa: E501
            return

        # the response belongs to us: don't forward it to TSUN
        self.forward_at_cmd_resp = False
        self._build_header(0x4510)
        self.ifc.tx_add(struct.pack(f'<BHLLL{len(at_cmd)}sc', self.AT_CMD,
                                    0x0002, 0, 0, 0,
                                    at_cmd.encode('utf-8'), b'\r'))
        self._finish_send_msg()
        self.ifc.tx_log(logging.INFO, 'Send AT Command:')
        try:
            self.ifc.tx_flush()
        except Exception:
            # connection already gone; drop the queued frame
            self.ifc.tx_clear()

    def __forward_msg(self):
        '''Forward the complete current frame (header+payload+trailer).'''
        self.forward(self.ifc.rx_peek(), self.header_len+self.data_len+2)
def __build_model_name(self):
|
||||
db = self.db
|
||||
max_pow = db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
|
||||
rated = db.get_db_value(Register.RATED_POWER, 0)
|
||||
model = None
|
||||
if max_pow == 2000:
|
||||
if rated == 800 or rated == 600:
|
||||
model = f'TSOL-MS{max_pow}({rated})'
|
||||
else:
|
||||
model = f'TSOL-MS{max_pow}'
|
||||
elif max_pow == 1800 or max_pow == 1600:
|
||||
model = f'TSOL-MS{max_pow}'
|
||||
if model:
|
||||
logger.info(f'Model: {model}')
|
||||
self.db.set_db_def_value(Register.EQUIPMENT_MODEL, model)
|
||||
|
||||
    def __process_data(self, ftype, ts):
        '''Parse a data frame into the DB and mark updated groups.

        ts may be None when no time offset has been learned yet.
        '''
        inv_update = False
        msg_type = self.control >> 8
        for key, update in self.db.parse(self.ifc.rx_peek(), msg_type, ftype,
                                         self.node_id):
            if update:
                if key == 'inverter':
                    inv_update = True
                self._set_mqtt_timestamp(key, ts)
                self.new_data[key] = True

        if inv_update:
            # inverter power values changed -> re-derive the model name
            self.__build_model_name()
    '''
    Message handler methods
    '''
def msg_unknown(self):
|
||||
logger.warning(f"Unknow Msg: ID:{int(self.control):#04x}")
|
||||
self.inc_counter('Unknown_Msg')
|
||||
self.__forward_msg()
|
||||
|
||||
    def msg_dev_ind(self):
        '''Handle a 0x4110 device-data indication (sync start).'''
        data = self.ifc.rx_peek()[self.header_len:]
        result = struct.unpack_from(self.HDR_FMT, data, 0)
        ftype = result[0]   # always 2
        total = result[1]
        tim = result[2]
        res = result[3]     # always zero
        logger.info(f'frame type:{ftype:02x}'
                    f' timer:{tim:08x}s null:{res}')
        if self.time_ofs:
            # translate the relative time into an absolute timestamp
            ts = total + self.time_ofs
        else:
            ts = None
        self.__process_data(ftype, ts)
        # the device frame carries the collector's sensor list (hex string)
        self.sensor_list = int(self.db.get_db_value(Register.SENSOR_LIST, 0),
                               16)
        self.__forward_msg()
        self.__send_ack_rsp(0x1110, ftype)

    def msg_data_ind(self):
        '''Handle a 0x4210 real-time data indication.'''
        data = self.ifc.rx_peek()
        result = struct.unpack_from('<BHLLLHL', data, self.header_len)
        ftype = result[0]   # 1 or 0x81
        sensor = result[1]
        total = result[2]
        tim = result[3]
        if 1 == ftype:
            # only ftype 1 carries a valid time offset
            self.time_ofs = result[4]
        unkn = result[5]
        cnt = result[6]
        if sensor != self.sensor_list:
            logger.warning(f'Unexpected Sensor-List:{sensor:04x}'
                           f' (!={self.sensor_list:04x})')
        logger.info(f'ftype:{ftype:02x} timer:{tim:08x}s'
                    f' ??: {unkn:04x} cnt:{cnt}')
        if self.time_ofs:
            ts = total + self.time_ofs
        else:
            ts = None

        self.__process_data(ftype, ts)
        self.__forward_msg()
        self.__send_ack_rsp(0x1210, ftype)
        self.new_state_up()

    def msg_sync_start(self):
        '''Handle a 0x4310 sync-start frame: store the time offset,
        forward and acknowledge.'''
        data = self.ifc.rx_peek()[self.header_len:]
        result = struct.unpack_from(self.HDR_FMT, data, 0)
        ftype = result[0]
        total = result[1]
        self.time_ofs = result[3]

        dt = datetime.fromtimestamp(total + self.time_ofs)
        logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')

        self.__forward_msg()
        self.__send_ack_rsp(0x1310, ftype)
    def msg_command_req(self):
        '''Handle a 0x4510 command request coming from the TSUN cloud.

        AT commands are checked against the ACL; MODBUS commands are
        handed to the remote stream's MODBUS layer and forwarded by its
        callback, not here.
        '''
        data = self.ifc.rx_peek()[self.header_len:
                                  self.header_len+self.data_len]
        result = struct.unpack_from('<B', data, 0)
        ftype = result[0]
        if ftype == self.AT_CMD:
            at_cmd = data[15:].decode()
            if self.at_cmd_forbidden(cmd=at_cmd, connection='tsun'):
                self.inc_counter('AT_Command_Blocked')
                # blocked commands are not forwarded at all
                return
            self.inc_counter('AT_Command')
            # let the cloud see the collector's answer
            self.forward_at_cmd_resp = True

        elif ftype == self.MB_RTU_CMD:
            rstream = self.ifc.remote.stream
            # NOTE(review): name-mangled private method accessed on
            # another instance; works only while rstream is a SolarmanV5
            if rstream.mb.recv_req(data[15:],
                                   rstream.__forward_msg):
                self.inc_counter('Modbus_Command')
            else:
                logger.error('Invalid Modbus Msg')
                self.inc_counter('Invalid_Msg_Format')
            return

        self.__forward_msg()

    def publish_mqtt(self, key, data):  # pragma: no cover
        '''Fire-and-forget publish of data on the given MQTT topic.'''
        asyncio.ensure_future(
            self.mqtt.publish(key, data))
def get_cmd_rsp_log_lvl(self) -> int:
|
||||
ftype = self.ifc.rx_peek()[self.header_len]
|
||||
if ftype == self.AT_CMD:
|
||||
if self.forward_at_cmd_resp:
|
||||
return logging.INFO
|
||||
return logging.DEBUG
|
||||
elif ftype == self.MB_RTU_CMD \
|
||||
and self.server_side:
|
||||
return self.mb.last_log_lvl
|
||||
|
||||
return logging.WARNING
|
||||
|
||||
    def msg_command_rsp(self):
        '''Handle a 0x1510 command response from the collector.'''
        data = self.ifc.rx_peek()[self.header_len:
                                  self.header_len+self.data_len]
        ftype = data[0]
        if ftype == self.AT_CMD:
            if not self.forward_at_cmd_resp:
                # response to our own AT cmd -> publish via MQTT, no fwd
                data_json = data[14:].decode("utf-8")
                node_id = self.node_id
                key = 'at_resp'
                logger.info(f'{key}: {data_json}')
                self.publish_mqtt(f'{self.entity_prfx}{node_id}{key}', data_json)  # noqa: E501
                return
        elif ftype == self.MB_RTU_CMD:
            self.__modbus_command_rsp(data)
            return
        self.__forward_msg()

    def __parse_modbus_rsp(self, data):
        '''Feed a MODBUS response into the DB.

        Returns True when inverter data changed.
        '''
        inv_update = False
        self.modbus_elms = 0
        for key, update, _ in self.mb.recv_resp(self.db, data[14:]):
            self.modbus_elms += 1
            if update:
                if key == 'inverter':
                    inv_update = True
                self._set_mqtt_timestamp(key, self._timestamp())
                self.new_data[key] = True
        return inv_update

    def __modbus_command_rsp(self, data):
        '''process MODBUS RTU response'''
        valid = data[1]
        modbus_msg_len = self.data_len - 14
        if valid == 1 and modbus_msg_len > 4:
            inv_update = self.__parse_modbus_rsp(data)
            if inv_update:
                self.__build_model_name()

        if self.establish_inv_emu and not self.ifc.remote.stream:
            # first MODBUS data in client mode: open the EMU connection
            self.establish_emu()
def msg_hbeat_ind(self):
|
||||
data = self.ifc.rx_peek()[self.header_len:]
|
||||
result = struct.unpack_from('<B', data, 0)
|
||||
ftype = result[0]
|
||||
|
||||
self.__forward_msg()
|
||||
self.__send_ack_rsp(0x1710, ftype)
|
||||
self.new_state_up()
|
||||
|
||||
    def msg_sync_end(self):
        '''Handle the 0x4810 sync-end frame: store the time offset,
        forward and acknowledge.'''
        data = self.ifc.rx_peek()[self.header_len:]
        result = struct.unpack_from(self.HDR_FMT, data, 0)
        ftype = result[0]
        total = result[1]   # relative time, made absolute via time_ofs
        self.time_ofs = result[3]

        dt = datetime.fromtimestamp(total + self.time_ofs)
        logger.info(f'ts: {dt.strftime("%Y-%m-%d %H:%M:%S")}')

        self.__forward_msg()
        self.__send_ack_rsp(0x1810, ftype)
||||
1118
app/src/infos.py
1118
app/src/infos.py
File diff suppressed because it is too large
Load Diff
@@ -1,213 +0,0 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import traceback
|
||||
import json
|
||||
from config import Config
|
||||
from async_stream import AsyncStream
|
||||
from mqtt import Mqtt
|
||||
from aiomqtt import MqttCodeError
|
||||
from infos import Infos
|
||||
|
||||
# import gc
|
||||
|
||||
# logger = logging.getLogger('conn')
|
||||
logger_mqtt = logging.getLogger('mqtt')
|
||||
|
||||
|
||||
class Inverter(AsyncStream):
    '''class Inverter is a derivation of an Async_Stream

    The class has some class methods for managing common resources like a
    connection to the MQTT broker or proxy error counters which are common
    for all inverter connections

    Instances of the class are connections to an inverter and can have an
    optional link to a remote connection to the TSUN cloud. A remote
    connection dies with the inverter connection.

    class methods:
        class_init():  initialize the common resources of the proxy (MQTT
                       broker, Proxy DB, etc). Must be called before the
                       first Inverter instance can be created
        class_close(): release the common resources of the proxy. Should not
                       be called before any instances of the class are
                       destroyed

    methods:
        server_loop(addr): Async loop method for receiving messages from the
                           inverter (server-side)
        client_loop(addr): Async loop method for receiving messages from the
                           TSUN cloud (client-side)
        async_create_remote(): Establish a client connection to the TSUN cloud
        async_publ_mqtt(): Publish data to MQTT broker
        close(): Release method which must be called before an instance can
                 be destroyed
    '''
    @classmethod
    def class_init(cls) -> None:
        '''Initialize shared resources: proxy DB, HA config, MQTT.'''
        logging.debug('Inverter.class_init')
        # initialize the proxy statistics
        Infos.static_init()
        cls.db_stat = Infos()

        ha = Config.get('ha')
        cls.entity_prfx = ha['entity_prefix'] + '/'
        cls.discovery_prfx = ha['discovery_prefix'] + '/'
        cls.proxy_node_id = ha['proxy_node_id'] + '/'
        cls.proxy_unique_id = ha['proxy_unique_id']

        # call Mqtt singleton to establish the connection to the mqtt broker
        cls.mqtt = Mqtt(cls.__cb_mqtt_is_up)
    @classmethod
    async def __cb_mqtt_is_up(cls) -> None:
        '''Callback invoked when the MQTT broker connection is up.'''
        logging.info('Initialize proxy device on home assistant')
        # register proxy status counters at home assistant
        await cls.__register_proxy_stat_home_assistant()

        # send values of the proxy status counters
        await asyncio.sleep(0.5)            # wait a bit, before sending data
        cls.new_stat_data['proxy'] = True   # force sending data to sync ha
        await cls.__async_publ_mqtt_proxy_stat('proxy')

    @classmethod
    async def __register_proxy_stat_home_assistant(cls) -> None:
        '''register all our topics at home assistant'''
        for data_json, component, node_id, id in cls.db_stat.ha_confs(
                cls.entity_prfx, cls.proxy_node_id,
                cls.proxy_unique_id, True):
            logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}")  # noqa: E501
            await cls.mqtt.publish(f'{cls.discovery_prfx}{component}/{node_id}{id}/config', data_json)  # noqa: E501

    @classmethod
    async def __async_publ_mqtt_proxy_stat(cls, key) -> None:
        '''Publish the proxy statistic group 'key' if marked as new.'''
        stat = Infos.stat
        if key in stat and cls.new_stat_data[key]:
            data_json = json.dumps(stat[key])
            node_id = cls.proxy_node_id
            logger_mqtt.debug(f'{key}: {data_json}')
            await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}",
                                   data_json)
            cls.new_stat_data[key] = False

    @classmethod
    def class_close(cls, loop) -> None:
        '''Release shared resources; call after all instances are gone.'''
        logging.debug('Inverter.class_close')
        logging.info('Close MQTT Task')
        loop.run_until_complete(cls.mqtt.close())
        cls.mqtt = None
    def __init__(self, reader, writer, addr):
        super().__init__(reader, writer, addr, None, True)
        # last seen home-assistant restart counter; -1 forces an
        # initial re-registration
        self.ha_restarts = -1

    async def server_loop(self, addr):
        '''Loop for receiving messages from the inverter (server-side)'''
        logging.info(f'Accept connection from {addr}')
        self.inc_counter('Inverter_Cnt')
        await self.loop()
        self.dec_counter('Inverter_Cnt')
        logging.info(f'Server loop stopped for r{self.r_addr}')

        # if the server connection closes, we also have to disconnect
        # the connection to the TSUN cloud
        if self.remoteStream:
            logging.debug("disconnect client connection")
            self.remoteStream.disc()
        try:
            await self.__async_publ_mqtt_proxy_stat('proxy')
        except Exception:
            # best effort: MQTT may already be gone during shutdown
            pass

    async def client_loop(self, addr):
        '''Loop for receiving messages from the TSUN cloud (client-side)'''
        clientStream = await self.remoteStream.loop()
        logging.info(f'Client loop stopped for l{clientStream.l_addr}')

        # if the client connection closes, we don't touch the server
        # connection. Instead we erase the client connection stream,
        # thus on the next received packet from the inverter, we can
        # establish a new connection to the TSUN cloud

        # erase backlink to inverter
        clientStream.remoteStream = None

        if self.remoteStream == clientStream:
            # then erase client connection
            self.remoteStream = None
    async def async_create_remote(self) -> None:
        '''Establish a client connection to the TSUN cloud'''
        tsun = Config.get('tsun')
        host = tsun['host']
        port = tsun['port']
        addr = (host, port)

        try:
            # NOTE(review): logged before the connect actually succeeds
            logging.info(f'Connected to {addr}')
            connect = asyncio.open_connection(host, port)
            reader, writer = await connect
            self.remoteStream = AsyncStream(reader, writer, addr, self, False)
            asyncio.create_task(self.client_loop(addr))

        except ConnectionRefusedError as error:
            logging.info(f'{error}')
        except Exception:
            logging.error(
                f"Inverter: Exception for {addr}:\n"
                f"{traceback.format_exc()}")
    async def async_publ_mqtt(self) -> None:
        '''publish data to MQTT broker'''
        # check if new inverter or collector infos are available or when the
        # home assistant has changed the status back to online
        try:
            if (('inverter' in self.new_data and self.new_data['inverter'])
                    or ('collector' in self.new_data and
                        self.new_data['collector'])
                    or self.mqtt.ha_restarts != self.ha_restarts):
                # re-register the topics before publishing values
                await self.__register_proxy_stat_home_assistant()
                await self.__register_home_assistant()
                self.ha_restarts = self.mqtt.ha_restarts

            for key in self.new_data:
                await self.__async_publ_mqtt_packet(key)
            for key in self.new_stat_data:
                await self.__async_publ_mqtt_proxy_stat(key)

        except MqttCodeError as error:
            logging.error(f'Mqtt except: {error}')
        except Exception:
            logging.error(
                f"Inverter: Exception:\n"
                f"{traceback.format_exc()}")
    async def __async_publ_mqtt_packet(self, key):
        '''Publish the data group 'key' from the DB if marked as new.'''
        db = self.db.db
        if key in db and self.new_data[key]:
            data_json = json.dumps(db[key])
            node_id = self.node_id
            logger_mqtt.debug(f'{key}: {data_json}')
            await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json)  # noqa: E501
            self.new_data[key] = False

    async def __register_home_assistant(self) -> None:
        '''register all our topics at home assistant'''
        for data_json, component, node_id, id in self.db.ha_confs(
                self.entity_prfx, self.node_id, self.unique_id,
                False, self.sug_area):
            logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
                              f" node_id:'{node_id}' {data_json}")
            await self.mqtt.publish(f"{self.discovery_prfx}{component}"
                                    f"/{node_id}{id}/config", data_json)

    def close(self) -> None:
        '''Release the connection; must be called before destruction.'''
        logging.debug(f'Inverter.close() l{self.l_addr} | r{self.r_addr}')
        super().close()  # call close handler in the parent class

    def __del__(self):
        logging.debug("Inverter.__del__")
        super().__del__()
||||
193
app/src/inverter_base.py
Normal file
193
app/src/inverter_base.py
Normal file
@@ -0,0 +1,193 @@
|
||||
import weakref
|
||||
import asyncio
|
||||
import logging
|
||||
import traceback
|
||||
import json
|
||||
import gc
|
||||
from aiomqtt import MqttCodeError
|
||||
from asyncio import StreamReader, StreamWriter
|
||||
from ipaddress import ip_address
|
||||
|
||||
from inverter_ifc import InverterIfc
|
||||
from proxy import Proxy
|
||||
from async_stream import StreamPtr
|
||||
from async_stream import AsyncStreamClient
|
||||
from async_stream import AsyncStreamServer
|
||||
from cnf.config import Config
|
||||
from infos import Infos
|
||||
|
||||
logger_mqtt = logging.getLogger('mqtt')
|
||||
|
||||
|
||||
class InverterBase(InverterIfc, Proxy):
    '''Base for one inverter connection with an optional remote link.'''

    def __init__(self, reader: StreamReader, writer: StreamWriter,
                 config_id: str, prot_class,
                 client_mode: bool = False,
                 remote_prot_class=None):
        '''reader/writer: accepted server-side connection
        config_id:   key into Config for the cloud connection settings
        prot_class:  protocol class for the local (inverter) side
        client_mode: passed through to the protocol class
        remote_prot_class: optional different protocol for the cloud side
        '''
        Proxy.__init__(self)
        # weak registry of all live connections
        self._registry.append(weakref.ref(self))
        self.addr = writer.get_extra_info('peername')
        self.config_id = config_id
        if remote_prot_class:
            self.prot_class = remote_prot_class
        else:
            self.prot_class = prot_class
        self.__ha_restarts = -1
        self.remote = StreamPtr(None)
        ifc = AsyncStreamServer(reader, writer,
                                self.async_publ_mqtt,
                                self.create_remote,
                                self.remote)

        self.local = StreamPtr(
            prot_class(self.addr, ifc, True, client_mode), ifc
        )
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        '''Tear down both sides and break the reference cycles.'''
        logging.debug(f'InverterBase.__exit__() {self.addr}')
        self.__del_remote()

        self.local.stream.close()
        self.local.stream = None
        self.local.ifc.close()
        self.local.ifc = None

        # now explicitly call garbage collector to release unreachable objects
        unreachable_obj = gc.collect()
        logging.debug(
            f'InverterBase.__exit: freed unreachable obj: {unreachable_obj}')

    def __del_remote(self):
        '''Close and drop the remote (cloud) stream and interface.'''
        if self.remote.stream:
            self.remote.stream.close()
            self.remote.stream = None

        if self.remote.ifc:
            self.remote.ifc.close()
            self.remote.ifc = None
    async def disc(self, shutdown_started=False) -> None:
        '''Disconnect both sides; flag the streams during shutdown.'''
        if self.remote.stream:
            self.remote.stream.shutdown_started = shutdown_started
        if self.remote.ifc:
            await self.remote.ifc.disc()
        if self.local.stream:
            self.local.stream.shutdown_started = shutdown_started
        if self.local.ifc:
            await self.local.ifc.disc()

    def healthy(self) -> bool:
        '''True when both existing stream interfaces report healthy.'''
        logging.debug('InverterBase healthy()')

        if self.local.ifc and not self.local.ifc.healthy():
            return False
        if self.remote.ifc and not self.remote.ifc.healthy():
            return False
        return True
    async def create_remote(self) -> None:
        '''Establish a client connection to the TSUN cloud'''

        tsun = Config.get(self.config_id)
        host = tsun['host']
        port = tsun['port']
        addr = (host, port)
        stream = self.local.stream

        try:
            logging.info(f'[{stream.node_id}] Connect to {addr}')
            connect = asyncio.open_connection(host, port)
            reader, writer = await connect
            r_addr = writer.get_extra_info('peername')
            if r_addr is not None:
                (ip, _) = r_addr
                # forwarding to a private IP would create a proxy loop;
                # disable the cloud connection for this config entirely
                if ip_address(ip).is_private:
                    logging.error(
                        f"""resolve {host} to {ip}, which is a private IP!
\u001B[31m Check your DNS settings and use a public DNS resolver!

To prevent a possible loop, forwarding to local IP addresses is
not supported and is deactivated for subsequent connections
\u001B[0m
""")
                    Config.act_config[self.config_id]['enabled'] = False

            ifc = AsyncStreamClient(
                reader, writer, self.local, self.__del_remote)

            self.remote.ifc = ifc
            # some protocol classes need the id_str of the local stream
            if hasattr(stream, 'id_str'):
                self.remote.stream = self.prot_class(
                    addr, ifc, server_side=False,
                    client_mode=False, id_str=stream.id_str)
            else:
                self.remote.stream = self.prot_class(
                    addr, ifc, server_side=False,
                    client_mode=False)

            logging.info(f'[{self.remote.stream.node_id}:'
                         f'{self.remote.stream.conn_no}] '
                         f'Connected to {addr}')
            asyncio.create_task(self.remote.ifc.client_loop(addr))

        except (ConnectionRefusedError, TimeoutError) as error:
            logging.info(f'{error}')
        except Exception:
            Infos.inc_counter('SW_Exception')
            logging.error(
                f"Inverter: Exception for {addr}:\n"
                f"{traceback.format_exc()}")
async def async_publ_mqtt(self) -> None:
|
||||
'''publish data to MQTT broker'''
|
||||
stream = self.local.stream
|
||||
if not stream or not stream.unique_id:
|
||||
return
|
||||
# check if new inverter or collector infos are available or when the
|
||||
# home assistant has changed the status back to online
|
||||
try:
|
||||
if (('inverter' in stream.new_data and stream.new_data['inverter'])
|
||||
or ('collector' in stream.new_data and
|
||||
stream.new_data['collector'])
|
||||
or self.mqtt.ha_restarts != self.__ha_restarts):
|
||||
await self._register_proxy_stat_home_assistant()
|
||||
await self.__register_home_assistant(stream)
|
||||
self.__ha_restarts = self.mqtt.ha_restarts
|
||||
|
||||
for key in stream.new_data:
|
||||
await self.__async_publ_mqtt_packet(stream, key)
|
||||
for key in Infos.new_stat_data:
|
||||
await Proxy._async_publ_mqtt_proxy_stat(key)
|
||||
|
||||
except MqttCodeError as error:
|
||||
logging.error(f'Mqtt except: {error}')
|
||||
except Exception:
|
||||
Infos.inc_counter('SW_Exception')
|
||||
logging.error(
|
||||
f"Inverter: Exception:\n"
|
||||
f"{traceback.format_exc()}")
|
||||
|
||||
async def __async_publ_mqtt_packet(self, stream, key):
|
||||
db = stream.db.db
|
||||
if key in db and stream.new_data[key]:
|
||||
data_json = json.dumps(db[key])
|
||||
node_id = stream.node_id
|
||||
logger_mqtt.debug(f'{key}: {data_json}')
|
||||
await self.mqtt.publish(f'{self.entity_prfx}{node_id}{key}', data_json) # noqa: E501
|
||||
stream.new_data[key] = False
|
||||
|
||||
async def __register_home_assistant(self, stream) -> None:
|
||||
'''register all our topics at home assistant'''
|
||||
for data_json, component, node_id, id in stream.db.ha_confs(
|
||||
self.entity_prfx, stream.node_id, stream.unique_id,
|
||||
stream.sug_area):
|
||||
logger_mqtt.debug(f"MQTT Register: cmp:'{component}'"
|
||||
f" node_id:'{node_id}' {data_json}")
|
||||
await self.mqtt.publish(f"{self.discovery_prfx}{component}"
|
||||
f"/{node_id}{id}/config", data_json)
|
||||
|
||||
stream.db.reg_clr_at_midnight(f'{self.entity_prfx}{stream.node_id}')
|
||||
37
app/src/inverter_ifc.py
Normal file
37
app/src/inverter_ifc.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from abc import abstractmethod
|
||||
import logging
|
||||
from asyncio import StreamReader, StreamWriter
|
||||
|
||||
from iter_registry import AbstractIterMeta
|
||||
|
||||
logger_mqtt = logging.getLogger('mqtt')
|
||||
|
||||
|
||||
class InverterIfc(metaclass=AbstractIterMeta):
|
||||
_registry = []
|
||||
|
||||
@abstractmethod
|
||||
def __init__(self, reader: StreamReader, writer: StreamWriter,
|
||||
config_id: str, prot_class,
|
||||
client_mode: bool):
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def __enter__(self):
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def __exit__(self, exc_type, exc, tb):
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def healthy(self) -> bool:
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
async def disc(self, shutdown_started=False) -> None:
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
async def create_remote(self) -> None:
|
||||
pass # pragma: no cover
|
||||
9
app/src/iter_registry.py
Normal file
9
app/src/iter_registry.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from abc import ABCMeta
|
||||
|
||||
|
||||
class AbstractIterMeta(ABCMeta):
|
||||
def __iter__(cls):
|
||||
for ref in cls._registry:
|
||||
obj = ref()
|
||||
if obj is not None:
|
||||
yield obj
|
||||
@@ -1,5 +1,5 @@
|
||||
[loggers]
|
||||
keys=root,tracer,mesg,conn,data,mqtt
|
||||
keys=root,tracer,mesg,conn,data,mqtt,asyncio
|
||||
|
||||
[handlers]
|
||||
keys=console_handler,file_handler_name1,file_handler_name2
|
||||
@@ -24,6 +24,12 @@ handlers=console_handler,file_handler_name1
|
||||
propagate=0
|
||||
qualname=mqtt
|
||||
|
||||
[logger_asyncio]
|
||||
level=INFO
|
||||
handlers=console_handler,file_handler_name1
|
||||
propagate=0
|
||||
qualname=asyncio
|
||||
|
||||
[logger_data]
|
||||
level=DEBUG
|
||||
handlers=file_handler_name1
|
||||
@@ -52,13 +58,13 @@ formatter=console_formatter
|
||||
class=handlers.TimedRotatingFileHandler
|
||||
level=INFO
|
||||
formatter=file_formatter
|
||||
args=('log/proxy.log', when:='midnight')
|
||||
args=(handlers.log_path + 'proxy.log', when:='midnight', backupCount:=handlers.log_backups)
|
||||
|
||||
[handler_file_handler_name2]
|
||||
class=handlers.TimedRotatingFileHandler
|
||||
level=NOTSET
|
||||
formatter=file_formatter
|
||||
args=('log/trace.log', when:='midnight')
|
||||
args=(handlers.log_path + 'trace.log', when:='midnight', backupCount:=handlers.log_backups)
|
||||
|
||||
[formatter_console_formatter]
|
||||
format=%(asctime)s %(levelname)5s | %(name)4s | %(message)s'
|
||||
|
||||
@@ -1,99 +1,131 @@
|
||||
import struct
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime
|
||||
import weakref
|
||||
from typing import Callable
|
||||
from enum import Enum
|
||||
|
||||
if __name__ == "app.src.messages":
|
||||
from app.src.infos import Infos
|
||||
from app.src.config import Config
|
||||
else: # pragma: no cover
|
||||
from infos import Infos
|
||||
from config import Config
|
||||
from async_ifc import AsyncIfc
|
||||
from protocol_ifc import ProtocolIfc
|
||||
from infos import Infos, Register
|
||||
from modbus import Modbus
|
||||
from my_timer import Timer
|
||||
|
||||
logger = logging.getLogger('msg')
|
||||
|
||||
|
||||
def hex_dump_memory(level, info, data, num):
|
||||
def __hex_val(n, data, data_len):
|
||||
line = ''
|
||||
for j in range(n-16, n):
|
||||
if j >= data_len:
|
||||
break
|
||||
line += '%02x ' % abs(data[j])
|
||||
return line
|
||||
|
||||
|
||||
def __asc_val(n, data, data_len):
|
||||
line = ''
|
||||
for j in range(n-16, n):
|
||||
if j >= data_len:
|
||||
break
|
||||
c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.'
|
||||
line += '%c' % c
|
||||
return line
|
||||
|
||||
|
||||
def hex_dump(data, data_len) -> list:
|
||||
n = 0
|
||||
lines = []
|
||||
|
||||
for i in range(0, data_len, 16):
|
||||
line = ' '
|
||||
line += '%04x | ' % (i)
|
||||
n += 16
|
||||
line += __hex_val(n, data, data_len)
|
||||
line += ' ' * (3 * 16 + 9 - len(line)) + ' | '
|
||||
line += __asc_val(n, data, data_len)
|
||||
lines.append(line)
|
||||
|
||||
return lines
|
||||
|
||||
|
||||
def hex_dump_str(data, data_len):
|
||||
lines = hex_dump(data, data_len)
|
||||
return '\n'.join(lines)
|
||||
|
||||
|
||||
def hex_dump_memory(level, info, data, data_len):
|
||||
lines = []
|
||||
lines.append(info)
|
||||
tracer = logging.getLogger('tracer')
|
||||
if not tracer.isEnabledFor(level):
|
||||
return
|
||||
|
||||
for i in range(0, num, 16):
|
||||
line = ' '
|
||||
line += '%04x | ' % (i)
|
||||
n += 16
|
||||
|
||||
for j in range(n-16, n):
|
||||
if j >= len(data):
|
||||
break
|
||||
line += '%02x ' % abs(data[j])
|
||||
|
||||
line += ' ' * (3 * 16 + 9 - len(line)) + ' | '
|
||||
|
||||
for j in range(n-16, n):
|
||||
if j >= len(data):
|
||||
break
|
||||
c = data[j] if not (data[j] < 0x20 or data[j] > 0x7e) else '.'
|
||||
line += '%c' % c
|
||||
|
||||
lines.append(line)
|
||||
lines += hex_dump(data, data_len)
|
||||
|
||||
tracer.log(level, '\n'.join(lines))
|
||||
|
||||
|
||||
class Control:
|
||||
def __init__(self, ctrl: int):
|
||||
self.ctrl = ctrl
|
||||
|
||||
def __int__(self) -> int:
|
||||
return self.ctrl
|
||||
|
||||
def is_ind(self) -> bool:
|
||||
return (self.ctrl == 0x91)
|
||||
|
||||
# def is_req(self) -> bool:
|
||||
# return not (self.ctrl & 0x08)
|
||||
|
||||
def is_resp(self) -> bool:
|
||||
return (self.ctrl == 0x99)
|
||||
class State(Enum):
|
||||
'''state of the logical connection'''
|
||||
init = 0
|
||||
'''just created'''
|
||||
received = 1
|
||||
'''at least one packet received'''
|
||||
up = 2
|
||||
'''at least one cmd-rsp transaction'''
|
||||
pend = 3
|
||||
'''inverter transaction pending, don't send MODBUS cmds'''
|
||||
closed = 4
|
||||
'''connection closed'''
|
||||
|
||||
|
||||
class IterRegistry(type):
|
||||
def __iter__(cls):
|
||||
for ref in cls._registry:
|
||||
obj = ref()
|
||||
if obj is not None:
|
||||
yield obj
|
||||
class Message(ProtocolIfc):
|
||||
MAX_START_TIME = 400
|
||||
'''maximum time without a received msg in sec'''
|
||||
MAX_INV_IDLE_TIME = 120
|
||||
'''maximum time without a received msg from the inverter in sec'''
|
||||
MAX_DEF_IDLE_TIME = 360
|
||||
'''maximum default time without a received msg in sec'''
|
||||
MB_START_TIMEOUT = 40
|
||||
'''start delay for Modbus polling in server mode'''
|
||||
MB_REGULAR_TIMEOUT = 60
|
||||
'''regular Modbus polling time in server mode'''
|
||||
|
||||
|
||||
class Message(metaclass=IterRegistry):
|
||||
_registry = []
|
||||
new_stat_data = {}
|
||||
|
||||
def __init__(self, server_side: bool):
|
||||
def __init__(self, node_id, ifc: "AsyncIfc", server_side: bool,
|
||||
send_modbus_cb: Callable[[bytes, int, str], None],
|
||||
mb_timeout: int):
|
||||
self._registry.append(weakref.ref(self))
|
||||
|
||||
self.server_side = server_side
|
||||
self.ifc = ifc
|
||||
self.node_id = node_id
|
||||
if server_side:
|
||||
self.mb = Modbus(send_modbus_cb, mb_timeout)
|
||||
self.mb_timer = Timer(self.mb_timout_cb, self.node_id)
|
||||
else:
|
||||
self.mb = None
|
||||
self.mb_timer = None
|
||||
self.header_valid = False
|
||||
self.header_len = 0
|
||||
self.data_len = 0
|
||||
self.unique_id = 0
|
||||
self.node_id = ''
|
||||
self.sug_area = ''
|
||||
self._recv_buffer = b''
|
||||
self._send_buffer = bytearray(0)
|
||||
self._forward_buffer = bytearray(0)
|
||||
self.db = Infos()
|
||||
self.new_data = {}
|
||||
self.switch = {
|
||||
0x00: self.msg_contact_info,
|
||||
0x22: self.msg_get_time,
|
||||
0x71: self.msg_collector_data,
|
||||
0x04: self.msg_inverter_data,
|
||||
}
|
||||
self.state = State.init
|
||||
self.shutdown_started = False
|
||||
self.modbus_elms = 0 # for unit tests
|
||||
self.mb_timeout = self.MB_REGULAR_TIMEOUT
|
||||
self.mb_first_timeout = self.MB_START_TIMEOUT
|
||||
'''timer value for next Modbus polling request'''
|
||||
self.modbus_polling = False
|
||||
|
||||
@property
|
||||
def node_id(self):
|
||||
return self._node_id
|
||||
|
||||
@node_id.setter
|
||||
def node_id(self, value):
|
||||
self._node_id = value
|
||||
self.ifc.set_node_id(value)
|
||||
|
||||
'''
|
||||
Empty methods, that have to be implemented in any child class which
|
||||
@@ -103,254 +135,69 @@ class Message(metaclass=IterRegistry):
|
||||
# to our _recv_buffer
|
||||
return # pragma: no cover
|
||||
|
||||
def _set_mqtt_timestamp(self, key, ts: float | None):
|
||||
if key not in self.new_data or \
|
||||
not self.new_data[key]:
|
||||
if key == 'grid':
|
||||
info_id = Register.TS_GRID
|
||||
elif key == 'input':
|
||||
info_id = Register.TS_INPUT
|
||||
elif key == 'total':
|
||||
info_id = Register.TS_TOTAL
|
||||
else:
|
||||
return
|
||||
# tstr = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(ts))
|
||||
# logger.info(f'update: key: {key} ts:{tstr}'
|
||||
self.db.set_db_def_value(info_id, round(ts))
|
||||
|
||||
def _timeout(self) -> int:
|
||||
if self.state == State.init or self.state == State.received:
|
||||
to = self.MAX_START_TIME
|
||||
elif self.state == State.up and \
|
||||
self.server_side and self.modbus_polling:
|
||||
to = self.MAX_INV_IDLE_TIME
|
||||
else:
|
||||
to = self.MAX_DEF_IDLE_TIME
|
||||
return to
|
||||
|
||||
def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
|
||||
if self.state != State.up:
|
||||
logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,'
|
||||
' as the state is not UP')
|
||||
return
|
||||
self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl)
|
||||
|
||||
async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None:
|
||||
self._send_modbus_cmd(func, addr, val, log_lvl)
|
||||
|
||||
'''
|
||||
Our puplic methods
|
||||
'''
|
||||
def close(self) -> None:
|
||||
# we have refernces to methods of this class in self.switch
|
||||
# so we have to erase self.switch, otherwise this instance can't be
|
||||
# deallocated by the garbage collector ==> we get a memory leak
|
||||
self.switch.clear()
|
||||
if self.server_side:
|
||||
# set inverter state to offline, if output power is very low
|
||||
logging.debug('close power: '
|
||||
f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}')
|
||||
if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2:
|
||||
self.db.set_db_def_value(Register.INVERTER_STATUS, 0)
|
||||
self.new_data['env'] = True
|
||||
self.mb_timer.close()
|
||||
self.state = State.closed
|
||||
self.ifc.rx_set_cb(None)
|
||||
self.ifc.prot_set_timeout_cb(None)
|
||||
self.ifc.prot_set_init_new_client_conn_cb(None)
|
||||
self.ifc.prot_set_update_header_cb(None)
|
||||
self.ifc = None
|
||||
|
||||
if self.mb:
|
||||
self.mb.close()
|
||||
self.mb = None
|
||||
# pragma: no cover
|
||||
|
||||
def inc_counter(self, counter: str) -> None:
|
||||
self.db.inc_counter(counter)
|
||||
self.new_stat_data['proxy'] = True
|
||||
Infos.new_stat_data['proxy'] = True
|
||||
|
||||
def dec_counter(self, counter: str) -> None:
|
||||
self.db.dec_counter(counter)
|
||||
self.new_stat_data['proxy'] = True
|
||||
|
||||
def set_serial_no(self, serial_no: str):
|
||||
|
||||
if self.unique_id == serial_no:
|
||||
logger.debug(f'SerialNo: {serial_no}')
|
||||
else:
|
||||
inverters = Config.get('inverters')
|
||||
# logger.debug(f'Inverters: {inverters}')
|
||||
|
||||
if serial_no in inverters:
|
||||
inv = inverters[serial_no]
|
||||
self.node_id = inv['node_id']
|
||||
self.sug_area = inv['suggested_area']
|
||||
logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501
|
||||
else:
|
||||
self.node_id = ''
|
||||
self.sug_area = ''
|
||||
if 'allow_all' not in inverters or not inverters['allow_all']:
|
||||
self.inc_counter('Unknown_SNR')
|
||||
self.unique_id = None
|
||||
logger.warning(f'ignore message from unknow inverter! (SerialNo: {serial_no})') # noqa: E501
|
||||
return
|
||||
logger.debug(f'SerialNo {serial_no} not known but accepted!')
|
||||
|
||||
self.unique_id = serial_no
|
||||
|
||||
def read(self) -> None:
|
||||
self._read()
|
||||
|
||||
if not self.header_valid:
|
||||
self.__parse_header(self._recv_buffer, len(self._recv_buffer))
|
||||
|
||||
if self.header_valid and len(self._recv_buffer) >= (self.header_len +
|
||||
self.data_len):
|
||||
hex_dump_memory(logging.INFO, f'Received from {self.addr}:',
|
||||
self._recv_buffer, self.header_len+self.data_len)
|
||||
|
||||
self.set_serial_no(self.id_str.decode("utf-8"))
|
||||
self.__dispatch_msg()
|
||||
self.__flush_recv_msg()
|
||||
return
|
||||
|
||||
def forward(self, buffer, buflen) -> None:
|
||||
tsun = Config.get('tsun')
|
||||
if tsun['enabled']:
|
||||
self._forward_buffer = buffer[:buflen]
|
||||
hex_dump_memory(logging.DEBUG, 'Store for forwarding:',
|
||||
buffer, buflen)
|
||||
|
||||
self.__parse_header(self._forward_buffer,
|
||||
len(self._forward_buffer))
|
||||
fnc = self.switch.get(self.msg_id, self.msg_unknown)
|
||||
logger.info(self.__flow_str(self.server_side, 'forwrd') +
|
||||
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
|
||||
return
|
||||
|
||||
'''
|
||||
Our private methods
|
||||
'''
|
||||
def __flow_str(self, server_side: bool, type:
|
||||
('rx', 'tx', 'forwrd', 'drop')): # noqa: F821
|
||||
switch = {
|
||||
'rx': ' <',
|
||||
'tx': ' >',
|
||||
'forwrd': '<< ',
|
||||
'drop': ' xx',
|
||||
'rxS': '> ',
|
||||
'txS': '< ',
|
||||
'forwrdS': ' >>',
|
||||
'dropS': 'xx ',
|
||||
}
|
||||
if server_side:
|
||||
type += 'S'
|
||||
return switch.get(type, '???')
|
||||
|
||||
def __timestamp(self):
|
||||
if False:
|
||||
# utc as epoche
|
||||
ts = time.time()
|
||||
else:
|
||||
# convert localtime in epoche
|
||||
ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
|
||||
return round(ts*1000)
|
||||
|
||||
# check if there is a complete header in the buffer, parse it
|
||||
# and set
|
||||
# self.header_len
|
||||
# self.data_len
|
||||
# self.id_str
|
||||
# self.ctrl
|
||||
# self.msg_id
|
||||
#
|
||||
# if the header is incomplete, than self.header_len is still 0
|
||||
#
|
||||
def __parse_header(self, buf: bytes, buf_len: int) -> None:
|
||||
|
||||
if (buf_len < 5): # enough bytes to read len and id_len?
|
||||
return
|
||||
result = struct.unpack_from('!lB', buf, 0)
|
||||
len = result[0] # len of complete message
|
||||
id_len = result[1] # len of variable id string
|
||||
|
||||
hdr_len = 5+id_len+2
|
||||
|
||||
if (buf_len < hdr_len): # enough bytes for complete header?
|
||||
return
|
||||
|
||||
result = struct.unpack_from(f'!{id_len+1}pBB', buf, 4)
|
||||
|
||||
# store parsed header values in the class
|
||||
self.id_str = result[0]
|
||||
self.ctrl = Control(result[1])
|
||||
self.msg_id = result[2]
|
||||
self.data_len = len-id_len-3
|
||||
self.header_len = hdr_len
|
||||
self.header_valid = True
|
||||
return
|
||||
|
||||
def __build_header(self, ctrl) -> None:
|
||||
self.send_msg_ofs = len(self._send_buffer)
|
||||
self._send_buffer += struct.pack(f'!l{len(self.id_str)+1}pBB',
|
||||
0, self.id_str, ctrl, self.msg_id)
|
||||
fnc = self.switch.get(self.msg_id, self.msg_unknown)
|
||||
logger.info(self.__flow_str(self.server_side, 'tx') +
|
||||
f' Ctl: {int(ctrl):#02x} Msg: {fnc.__name__!r}')
|
||||
|
||||
def __finish_send_msg(self) -> None:
|
||||
_len = len(self._send_buffer) - self.send_msg_ofs
|
||||
struct.pack_into('!l', self._send_buffer, self.send_msg_ofs, _len-4)
|
||||
|
||||
def __dispatch_msg(self) -> None:
|
||||
fnc = self.switch.get(self.msg_id, self.msg_unknown)
|
||||
if self.unique_id:
|
||||
logger.info(self.__flow_str(self.server_side, 'rx') +
|
||||
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
|
||||
fnc()
|
||||
else:
|
||||
logger.info(self.__flow_str(self.server_side, 'drop') +
|
||||
f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
|
||||
|
||||
def __flush_recv_msg(self) -> None:
|
||||
self._recv_buffer = self._recv_buffer[(self.header_len+self.data_len):]
|
||||
self.header_valid = False
|
||||
|
||||
'''
|
||||
Message handler methods
|
||||
'''
|
||||
def msg_contact_info(self):
|
||||
if self.ctrl.is_ind():
|
||||
self.__build_header(0x99)
|
||||
self._send_buffer += b'\x01'
|
||||
self.__finish_send_msg()
|
||||
elif self.ctrl.is_resp():
|
||||
return # ignore received response from tsun
|
||||
else:
|
||||
self.inc_counter('Unknown_Ctrl')
|
||||
|
||||
self.forward(self._recv_buffer, self.header_len+self.data_len)
|
||||
|
||||
def msg_get_time(self):
|
||||
if self.ctrl.is_ind():
|
||||
ts = self.__timestamp()
|
||||
logger.debug(f'time: {ts:08x}')
|
||||
|
||||
self.__build_header(0x99)
|
||||
self._send_buffer += struct.pack('!q', ts)
|
||||
self.__finish_send_msg()
|
||||
|
||||
elif self.ctrl.is_resp():
|
||||
result = struct.unpack_from('!q', self._recv_buffer,
|
||||
self.header_len)
|
||||
logger.debug(f'tsun-time: {result[0]:08x}')
|
||||
return # ignore received response from tsun
|
||||
else:
|
||||
self.inc_counter('Unknown_Ctrl')
|
||||
self.forward(self._recv_buffer, self.header_len+self.data_len)
|
||||
|
||||
def parse_msg_header(self):
|
||||
result = struct.unpack_from('!lB', self._recv_buffer, self.header_len)
|
||||
|
||||
data_id = result[0] # len of complete message
|
||||
id_len = result[1] # len of variable id string
|
||||
logger.debug(f'Data_ID: {data_id} id_len: {id_len}')
|
||||
|
||||
msg_hdr_len = 5+id_len+9
|
||||
|
||||
result = struct.unpack_from(f'!{id_len+1}pBq', self._recv_buffer,
|
||||
self.header_len + 4)
|
||||
|
||||
logger.debug(f'ID: {result[0]} B: {result[1]}')
|
||||
logger.debug(f'time: {result[2]:08x}')
|
||||
# logger.info(f'time: {datetime.utcfromtimestamp(result[2]).strftime(
|
||||
# "%Y-%m-%d %H:%M:%S")}')
|
||||
return msg_hdr_len
|
||||
|
||||
def msg_collector_data(self):
|
||||
if self.ctrl.is_ind():
|
||||
self.__build_header(0x99)
|
||||
self._send_buffer += b'\x01'
|
||||
self.__finish_send_msg()
|
||||
self.__process_data()
|
||||
|
||||
elif self.ctrl.is_resp():
|
||||
return # ignore received response
|
||||
else:
|
||||
self.inc_counter('Unknown_Ctrl')
|
||||
|
||||
self.forward(self._recv_buffer, self.header_len+self.data_len)
|
||||
|
||||
def msg_inverter_data(self):
|
||||
if self.ctrl.is_ind():
|
||||
self.__build_header(0x99)
|
||||
self._send_buffer += b'\x01'
|
||||
self.__finish_send_msg()
|
||||
self.__process_data()
|
||||
|
||||
elif self.ctrl.is_resp():
|
||||
return # ignore received response
|
||||
else:
|
||||
self.inc_counter('Unknown_Ctrl')
|
||||
|
||||
self.forward(self._recv_buffer, self.header_len+self.data_len)
|
||||
|
||||
def __process_data(self):
|
||||
msg_hdr_len = self.parse_msg_header()
|
||||
|
||||
for key, update in self.db.parse(self._recv_buffer[self.header_len
|
||||
+ msg_hdr_len:]):
|
||||
if update:
|
||||
self.new_data[key] = True
|
||||
|
||||
def msg_unknown(self):
|
||||
logger.warning(f"Unknow Msg: ID:{self.msg_id}")
|
||||
self.inc_counter('Unknown_Msg')
|
||||
self.forward(self._recv_buffer, self.header_len+self.data_len)
|
||||
Infos.new_stat_data['proxy'] = True
|
||||
|
||||
345
app/src/modbus.py
Normal file
345
app/src/modbus.py
Normal file
@@ -0,0 +1,345 @@
|
||||
'''MODBUS module for TSUN inverter support
|
||||
|
||||
TSUN uses the MODBUS in the RTU transmission mode over serial line.
|
||||
see: https://modbus.org/docs/Modbus_Application_Protocol_V1_1b3.pdf
|
||||
see: https://modbus.org/docs/Modbus_over_serial_line_V1_02.pdf
|
||||
|
||||
A Modbus PDU consists of: 'Function-Code' + 'Data'
|
||||
A Modbus RTU message consists of: 'Addr' + 'Modbus-PDU' + 'CRC-16'
|
||||
The inverter is a MODBUS server and the proxy the MODBUS client.
|
||||
|
||||
The 16-bit CRC is known as CRC-16-ANSI(reverse)
|
||||
see: https://en.wikipedia.org/wiki/Computation_of_cyclic_redundancy_checks
|
||||
'''
|
||||
import struct
|
||||
import logging
|
||||
import asyncio
|
||||
from typing import Generator, Callable
|
||||
|
||||
from infos import Register, Fmt
|
||||
|
||||
logger = logging.getLogger('data')
|
||||
|
||||
CRC_POLY = 0xA001 # (LSBF/reverse)
|
||||
CRC_INIT = 0xFFFF
|
||||
|
||||
|
||||
class Modbus():
|
||||
'''Simple MODBUS implementation with TX queue and retransmit timer'''
|
||||
INV_ADDR = 1
|
||||
'''MODBUS server address of the TSUN inverter'''
|
||||
READ_REGS = 3
|
||||
'''MODBUS function code: Read Holding Register'''
|
||||
READ_INPUTS = 4
|
||||
'''MODBUS function code: Read Input Register'''
|
||||
WRITE_SINGLE_REG = 6
|
||||
'''Modbus function code: Write Single Register'''
|
||||
|
||||
__crc_tab = []
|
||||
mb_reg_mapping = {
|
||||
0x2000: {'reg': Register.BOOT_STATUS, 'fmt': '!H'}, # noqa: E501
|
||||
0x2001: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, # noqa: E501
|
||||
0x2003: {'reg': Register.WORK_MODE, 'fmt': '!H'},
|
||||
0x2006: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'},
|
||||
0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501
|
||||
0x2008: {'reg': Register.RATED_LEVEL, 'fmt': '!H'},
|
||||
0x2009: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501
|
||||
0x200a: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'},
|
||||
0x2010: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'},
|
||||
0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501
|
||||
|
||||
0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501
|
||||
0x3001: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501
|
||||
0x3002: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501
|
||||
0x3003: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501
|
||||
0x3004: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501
|
||||
0x3005: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501
|
||||
0x3006: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501
|
||||
|
||||
0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501
|
||||
0x3009: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x300a: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x300b: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501
|
||||
# 0x300d
|
||||
0x300e: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501
|
||||
0x300f: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x3010: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x3011: {'reg': Register.PV1_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x3012: {'reg': Register.PV1_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x3013: {'reg': Register.PV2_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x3014: {'reg': Register.PV2_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x3015: {'reg': Register.PV2_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x3016: {'reg': Register.PV3_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x3017: {'reg': Register.PV3_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x3018: {'reg': Register.PV3_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x3019: {'reg': Register.PV4_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x301a: {'reg': Register.PV4_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x301b: {'reg': Register.PV4_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501
|
||||
0x301c: {'reg': Register.DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x301d: {'reg': Register.TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
|
||||
0x301f: {'reg': Register.PV1_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x3020: {'reg': Register.PV1_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
|
||||
0x3022: {'reg': Register.PV2_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x3023: {'reg': Register.PV2_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
|
||||
0x3025: {'reg': Register.PV3_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x3026: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
|
||||
0x3028: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501
|
||||
0x3029: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501
|
||||
# 0x302a
|
||||
}
|
||||
|
||||
def __init__(self, snd_handler: Callable[[bytes, int, str], None],
|
||||
timeout: int = 1):
|
||||
if not len(self.__crc_tab):
|
||||
self.__build_crc_tab(CRC_POLY)
|
||||
self.que = asyncio.Queue(100)
|
||||
self.snd_handler = snd_handler
|
||||
'''Send handler to transmit a MODBUS RTU request'''
|
||||
self.rsp_handler = None
|
||||
'''Response handler to forward the response'''
|
||||
self.timeout = timeout
|
||||
'''MODBUS response timeout in seconds'''
|
||||
self.max_retries = 1
|
||||
'''Max retransmit for MODBUS requests'''
|
||||
self.retry_cnt = 0
|
||||
self.last_req = b''
|
||||
self.counter = {}
|
||||
'''Dictenary with statistic counter'''
|
||||
self.counter['timeouts'] = 0
|
||||
self.counter['retries'] = {}
|
||||
for i in range(0, self.max_retries+1):
|
||||
self.counter['retries'][f'{i}'] = 0
|
||||
self.last_log_lvl = logging.DEBUG
|
||||
self.last_addr = 0
|
||||
self.last_fcode = 0
|
||||
self.last_len = 0
|
||||
self.last_reg = 0
|
||||
self.err = 0
|
||||
self.loop = asyncio.get_event_loop()
|
||||
self.req_pend = False
|
||||
self.tim = None
|
||||
self.node_id = ''
|
||||
|
||||
def close(self):
|
||||
"""free the queue and erase the callback handlers"""
|
||||
logging.debug('Modbus close:')
|
||||
self.__stop_timer()
|
||||
self.rsp_handler = None
|
||||
self.snd_handler = None
|
||||
while not self.que.empty():
|
||||
self.que.get_nowait()
|
||||
|
||||
def set_node_id(self, node_id: str):
|
||||
self.node_id = node_id
|
||||
|
||||
def build_msg(self, addr: int, func: int, reg: int, val: int,
|
||||
log_lvl=logging.DEBUG) -> None:
|
||||
"""Build MODBUS RTU request frame and add it to the tx queue
|
||||
|
||||
Keyword arguments:
|
||||
addr: RTU server address (inverter)
|
||||
func: MODBUS function code
|
||||
reg: 16-bit register number
|
||||
val: 16 bit value
|
||||
"""
|
||||
msg = struct.pack('>BBHH', addr, func, reg, val)
|
||||
msg += struct.pack('<H', self.__calc_crc(msg))
|
||||
self.que.put_nowait({'req': msg,
|
||||
'rsp_hdl': None,
|
||||
'log_lvl': log_lvl})
|
||||
if self.que.qsize() == 1:
|
||||
self.__send_next_from_que()
|
||||
|
||||
def recv_req(self, buf: bytes,
|
||||
rsp_handler: Callable[[None], None] = None) -> bool:
|
||||
"""Add the received Modbus RTU request to the tx queue
|
||||
|
||||
Keyword arguments:
|
||||
buf: Modbus RTU pdu incl ADDR byte and trailing CRC
|
||||
rsp_handler: Callback, if the received pdu is valid
|
||||
|
||||
Returns:
|
||||
True: PDU was added to the queue
|
||||
False: PDU was ignored, due to an error
|
||||
"""
|
||||
# logging.info(f'recv_req: first byte modbus:{buf[0]} len:{len(buf)}')
|
||||
if not self.__check_crc(buf):
|
||||
self.err = 1
|
||||
logger.error('Modbus recv: CRC error')
|
||||
return False
|
||||
self.que.put_nowait({'req': buf,
|
||||
'rsp_hdl': rsp_handler,
|
||||
'log_lvl': logging.INFO})
|
||||
if self.que.qsize() == 1:
|
||||
self.__send_next_from_que()
|
||||
|
||||
return True
|
||||
|
||||
def recv_resp(self, info_db, buf: bytes) -> \
|
||||
Generator[tuple[str, bool, int | float | str], None, None]:
|
||||
"""Generator which check and parse a received MODBUS response.
|
||||
|
||||
Keyword arguments:
|
||||
info_db: database for info lockups
|
||||
buf: received Modbus RTU response frame
|
||||
|
||||
Returns on error and set Self.err to:
|
||||
1: CRC error
|
||||
2: Wrong server address
|
||||
3: Unexpected function code
|
||||
4: Unexpected data length
|
||||
5: No MODBUS request pending
|
||||
"""
|
||||
# logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}')
|
||||
|
||||
fcode = buf[1]
|
||||
data_available = self.last_addr == self.INV_ADDR and \
|
||||
(fcode == 3 or fcode == 4)
|
||||
|
||||
if self.__resp_error_check(buf, data_available):
|
||||
return
|
||||
|
||||
if data_available:
|
||||
elmlen = buf[2] >> 1
|
||||
first_reg = self.last_reg # save last_reg before sending next pdu
|
||||
self.__stop_timer() # stop timer and send next pdu
|
||||
yield from self.__process_data(info_db, buf, first_reg, elmlen)
|
||||
else:
|
||||
self.__stop_timer()
|
||||
|
||||
self.counter['retries'][f'{self.retry_cnt}'] += 1
|
||||
if self.rsp_handler:
|
||||
self.rsp_handler()
|
||||
self.__send_next_from_que()
|
||||
|
||||
def __resp_error_check(self, buf: bytes, data_available: bool) -> bool:
|
||||
'''Check the MODBUS response for errors, returns True if one accure'''
|
||||
if not self.req_pend:
|
||||
self.err = 5
|
||||
return True
|
||||
if not self.__check_crc(buf):
|
||||
logger.error(f'[{self.node_id}] Modbus resp: CRC error')
|
||||
self.err = 1
|
||||
return True
|
||||
if buf[0] != self.last_addr:
|
||||
logger.info(f'[{self.node_id}] Modbus resp: Wrong addr {buf[0]}')
|
||||
self.err = 2
|
||||
return True
|
||||
fcode = buf[1]
|
||||
if fcode != self.last_fcode:
|
||||
logger.info(f'[{self.node_id}] Modbus: Wrong fcode {fcode}'
|
||||
f' != {self.last_fcode}')
|
||||
self.err = 3
|
||||
return True
|
||||
if data_available:
|
||||
elmlen = buf[2] >> 1
|
||||
if elmlen != self.last_len:
|
||||
logger.info(f'[{self.node_id}] Modbus: len error {elmlen}'
|
||||
f' != {self.last_len}')
|
||||
self.err = 4
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def __process_data(self, info_db, buf: bytes, first_reg, elmlen):
|
||||
'''Generator over received registers, updates the db'''
|
||||
for i in range(0, elmlen):
|
||||
addr = first_reg+i
|
||||
if addr in self.mb_reg_mapping:
|
||||
row = self.mb_reg_mapping[addr]
|
||||
info_id = row['reg']
|
||||
keys, level, unit, must_incr = info_db._key_obj(info_id)
|
||||
if keys:
|
||||
result = Fmt.get_value(buf, 3+2*i, row)
|
||||
name, update = info_db.update_db(keys, must_incr,
|
||||
result)
|
||||
yield keys[0], update, result
|
||||
if update:
|
||||
info_db.tracer.log(level,
|
||||
f'[{self.node_id}] MODBUS: {name}'
|
||||
f' : {result}{unit}')
|
||||
|
||||
'''
|
||||
MODBUS response timer
|
||||
'''
|
||||
def __start_timer(self) -> None:
|
||||
'''Start response timer and set `req_pend` to True'''
|
||||
self.req_pend = True
|
||||
self.tim = self.loop.call_later(self.timeout, self.__timeout_cb)
|
||||
# logging.debug(f'Modbus start timer {self}')
|
||||
|
||||
def __stop_timer(self) -> None:
|
||||
'''Stop response timer and set `req_pend` to False'''
|
||||
self.req_pend = False
|
||||
# logging.debug(f'Modbus stop timer {self}')
|
||||
if self.tim:
|
||||
self.tim.cancel()
|
||||
self.tim = None
|
||||
|
||||
def __timeout_cb(self) -> None:
|
||||
'''Rsponse timeout handler retransmit pdu or send next pdu'''
|
||||
self.req_pend = False
|
||||
|
||||
if self.retry_cnt < self.max_retries:
|
||||
logger.debug(f'Modbus retrans {self}')
|
||||
self.retry_cnt += 1
|
||||
self.__start_timer()
|
||||
self.snd_handler(self.last_req, self.last_log_lvl, state='Retrans')
|
||||
else:
|
||||
logger.info(f'[{self.node_id}] Modbus timeout '
|
||||
f'(FCode: {self.last_fcode} '
|
||||
f'Reg: 0x{self.last_reg:04x}, '
|
||||
f'{self.last_len})')
|
||||
self.counter['timeouts'] += 1
|
||||
self.__send_next_from_que()
|
||||
|
||||
def __send_next_from_que(self) -> None:
|
||||
'''Get next MODBUS pdu from queue and transmit it'''
|
||||
if self.req_pend:
|
||||
return
|
||||
try:
|
||||
item = self.que.get_nowait()
|
||||
req = item['req']
|
||||
self.last_req = req
|
||||
self.rsp_handler = item['rsp_hdl']
|
||||
self.last_log_lvl = item['log_lvl']
|
||||
self.last_addr = req[0]
|
||||
self.last_fcode = req[1]
|
||||
|
||||
res = struct.unpack_from('>HH', req, 2)
|
||||
self.last_reg = res[0]
|
||||
self.last_len = res[1]
|
||||
self.retry_cnt = 0
|
||||
self.__start_timer()
|
||||
self.snd_handler(self.last_req, self.last_log_lvl, state='Command')
|
||||
except asyncio.QueueEmpty:
|
||||
pass
|
||||
|
||||
'''
|
||||
Helper function for CRC-16 handling
|
||||
'''
|
||||
def __check_crc(self, msg: bytes) -> bool:
|
||||
'''Check CRC-16 and returns True if valid'''
|
||||
return 0 == self.__calc_crc(msg)
|
||||
|
||||
def __calc_crc(self, buffer: bytes) -> int:
|
||||
'''Build CRC-16 for buffer and returns it'''
|
||||
crc = CRC_INIT
|
||||
|
||||
for cur in buffer:
|
||||
crc = (crc >> 8) ^ self.__crc_tab[(crc ^ cur) & 0xFF]
|
||||
return crc
|
||||
|
||||
def __build_crc_tab(self, poly: int) -> None:
|
||||
'''Build CRC-16 helper table, must be called exactly one time'''
|
||||
for index in range(256):
|
||||
data = index << 1
|
||||
crc = 0
|
||||
for _ in range(8, 0, -1):
|
||||
data >>= 1
|
||||
if (data ^ crc) & 1:
|
||||
crc = (crc >> 1) ^ poly
|
||||
else:
|
||||
crc >>= 1
|
||||
self.__crc_tab.append(crc)
|
||||
88
app/src/modbus_tcp.py
Normal file
88
app/src/modbus_tcp.py
Normal file
@@ -0,0 +1,88 @@
|
||||
import logging
|
||||
import traceback
|
||||
import asyncio
|
||||
|
||||
from cnf.config import Config
|
||||
from gen3plus.inverter_g3p import InverterG3P
|
||||
from infos import Infos
|
||||
|
||||
logger = logging.getLogger('conn')
|
||||
|
||||
|
||||
class ModbusConn():
|
||||
def __init__(self, host, port):
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.addr = (host, port)
|
||||
self.inverter = None
|
||||
|
||||
async def __aenter__(self) -> 'InverterG3P':
|
||||
'''Establish a client connection to the TSUN cloud'''
|
||||
connection = asyncio.open_connection(self.host, self.port)
|
||||
reader, writer = await connection
|
||||
self.inverter = InverterG3P(reader, writer,
|
||||
client_mode=True)
|
||||
self.inverter.__enter__()
|
||||
stream = self.inverter.local.stream
|
||||
logging.info(f'[{stream.node_id}:{stream.conn_no}] '
|
||||
f'Connected to {self.addr}')
|
||||
Infos.inc_counter('Inverter_Cnt')
|
||||
await self.inverter.local.ifc.publish_outstanding_mqtt()
|
||||
return self.inverter
|
||||
|
||||
async def __aexit__(self, exc_type, exc, tb):
|
||||
Infos.dec_counter('Inverter_Cnt')
|
||||
await self.inverter.local.ifc.publish_outstanding_mqtt()
|
||||
self.inverter.__exit__(exc_type, exc, tb)
|
||||
|
||||
|
||||
class ModbusTcp():
|
||||
|
||||
def __init__(self, loop, tim_restart=10) -> None:
|
||||
self.tim_restart = tim_restart
|
||||
|
||||
inverters = Config.get('inverters')
|
||||
# logging.info(f'Inverters: {inverters}')
|
||||
|
||||
for inv in inverters.values():
|
||||
if (type(inv) is dict
|
||||
and 'monitor_sn' in inv
|
||||
and 'client_mode' in inv):
|
||||
client = inv['client_mode']
|
||||
logger.info(f"'client_mode' for snr: {inv['monitor_sn']} host: {client['host']}:{client['port']}, forward: {client['forward']}") # noqa: E501
|
||||
loop.create_task(self.modbus_loop(client['host'],
|
||||
client['port'],
|
||||
inv['monitor_sn'],
|
||||
client['forward']))
|
||||
|
||||
async def modbus_loop(self, host, port,
|
||||
snr: int, forward: bool) -> None:
|
||||
'''Loop for receiving messages from the TSUN cloud (client-side)'''
|
||||
while True:
|
||||
try:
|
||||
async with ModbusConn(host, port) as inverter:
|
||||
stream = inverter.local.stream
|
||||
await stream.send_start_cmd(snr, host, forward)
|
||||
await stream.ifc.loop()
|
||||
logger.info(f'[{stream.node_id}:{stream.conn_no}] '
|
||||
f'Connection closed - Shutdown: '
|
||||
f'{stream.shutdown_started}')
|
||||
if stream.shutdown_started:
|
||||
return
|
||||
del inverter # decrease ref counter after the with block
|
||||
|
||||
except (ConnectionRefusedError, TimeoutError) as error:
|
||||
logging.debug(f'Inv-conn:{error}')
|
||||
|
||||
except OSError as error:
|
||||
if error.errno == 113: # pragma: no cover
|
||||
logging.debug(f'os-error:{error}')
|
||||
else:
|
||||
logging.info(f'os-error: {error}')
|
||||
|
||||
except Exception:
|
||||
logging.error(
|
||||
f"ModbusTcpCreate: Exception for {(host, port)}:\n"
|
||||
f"{traceback.format_exc()}")
|
||||
|
||||
await asyncio.sleep(self.tim_restart)
|
||||
175
app/src/mqtt.py
175
app/src/mqtt.py
@@ -1,34 +1,36 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import aiomqtt
|
||||
from config import Config
|
||||
import traceback
|
||||
|
||||
from modbus import Modbus
|
||||
from messages import Message
|
||||
from cnf.config import Config
|
||||
from singleton import Singleton
|
||||
|
||||
logger_mqtt = logging.getLogger('mqtt')
|
||||
|
||||
|
||||
class Singleton(type):
|
||||
_instances = {}
|
||||
|
||||
def __call__(cls, *args, **kwargs):
|
||||
logger_mqtt.debug('singleton: __call__')
|
||||
if cls not in cls._instances:
|
||||
cls._instances[cls] = super(Singleton,
|
||||
cls).__call__(*args, **kwargs)
|
||||
return cls._instances[cls]
|
||||
|
||||
|
||||
class Mqtt(metaclass=Singleton):
|
||||
client = None
|
||||
cb_MqttIsUp = None
|
||||
__client = None
|
||||
__cb_mqtt_is_up = None
|
||||
|
||||
def __init__(self, cb_MqttIsUp):
|
||||
def __init__(self, cb_mqtt_is_up):
|
||||
logger_mqtt.debug('MQTT: __init__')
|
||||
if cb_MqttIsUp:
|
||||
self.cb_MqttIsUp = cb_MqttIsUp
|
||||
if cb_mqtt_is_up:
|
||||
self.__cb_mqtt_is_up = cb_mqtt_is_up
|
||||
loop = asyncio.get_event_loop()
|
||||
self.task = loop.create_task(self.__loop())
|
||||
self.ha_restarts = 0
|
||||
|
||||
ha = Config.get('ha')
|
||||
self.ha_status_topic = f"{ha['auto_conf_prefix']}/status"
|
||||
self.mb_rated_topic = f"{ha['entity_prefix']}/+/rated_load"
|
||||
self.mb_out_coeff_topic = f"{ha['entity_prefix']}/+/out_coeff"
|
||||
self.mb_reads_topic = f"{ha['entity_prefix']}/+/modbus_read_regs"
|
||||
self.mb_inputs_topic = f"{ha['entity_prefix']}/+/modbus_read_inputs"
|
||||
self.mb_at_cmd_topic = f"{ha['entity_prefix']}/+/at_cmd"
|
||||
|
||||
@property
|
||||
def ha_restarts(self):
|
||||
return self._ha_restarts
|
||||
@@ -37,57 +39,144 @@ class Mqtt(metaclass=Singleton):
|
||||
def ha_restarts(self, value):
|
||||
self._ha_restarts = value
|
||||
|
||||
def __del__(self):
|
||||
logger_mqtt.debug('MQTT: __del__')
|
||||
|
||||
async def close(self) -> None:
|
||||
logger_mqtt.debug('MQTT: close')
|
||||
self.task.cancel()
|
||||
try:
|
||||
await self.task
|
||||
except Exception as e:
|
||||
|
||||
except (asyncio.CancelledError, Exception) as e:
|
||||
logging.debug(f"Mqtt.close: exception: {e} ...")
|
||||
|
||||
async def publish(self, topic: str, payload: str | bytes | bytearray
|
||||
| int | float | None = None) -> None:
|
||||
if self.client:
|
||||
await self.client.publish(topic, payload)
|
||||
if self.__client:
|
||||
await self.__client.publish(topic, payload)
|
||||
|
||||
async def __loop(self) -> None:
|
||||
mqtt = Config.get('mqtt')
|
||||
ha = Config.get('ha')
|
||||
logger_mqtt.info(f'start MQTT: host:{mqtt["host"]} port:'
|
||||
f'{mqtt["port"]} '
|
||||
f'user:{mqtt["user"]}')
|
||||
self.client = aiomqtt.Client(hostname=mqtt['host'], port=mqtt['port'],
|
||||
username=mqtt['user'],
|
||||
password=mqtt['passwd'])
|
||||
self.__client = aiomqtt.Client(hostname=mqtt['host'],
|
||||
port=mqtt['port'],
|
||||
username=mqtt['user'],
|
||||
password=mqtt['passwd'])
|
||||
|
||||
interval = 5 # Seconds
|
||||
|
||||
while True:
|
||||
try:
|
||||
async with self.client:
|
||||
async with self.__client:
|
||||
logger_mqtt.info('MQTT broker connection established')
|
||||
|
||||
if self.cb_MqttIsUp:
|
||||
await self.cb_MqttIsUp()
|
||||
if self.__cb_mqtt_is_up:
|
||||
await self.__cb_mqtt_is_up()
|
||||
|
||||
async with self.client.messages() as messages:
|
||||
await self.client.subscribe(f"{ha['auto_conf_prefix']}"
|
||||
"/status")
|
||||
async for message in messages:
|
||||
status = message.payload.decode("UTF-8")
|
||||
logger_mqtt.info('Home-Assistant Status:'
|
||||
f' {status}')
|
||||
if status == 'online':
|
||||
self.ha_restarts += 1
|
||||
await self.cb_MqttIsUp()
|
||||
await self.__client.subscribe(self.ha_status_topic)
|
||||
await self.__client.subscribe(self.mb_rated_topic)
|
||||
await self.__client.subscribe(self.mb_out_coeff_topic)
|
||||
await self.__client.subscribe(self.mb_reads_topic)
|
||||
await self.__client.subscribe(self.mb_inputs_topic)
|
||||
await self.__client.subscribe(self.mb_at_cmd_topic)
|
||||
|
||||
async for message in self.__client.messages:
|
||||
await self.dispatch_msg(message)
|
||||
|
||||
except aiomqtt.MqttError:
|
||||
logger_mqtt.info(f"Connection lost; Reconnecting in {interval}"
|
||||
" seconds ...")
|
||||
if Config.is_default('mqtt'):
|
||||
logger_mqtt.info(
|
||||
"MQTT is unconfigured; Check your config.toml!")
|
||||
interval = 30
|
||||
else:
|
||||
interval = 5 # Seconds
|
||||
logger_mqtt.info(
|
||||
f"Connection lost; Reconnecting in {interval}"
|
||||
" seconds ...")
|
||||
|
||||
await asyncio.sleep(interval)
|
||||
except asyncio.CancelledError:
|
||||
logger_mqtt.debug("MQTT task cancelled")
|
||||
self.client = None
|
||||
self.__client = None
|
||||
return
|
||||
except Exception:
|
||||
# self.inc_counter('SW_Exception') # fixme
|
||||
logger_mqtt.error(
|
||||
f"Exception:\n"
|
||||
f"{traceback.format_exc()}")
|
||||
|
||||
async def dispatch_msg(self, message):
|
||||
if message.topic.matches(self.ha_status_topic):
|
||||
status = message.payload.decode("UTF-8")
|
||||
logger_mqtt.info('Home-Assistant Status:'
|
||||
f' {status}')
|
||||
if status == 'online':
|
||||
self.ha_restarts += 1
|
||||
await self.__cb_mqtt_is_up()
|
||||
|
||||
if message.topic.matches(self.mb_rated_topic):
|
||||
await self.modbus_cmd(message,
|
||||
Modbus.WRITE_SINGLE_REG,
|
||||
1, 0x2008)
|
||||
|
||||
if message.topic.matches(self.mb_out_coeff_topic):
|
||||
payload = message.payload.decode("UTF-8")
|
||||
try:
|
||||
val = round(float(payload) * 1024/100)
|
||||
if val < 0 or val > 1024:
|
||||
logger_mqtt.error('out_coeff: value must be in'
|
||||
'the range 0..100,'
|
||||
f' got: {payload}')
|
||||
else:
|
||||
await self.modbus_cmd(message,
|
||||
Modbus.WRITE_SINGLE_REG,
|
||||
0, 0x202c, val)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if message.topic.matches(self.mb_reads_topic):
|
||||
await self.modbus_cmd(message,
|
||||
Modbus.READ_REGS, 2)
|
||||
|
||||
if message.topic.matches(self.mb_inputs_topic):
|
||||
await self.modbus_cmd(message,
|
||||
Modbus.READ_INPUTS, 2)
|
||||
|
||||
if message.topic.matches(self.mb_at_cmd_topic):
|
||||
await self.at_cmd(message)
|
||||
|
||||
def each_inverter(self, message, func_name: str):
|
||||
topic = str(message.topic)
|
||||
node_id = topic.split('/')[1] + '/'
|
||||
for m in Message:
|
||||
if m.server_side and (m.node_id == node_id):
|
||||
logger_mqtt.debug(f'Found: {node_id}')
|
||||
fnc = getattr(m, func_name, None)
|
||||
if callable(fnc):
|
||||
yield fnc
|
||||
else:
|
||||
logger_mqtt.warning(f'Cmd not supported by: {node_id}')
|
||||
break
|
||||
|
||||
else:
|
||||
logger_mqtt.warning(f'Node_id: {node_id} not found')
|
||||
|
||||
async def modbus_cmd(self, message, func, params=0, addr=0, val=0):
|
||||
payload = message.payload.decode("UTF-8")
|
||||
for fnc in self.each_inverter(message, "send_modbus_cmd"):
|
||||
res = payload.split(',')
|
||||
if params > 0 and params != len(res):
|
||||
logger_mqtt.error(f'Parameter expected: {params}, '
|
||||
f'got: {len(res)}')
|
||||
return
|
||||
if params == 1:
|
||||
val = int(payload)
|
||||
elif params == 2:
|
||||
addr = int(res[0], base=16)
|
||||
val = int(res[1]) # lenght
|
||||
await fnc(func, addr, val, logging.INFO)
|
||||
|
||||
async def at_cmd(self, message):
|
||||
payload = message.payload.decode("UTF-8")
|
||||
for fnc in self.each_inverter(message, "send_at_cmd"):
|
||||
await fnc(payload)
|
||||
|
||||
35
app/src/my_timer.py
Normal file
35
app/src/my_timer.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from itertools import count
|
||||
|
||||
|
||||
class Timer:
|
||||
def __init__(self, cb, id_str: str = ''):
|
||||
self.__timeout_cb = cb
|
||||
self.loop = asyncio.get_event_loop()
|
||||
self.tim = None
|
||||
self.id_str = id_str
|
||||
self.exp_count = count(0)
|
||||
|
||||
def start(self, timeout: float) -> None:
|
||||
'''Start timer with timeout seconds'''
|
||||
if self.tim:
|
||||
self.tim.cancel()
|
||||
self.tim = self.loop.call_later(timeout, self.__timeout)
|
||||
logging.debug(f'[{self.id_str}]Start timer')
|
||||
|
||||
def stop(self) -> None:
|
||||
'''Stop timer'''
|
||||
logging.debug(f'[{self.id_str}]Stop timer')
|
||||
if self.tim:
|
||||
self.tim.cancel()
|
||||
self.tim = None
|
||||
|
||||
def __timeout(self) -> None:
|
||||
'''timer expired handler'''
|
||||
logging.debug(f'[{self.id_str}]Timer expired')
|
||||
self.__timeout_cb(next(self.exp_count))
|
||||
|
||||
def close(self) -> None:
|
||||
self.stop()
|
||||
self.__timeout_cb = None
|
||||
17
app/src/protocol_ifc.py
Normal file
17
app/src/protocol_ifc.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from abc import abstractmethod
|
||||
|
||||
from async_ifc import AsyncIfc
|
||||
from iter_registry import AbstractIterMeta
|
||||
|
||||
|
||||
class ProtocolIfc(metaclass=AbstractIterMeta):
|
||||
_registry = []
|
||||
|
||||
@abstractmethod
|
||||
def __init__(self, addr, ifc: "AsyncIfc", server_side: bool,
|
||||
client_mode: bool = False, id_str=b''):
|
||||
pass # pragma: no cover
|
||||
|
||||
@abstractmethod
|
||||
def close(self):
|
||||
pass # pragma: no cover
|
||||
101
app/src/proxy.py
Normal file
101
app/src/proxy.py
Normal file
@@ -0,0 +1,101 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import json
|
||||
|
||||
from cnf.config import Config
|
||||
from mqtt import Mqtt
|
||||
from infos import Infos
|
||||
|
||||
logger_mqtt = logging.getLogger('mqtt')
|
||||
|
||||
|
||||
class Proxy():
|
||||
'''class Proxy is a baseclass
|
||||
|
||||
The class has some class method for managing common resources like a
|
||||
connection to the MQTT broker or proxy error counter which are common
|
||||
for all inverter connection
|
||||
|
||||
Instances of the class are connections to an inverter and can have an
|
||||
optional link to an remote connection to the TSUN cloud. A remote
|
||||
connection dies with the inverter connection.
|
||||
|
||||
class methods:
|
||||
class_init(): initialize the common resources of the proxy (MQTT
|
||||
broker, Proxy DB, etc). Must be called before the
|
||||
first inverter instance can be created
|
||||
class_close(): release the common resources of the proxy. Should not
|
||||
be called before any instances of the class are
|
||||
destroyed
|
||||
|
||||
methods:
|
||||
create_remote(): Establish a client connection to the TSUN cloud
|
||||
async_publ_mqtt(): Publish data to MQTT broker
|
||||
'''
|
||||
@classmethod
|
||||
def class_init(cls) -> None:
|
||||
logging.debug('Proxy.class_init')
|
||||
# initialize the proxy statistics
|
||||
Infos.static_init()
|
||||
cls.db_stat = Infos()
|
||||
|
||||
ha = Config.get('ha')
|
||||
cls.entity_prfx = ha['entity_prefix'] + '/'
|
||||
cls.discovery_prfx = ha['discovery_prefix'] + '/'
|
||||
cls.proxy_node_id = ha['proxy_node_id'] + '/'
|
||||
cls.proxy_unique_id = ha['proxy_unique_id']
|
||||
|
||||
# call Mqtt singleton to establisch the connection to the mqtt broker
|
||||
cls.mqtt = Mqtt(cls._cb_mqtt_is_up)
|
||||
|
||||
# register all counters which should be reset at midnight.
|
||||
# This is needed if the proxy is restated before midnight
|
||||
# and the inverters are offline, cause the normal refgistering
|
||||
# needs an update on the counters.
|
||||
# Without this registration here the counters would not be
|
||||
# reset at midnight when you restart the proxy just before
|
||||
# midnight!
|
||||
inverters = Config.get('inverters')
|
||||
# logger.debug(f'Proxys: {inverters}')
|
||||
for inv in inverters.values():
|
||||
if (type(inv) is dict):
|
||||
node_id = inv['node_id']
|
||||
cls.db_stat.reg_clr_at_midnight(f'{cls.entity_prfx}{node_id}',
|
||||
check_dependencies=False)
|
||||
|
||||
@classmethod
|
||||
async def _cb_mqtt_is_up(cls) -> None:
|
||||
logging.info('Initialize proxy device on home assistant')
|
||||
# register proxy status counters at home assistant
|
||||
await cls._register_proxy_stat_home_assistant()
|
||||
|
||||
# send values of the proxy status counters
|
||||
await asyncio.sleep(0.5) # wait a bit, before sending data
|
||||
Infos.new_stat_data['proxy'] = True # force sending data to sync ha
|
||||
await cls._async_publ_mqtt_proxy_stat('proxy')
|
||||
|
||||
@classmethod
|
||||
async def _register_proxy_stat_home_assistant(cls) -> None:
|
||||
'''register all our topics at home assistant'''
|
||||
for data_json, component, node_id, id in cls.db_stat.ha_proxy_confs(
|
||||
cls.entity_prfx, cls.proxy_node_id, cls.proxy_unique_id):
|
||||
logger_mqtt.debug(f"MQTT Register: cmp:'{component}' node_id:'{node_id}' {data_json}") # noqa: E501
|
||||
await cls.mqtt.publish(f'{cls.discovery_prfx}{component}/{node_id}{id}/config', data_json) # noqa: E501
|
||||
|
||||
@classmethod
|
||||
async def _async_publ_mqtt_proxy_stat(cls, key) -> None:
|
||||
stat = Infos.stat
|
||||
if key in stat and Infos.new_stat_data[key]:
|
||||
data_json = json.dumps(stat[key])
|
||||
node_id = cls.proxy_node_id
|
||||
logger_mqtt.debug(f'{key}: {data_json}')
|
||||
await cls.mqtt.publish(f"{cls.entity_prfx}{node_id}{key}",
|
||||
data_json)
|
||||
Infos.new_stat_data[key] = False
|
||||
|
||||
@classmethod
|
||||
def class_close(cls, loop) -> None: # pragma: no cover
|
||||
logging.debug('Proxy.class_close')
|
||||
logging.info('Close MQTT Task')
|
||||
loop.run_until_complete(cls.mqtt.close())
|
||||
cls.mqtt = None
|
||||
30
app/src/scheduler.py
Normal file
30
app/src/scheduler.py
Normal file
@@ -0,0 +1,30 @@
|
||||
import logging
|
||||
import json
|
||||
from mqtt import Mqtt
|
||||
from aiocron import crontab
|
||||
from infos import ClrAtMidnight
|
||||
|
||||
logger_mqtt = logging.getLogger('mqtt')
|
||||
|
||||
|
||||
class Schedule:
|
||||
mqtt = None
|
||||
count = 0
|
||||
|
||||
@classmethod
|
||||
def start(cls) -> None:
|
||||
'''Start the scheduler and schedule the tasks (cron jobs)'''
|
||||
logging.debug("Scheduler init")
|
||||
cls.mqtt = Mqtt(None)
|
||||
|
||||
crontab('0 0 * * *', func=cls.atmidnight, start=True)
|
||||
|
||||
@classmethod
|
||||
async def atmidnight(cls) -> None:
|
||||
'''Clear daily counters at midnight'''
|
||||
logging.info("Clear daily counters at midnight")
|
||||
|
||||
for key, data in ClrAtMidnight.elm():
|
||||
logger_mqtt.debug(f'{key}: {data}')
|
||||
data_json = json.dumps(data)
|
||||
await cls.mqtt.publish(f"{key}", data_json)
|
||||
@@ -1,44 +1,128 @@
|
||||
import logging
|
||||
import asyncio
|
||||
import logging.handlers
|
||||
import signal
|
||||
import functools
|
||||
import os
|
||||
import argparse
|
||||
from asyncio import StreamReader, StreamWriter
|
||||
from aiohttp import web
|
||||
from logging import config # noqa F401
|
||||
from async_stream import AsyncStream
|
||||
from inverter import Inverter
|
||||
from config import Config
|
||||
from proxy import Proxy
|
||||
from inverter_ifc import InverterIfc
|
||||
from gen3.inverter_g3 import InverterG3
|
||||
from gen3plus.inverter_g3p import InverterG3P
|
||||
from scheduler import Schedule
|
||||
from cnf.config import Config
|
||||
from cnf.config_read_env import ConfigReadEnv
|
||||
from cnf.config_read_toml import ConfigReadToml
|
||||
from cnf.config_read_json import ConfigReadJson
|
||||
from modbus_tcp import ModbusTcp
|
||||
|
||||
routes = web.RouteTableDef()
|
||||
proxy_is_up = False
|
||||
|
||||
|
||||
async def handle_client(reader, writer):
|
||||
@routes.get('/')
|
||||
async def hello(request):
|
||||
return web.Response(text="Hello, world")
|
||||
|
||||
|
||||
@routes.get('/-/ready')
|
||||
async def ready(request):
|
||||
if proxy_is_up:
|
||||
status = 200
|
||||
text = 'Is ready'
|
||||
else:
|
||||
status = 503
|
||||
text = 'Not ready'
|
||||
return web.Response(status=status, text=text)
|
||||
|
||||
|
||||
@routes.get('/-/healthy')
|
||||
async def healthy(request):
|
||||
|
||||
if proxy_is_up:
|
||||
# logging.info('web reqeust healthy()')
|
||||
for inverter in InverterIfc:
|
||||
try:
|
||||
res = inverter.healthy()
|
||||
if not res:
|
||||
return web.Response(status=503, text="I have a problem")
|
||||
except Exception as err:
|
||||
logging.info(f'Exception:{err}')
|
||||
|
||||
return web.Response(status=200, text="I'm fine")
|
||||
|
||||
|
||||
async def webserver(addr, port):
|
||||
'''coro running our webserver'''
|
||||
app = web.Application()
|
||||
app.add_routes(routes)
|
||||
runner = web.AppRunner(app)
|
||||
|
||||
await runner.setup()
|
||||
site = web.TCPSite(runner, addr, port)
|
||||
await site.start()
|
||||
logging.info(f'HTTP server listen on port: {port}')
|
||||
|
||||
try:
|
||||
# Normal interaction with aiohttp
|
||||
while True:
|
||||
await asyncio.sleep(3600) # sleep forever
|
||||
except asyncio.CancelledError:
|
||||
logging.info('HTTP server cancelled')
|
||||
await runner.cleanup()
|
||||
logging.debug('HTTP cleanup done')
|
||||
|
||||
|
||||
async def handle_client(reader: StreamReader, writer: StreamWriter, inv_class):
|
||||
'''Handles a new incoming connection and starts an async loop'''
|
||||
|
||||
addr = writer.get_extra_info('peername')
|
||||
await Inverter(reader, writer, addr).server_loop(addr)
|
||||
with inv_class(reader, writer) as inv:
|
||||
await inv.local.ifc.server_loop()
|
||||
|
||||
|
||||
def handle_SIGTERM(loop):
|
||||
async def handle_shutdown(loop, web_task):
|
||||
'''Close all TCP connections and stop the event loop'''
|
||||
|
||||
logging.info('Shutdown due to SIGTERM')
|
||||
global proxy_is_up
|
||||
proxy_is_up = False
|
||||
|
||||
#
|
||||
# first, close all open TCP connections
|
||||
# first, disc all open TCP connections gracefully
|
||||
#
|
||||
for stream in AsyncStream:
|
||||
stream.close()
|
||||
for inverter in InverterIfc:
|
||||
await inverter.disc(True)
|
||||
|
||||
logging.info('Proxy disconnecting done')
|
||||
|
||||
#
|
||||
# at last, we stop the loop
|
||||
# second, cancel the web server
|
||||
#
|
||||
web_task.cancel()
|
||||
await web_task
|
||||
|
||||
#
|
||||
# now cancel all remaining (pending) tasks
|
||||
#
|
||||
pending = asyncio.all_tasks()
|
||||
for task in pending:
|
||||
task.cancel()
|
||||
|
||||
#
|
||||
# at last, start a coro for stopping the loop
|
||||
#
|
||||
logging.debug("Stop event loop")
|
||||
loop.stop()
|
||||
|
||||
logging.info('Shutdown complete')
|
||||
|
||||
|
||||
def get_log_level() -> int:
|
||||
'''checks if LOG_LVL is set in the environment and returns the
|
||||
corresponding logging.LOG_LEVEL'''
|
||||
log_level = os.getenv('LOG_LVL', 'INFO')
|
||||
logging.info(f"LOG_LVL : {log_level}")
|
||||
|
||||
if log_level == 'DEBUG':
|
||||
log_level = logging.DEBUG
|
||||
elif log_level == 'WARN':
|
||||
@@ -48,51 +132,111 @@ def get_log_level() -> int:
|
||||
return log_level
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
def main(): # pragma: no cover
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('-c', '--config_path', type=str,
|
||||
default='./config/',
|
||||
help='set path for the configuration files')
|
||||
parser.add_argument('-j', '--json_config', type=str,
|
||||
help='read user config from json-file')
|
||||
parser.add_argument('-t', '--toml_config', type=str,
|
||||
help='read user config from toml-file')
|
||||
parser.add_argument('-l', '--log_path', type=str,
|
||||
default='./log/',
|
||||
help='set path for the logging files')
|
||||
parser.add_argument('-b', '--log_backups', type=int,
|
||||
default=0,
|
||||
help='set max number of daily log-files')
|
||||
args = parser.parse_args()
|
||||
#
|
||||
# Setup our daily, rotating logger
|
||||
#
|
||||
serv_name = os.getenv('SERVICE_NAME', 'proxy')
|
||||
version = os.getenv('VERSION', 'unknown')
|
||||
|
||||
setattr(logging.handlers, "log_path", args.log_path)
|
||||
setattr(logging.handlers, "log_backups", args.log_backups)
|
||||
|
||||
logging.config.fileConfig('logging.ini')
|
||||
logging.info(f'Server "{serv_name} - {version}" will be started')
|
||||
logging.info(f'current dir: {os.getcwd()}')
|
||||
logging.info(f"config_path: {args.config_path}")
|
||||
logging.info(f"json_config: {args.json_config}")
|
||||
logging.info(f"toml_config: {args.toml_config}")
|
||||
logging.info(f"log_path: {args.log_path}")
|
||||
if args.log_backups == 0:
|
||||
logging.info("log_backups: unlimited")
|
||||
else:
|
||||
logging.info(f"log_backups: {args.log_backups} days")
|
||||
log_level = get_log_level()
|
||||
logging.info('******')
|
||||
|
||||
# set lowest-severity for 'root', 'msg', 'conn' and 'data' logger
|
||||
log_level = get_log_level()
|
||||
logging.getLogger().setLevel(log_level)
|
||||
logging.getLogger('msg').setLevel(log_level)
|
||||
logging.getLogger('conn').setLevel(log_level)
|
||||
logging.getLogger('data').setLevel(log_level)
|
||||
|
||||
# read config file
|
||||
Config.read()
|
||||
logging.getLogger('tracer').setLevel(log_level)
|
||||
logging.getLogger('asyncio').setLevel(log_level)
|
||||
# logging.getLogger('mqtt').setLevel(log_level)
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
Inverter.class_init()
|
||||
# read config file
|
||||
Config.init(ConfigReadToml("default_config.toml"))
|
||||
ConfigReadEnv()
|
||||
ConfigReadJson(args.config_path + "config.json")
|
||||
ConfigReadToml(args.config_path + "config.toml")
|
||||
ConfigReadJson(args.json_config)
|
||||
ConfigReadToml(args.toml_config)
|
||||
config_err = Config.get_error()
|
||||
|
||||
if config_err is not None:
|
||||
logging.info(f'config_err: {config_err}')
|
||||
return
|
||||
|
||||
logging.info('******')
|
||||
|
||||
Proxy.class_init()
|
||||
Schedule.start()
|
||||
ModbusTcp(loop)
|
||||
|
||||
#
|
||||
# Create tasks for our listening servers. These must be tasks! If we call
|
||||
# start_server directly out of our main task, the eventloop will be blocked
|
||||
# and we can't receive and handle the UNIX signals!
|
||||
#
|
||||
for inv_class, port in [(InverterG3, 5005), (InverterG3P, 10000)]:
|
||||
logging.info(f'listen on port: {port} for inverters')
|
||||
loop.create_task(asyncio.start_server(lambda r, w, i=inv_class:
|
||||
handle_client(r, w, i),
|
||||
'0.0.0.0', port))
|
||||
web_task = loop.create_task(webserver('0.0.0.0', 8127))
|
||||
|
||||
#
|
||||
# Register some UNIX Signal handler for a gracefully server shutdown
|
||||
# on Docker restart and stop
|
||||
#
|
||||
for signame in ('SIGINT', 'SIGTERM'):
|
||||
loop.add_signal_handler(getattr(signal, signame),
|
||||
functools.partial(handle_SIGTERM, loop))
|
||||
|
||||
#
|
||||
# Create a task for our listening server. This must be a task! If we call
|
||||
# start_server directly out of our main task, the eventloop will be blocked
|
||||
# and we can't receive and handle the UNIX signals!
|
||||
#
|
||||
loop.create_task(asyncio.start_server(handle_client, '0.0.0.0', 5005))
|
||||
lambda loop=loop: asyncio.create_task(
|
||||
handle_shutdown(loop, web_task)))
|
||||
|
||||
loop.set_debug(log_level == logging.DEBUG)
|
||||
try:
|
||||
global proxy_is_up
|
||||
proxy_is_up = True
|
||||
loop.run_forever()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
Inverter.class_close(loop)
|
||||
logging.info('Close event loop')
|
||||
logging.info("Event loop is stopped")
|
||||
Proxy.class_close(loop)
|
||||
logging.debug('Close event loop')
|
||||
loop.close()
|
||||
logging.info(f'Finally, exit Server "{serv_name}"')
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
main()
|
||||
|
||||
14
app/src/singleton.py
Normal file
14
app/src/singleton.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from weakref import WeakValueDictionary
|
||||
|
||||
|
||||
class Singleton(type):
|
||||
_instances = WeakValueDictionary()
|
||||
|
||||
def __call__(cls, *args, **kwargs):
|
||||
# logger_mqtt.debug('singleton: __call__')
|
||||
if cls not in cls._instances:
|
||||
instance = super(Singleton,
|
||||
cls).__call__(*args, **kwargs)
|
||||
cls._instances[cls] = instance
|
||||
|
||||
return cls._instances[cls]
|
||||
573
app/tests/test_async_stream.py
Normal file
573
app/tests/test_async_stream.py
Normal file
@@ -0,0 +1,573 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
import gc
|
||||
import time
|
||||
|
||||
from infos import Infos
|
||||
from inverter_base import InverterBase
|
||||
from async_stream import AsyncStreamServer, AsyncStreamClient, StreamPtr
|
||||
from messages import Message
|
||||
|
||||
from test_modbus_tcp import FakeReader, FakeWriter
|
||||
from test_inverter_base import config_conn, patch_open_connection
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
# initialize the proxy statistics
|
||||
Infos.static_init()
|
||||
|
||||
class FakeProto(Message):
|
||||
def __init__(self, ifc, server_side):
|
||||
super().__init__('G3F', ifc, server_side, None, 10)
|
||||
self.conn_no = 0
|
||||
|
||||
def mb_timout_cb(self, exp_cnt):
|
||||
pass # empty callback
|
||||
|
||||
def fake_reader_fwd():
|
||||
reader = FakeReader()
|
||||
reader.test = FakeReader.RD_TEST_13_BYTES
|
||||
reader.on_recv.set()
|
||||
return reader
|
||||
|
||||
def test_timeout_cb():
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
def timeout():
|
||||
return 13
|
||||
|
||||
ifc = AsyncStreamClient(reader, writer, None, None)
|
||||
assert 360 == ifc._AsyncStream__timeout()
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
assert 13 == ifc._AsyncStream__timeout()
|
||||
ifc.prot_set_timeout_cb(None)
|
||||
assert 360 == ifc._AsyncStream__timeout()
|
||||
|
||||
# call healthy outside the contexter manager (__exit__() was called)
|
||||
assert ifc.healthy()
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
def test_health():
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
ifc = AsyncStreamClient(reader, writer, None, None)
|
||||
ifc.proc_start = time.time()
|
||||
assert ifc.healthy()
|
||||
ifc.proc_start = time.time() -10
|
||||
assert not ifc.healthy()
|
||||
ifc.proc_start = None
|
||||
assert ifc.healthy()
|
||||
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_close_cb():
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
cnt = 0
|
||||
def timeout():
|
||||
return 0.1
|
||||
def closed():
|
||||
nonlocal cnt
|
||||
nonlocal ifc
|
||||
ifc.close() # clears the closed callback
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(reader, writer, None, closed)
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
await ifc.client_loop('')
|
||||
assert cnt == 1
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
await ifc.client_loop('')
|
||||
assert cnt == 1 # check that the closed method would not be called
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(reader, writer, None, None)
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
await ifc.client_loop('')
|
||||
assert cnt == 0
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_read():
|
||||
global test
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
reader.test = FakeReader.RD_TEST_13_BYTES
|
||||
reader.on_recv.set()
|
||||
writer = FakeWriter()
|
||||
cnt = 0
|
||||
def timeout():
|
||||
return 1
|
||||
def closed():
|
||||
nonlocal cnt
|
||||
nonlocal ifc
|
||||
ifc.close() # clears the closed callback
|
||||
cnt += 1
|
||||
def app_read():
|
||||
nonlocal ifc
|
||||
ifc.proc_start -= 3
|
||||
return 0.01 # async wait of 0.01
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(reader, writer, None, closed)
|
||||
ifc.proc_max = 0
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
ifc.rx_set_cb(app_read)
|
||||
await ifc.client_loop('')
|
||||
print('End loop')
|
||||
assert ifc.proc_max >= 3
|
||||
assert 13 == ifc.rx_len()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_write():
|
||||
global test
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
reader.test = FakeReader.RD_TEST_13_BYTES
|
||||
reader.on_recv.set()
|
||||
writer = FakeWriter()
|
||||
cnt = 0
|
||||
def timeout():
|
||||
return 1
|
||||
def closed():
|
||||
nonlocal cnt
|
||||
nonlocal ifc
|
||||
ifc.close() # clears the closed callback
|
||||
cnt += 1
|
||||
def app_read():
|
||||
nonlocal ifc
|
||||
ifc.proc_start -= 3
|
||||
return 0.01 # async wait of 0.01
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(reader, writer, None, closed)
|
||||
ifc.proc_max = 10
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
ifc.rx_set_cb(app_read)
|
||||
ifc.tx_add(b'test-data-resp')
|
||||
assert 14 == ifc.tx_len()
|
||||
await ifc.client_loop('')
|
||||
print('End loop')
|
||||
assert ifc.proc_max >= 3
|
||||
assert 13 == ifc.rx_len()
|
||||
assert 0 == ifc.tx_len()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_publ_mqtt_cb():
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
reader.test = FakeReader.RD_TEST_13_BYTES
|
||||
reader.on_recv.set()
|
||||
writer = FakeWriter()
|
||||
cnt = 0
|
||||
def timeout():
|
||||
return 0.1
|
||||
async def publ_mqtt():
|
||||
nonlocal cnt
|
||||
nonlocal ifc
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(reader, writer, publ_mqtt, None, None)
|
||||
assert ifc.async_publ_mqtt
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
await ifc.server_loop()
|
||||
assert cnt == 3 # 2 calls in server_loop() and 1 in loop()
|
||||
assert ifc.async_publ_mqtt
|
||||
ifc.close() # clears the closed callback
|
||||
assert not ifc.async_publ_mqtt
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_remote_cb():
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
cnt = 0
|
||||
def timeout():
|
||||
return 0.1
|
||||
async def create_remote():
|
||||
nonlocal cnt
|
||||
nonlocal ifc
|
||||
ifc.close() # clears the closed callback
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(reader, writer, None, create_remote, None)
|
||||
assert ifc.create_remote
|
||||
await ifc.create_remote()
|
||||
assert cnt == 1
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
await ifc.server_loop()
|
||||
assert not ifc.create_remote
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_sw_exception():
|
||||
global test
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
reader.test = FakeReader.RD_TEST_SW_EXCEPT
|
||||
reader.on_recv.set()
|
||||
writer = FakeWriter()
|
||||
cnt = 0
|
||||
def timeout():
|
||||
return 1
|
||||
def closed():
|
||||
nonlocal cnt
|
||||
nonlocal ifc
|
||||
ifc.close() # clears the closed callback
|
||||
cnt += 1
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(reader, writer, None, closed)
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
await ifc.client_loop('')
|
||||
print('End loop')
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_os_error():
|
||||
global test
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
reader.test = FakeReader.RD_TEST_OS_ERROR
|
||||
|
||||
reader.on_recv.set()
|
||||
writer = FakeWriter()
|
||||
cnt = 0
|
||||
def timeout():
|
||||
return 1
|
||||
def closed():
|
||||
nonlocal cnt
|
||||
nonlocal ifc
|
||||
ifc.close() # clears the closed callback
|
||||
cnt += 1
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(reader, writer, None, closed)
|
||||
ifc.prot_set_timeout_cb(timeout)
|
||||
await ifc.client_loop('')
|
||||
print('End loop')
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
class TestType():
|
||||
FWD_NO_EXCPT = 1
|
||||
FWD_SW_EXCPT = 2
|
||||
FWD_TIMEOUT = 3
|
||||
FWD_OS_ERROR = 4
|
||||
FWD_OS_ERROR_NO_STREAM = 5
|
||||
FWD_RUNTIME_ERROR = 6
|
||||
FWD_RUNTIME_ERROR_NO_STREAM = 7
|
||||
|
||||
def create_remote(remote, test_type, with_close_hdr:bool = False):
|
||||
def update_hdr(buf):
|
||||
return
|
||||
def callback():
|
||||
if test_type == TestType.FWD_SW_EXCPT:
|
||||
remote.unknown_var += 1
|
||||
elif test_type == TestType.FWD_TIMEOUT:
|
||||
raise TimeoutError
|
||||
elif test_type == TestType.FWD_OS_ERROR:
|
||||
raise ConnectionRefusedError
|
||||
elif test_type == TestType.FWD_OS_ERROR_NO_STREAM:
|
||||
remote.stream = None
|
||||
raise ConnectionRefusedError
|
||||
elif test_type == TestType.FWD_RUNTIME_ERROR:
|
||||
raise RuntimeError("Peer closed")
|
||||
elif test_type == TestType.FWD_RUNTIME_ERROR_NO_STREAM:
|
||||
remote.stream = None
|
||||
raise RuntimeError("Peer closed")
|
||||
return True
|
||||
|
||||
def close():
|
||||
return
|
||||
if with_close_hdr:
|
||||
close_hndl = close
|
||||
else:
|
||||
close_hndl = None
|
||||
|
||||
remote.ifc = AsyncStreamClient(
|
||||
FakeReader(), FakeWriter(), StreamPtr(None), close_hndl)
|
||||
remote.ifc.prot_set_update_header_cb(update_hdr)
|
||||
remote.ifc.prot_set_init_new_client_conn_cb(callback)
|
||||
remote.stream = FakeProto(remote.ifc, False)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote, ifc
|
||||
create_remote(remote, TestType.FWD_NO_EXCPT)
|
||||
ifc.fwd_add(b'test-forward_msg2 ')
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_with_conn():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote, ifc
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
create_remote(remote, TestType.FWD_NO_EXCPT)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 0
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_no_conn():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_sw_except():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote
|
||||
create_remote(remote, TestType.FWD_SW_EXCPT)
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_os_error():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote
|
||||
create_remote(remote, TestType.FWD_OS_ERROR)
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_os_error2():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote
|
||||
create_remote(remote, TestType.FWD_OS_ERROR, True)
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_os_error3():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote
|
||||
create_remote(remote, TestType.FWD_OS_ERROR_NO_STREAM)
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_runtime_error():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote
|
||||
create_remote(remote, TestType.FWD_RUNTIME_ERROR)
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_runtime_error2():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote
|
||||
create_remote(remote, TestType.FWD_RUNTIME_ERROR, True)
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_runtime_error3():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
async def _create_remote():
|
||||
nonlocal cnt, remote
|
||||
create_remote(remote, TestType.FWD_RUNTIME_ERROR_NO_STREAM, True)
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamServer(fake_reader_fwd(), FakeWriter(), None, _create_remote, remote)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.server_loop()
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_resp():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
def _close_cb():
|
||||
nonlocal cnt, remote, ifc
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(fake_reader_fwd(), FakeWriter(), remote, _close_cb)
|
||||
create_remote(remote, TestType.FWD_NO_EXCPT)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.client_loop('')
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_forward_resp2():
|
||||
assert asyncio.get_running_loop()
|
||||
remote = StreamPtr(None)
|
||||
cnt = 0
|
||||
|
||||
def _close_cb():
|
||||
nonlocal cnt, remote, ifc
|
||||
cnt += 1
|
||||
|
||||
cnt = 0
|
||||
ifc = AsyncStreamClient(fake_reader_fwd(), FakeWriter(), None, _close_cb)
|
||||
create_remote(remote, TestType.FWD_NO_EXCPT)
|
||||
ifc.fwd_add(b'test-forward_msg')
|
||||
await ifc.client_loop('')
|
||||
assert cnt == 1
|
||||
del ifc
|
||||
43
app/tests/test_byte_fifo.py
Normal file
43
app/tests/test_byte_fifo.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# test_with_pytest.py
|
||||
|
||||
from byte_fifo import ByteFifo
|
||||
|
||||
def test_fifo():
|
||||
read = ByteFifo()
|
||||
assert 0 == len(read)
|
||||
read += b'12'
|
||||
assert 2 == len(read)
|
||||
read += bytearray("34", encoding='UTF8')
|
||||
assert 4 == len(read)
|
||||
assert b'12' == read.peek(2)
|
||||
assert 4 == len(read)
|
||||
assert b'1234' == read.peek()
|
||||
assert 4 == len(read)
|
||||
assert b'12' == read.get(2)
|
||||
assert 2 == len(read)
|
||||
assert b'34' == read.get()
|
||||
assert 0 == len(read)
|
||||
|
||||
def test_fifo_fmt():
|
||||
read = ByteFifo()
|
||||
read += b'1234'
|
||||
assert b'1234' == read.peek()
|
||||
assert " 0000 | 31 32 33 34 | 1234" == f'{read}'
|
||||
|
||||
def test_fifo_observer():
|
||||
read = ByteFifo()
|
||||
|
||||
def _read():
|
||||
assert b'1234' == read.get(4)
|
||||
|
||||
read += b'12'
|
||||
assert 2 == len(read)
|
||||
read()
|
||||
read.reg_trigger(_read)
|
||||
read += b'34'
|
||||
assert 4 == len(read)
|
||||
read()
|
||||
assert 0 == len(read)
|
||||
assert b'' == read.peek(2)
|
||||
assert b'' == read.get(2)
|
||||
assert 0 == len(read)
|
||||
393
app/tests/test_config.py
Normal file
393
app/tests/test_config.py
Normal file
@@ -0,0 +1,393 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import json
|
||||
from mock import patch
|
||||
from schema import SchemaMissingKeyError
|
||||
from cnf.config import Config, ConfigIfc
|
||||
from cnf.config_read_toml import ConfigReadToml
|
||||
|
||||
class FakeBuffer:
|
||||
rd = str()
|
||||
|
||||
test_buffer = FakeBuffer
|
||||
|
||||
|
||||
class FakeFile():
|
||||
def __init__(self):
|
||||
self.buf = test_buffer
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc, tb):
|
||||
pass
|
||||
|
||||
|
||||
class FakeOptionsFile(FakeFile):
|
||||
def __init__(self, OpenTextMode):
|
||||
super().__init__()
|
||||
self.bin_mode = 'b' in OpenTextMode
|
||||
|
||||
def read(self):
|
||||
if self.bin_mode:
|
||||
return bytearray(self.buf.rd.encode('utf-8')).copy()
|
||||
else:
|
||||
return self.buf.rd.copy()
|
||||
|
||||
def patch_open():
|
||||
def new_open(file: str, OpenTextMode="rb"):
|
||||
if file == "_no__file__no_":
|
||||
raise FileNotFoundError
|
||||
return FakeOptionsFile(OpenTextMode)
|
||||
|
||||
with patch('builtins.open', new_open) as conn:
|
||||
yield conn
|
||||
|
||||
class TstConfig(ConfigIfc):
|
||||
|
||||
@classmethod
|
||||
def __init__(cls, cnf):
|
||||
cls.act_config = cnf
|
||||
|
||||
@classmethod
|
||||
def add_config(cls) -> dict:
|
||||
return cls.act_config
|
||||
|
||||
|
||||
def test_empty_config():
|
||||
cnf = {}
|
||||
try:
|
||||
Config.conf_schema.validate(cnf)
|
||||
assert False
|
||||
except SchemaMissingKeyError:
|
||||
pass
|
||||
|
||||
@pytest.fixture
|
||||
def ConfigDefault():
|
||||
return {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
|
||||
'inverters': {
|
||||
'allow_all': False,
|
||||
'R170000000000001': {
|
||||
'suggested_area': '',
|
||||
'modbus_polling': False,
|
||||
'monitor_sn': 0,
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'sensor_list': 688
|
||||
},
|
||||
'Y170000000000001': {
|
||||
'modbus_polling': True,
|
||||
'monitor_sn': 2000000000,
|
||||
'suggested_area': '',
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv3': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv4': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'sensor_list': 688
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@pytest.fixture
|
||||
def ConfigComplete():
|
||||
return {
|
||||
'gen3plus': {
|
||||
'at_acl': {
|
||||
'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']},
|
||||
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'],
|
||||
'block': ['AT+SUPDATE']}
|
||||
}
|
||||
},
|
||||
'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com',
|
||||
'port': 5005},
|
||||
'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com',
|
||||
'port': 10000},
|
||||
'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None},
|
||||
'ha': {'auto_conf_prefix': 'homeassistant',
|
||||
'discovery_prefix': 'homeassistant',
|
||||
'entity_prefix': 'tsun',
|
||||
'proxy_node_id': 'proxy',
|
||||
'proxy_unique_id': 'P170000000000001'},
|
||||
'inverters': {
|
||||
'allow_all': False,
|
||||
'R170000000000001': {'node_id': 'PV-Garage/',
|
||||
'modbus_polling': False,
|
||||
'monitor_sn': 0,
|
||||
'pv1': {'manufacturer': 'man1',
|
||||
'type': 'type1'},
|
||||
'pv2': {'manufacturer': 'man2',
|
||||
'type': 'type2'},
|
||||
'suggested_area': 'Garage',
|
||||
'sensor_list': 688},
|
||||
'Y170000000000001': {'modbus_polling': True,
|
||||
'monitor_sn': 2000000000,
|
||||
'node_id': 'PV-Garage2/',
|
||||
'pv1': {'manufacturer': 'man1',
|
||||
'type': 'type1'},
|
||||
'pv2': {'manufacturer': 'man2',
|
||||
'type': 'type2'},
|
||||
'pv3': {'manufacturer': 'man3',
|
||||
'type': 'type3'},
|
||||
'pv4': {'manufacturer': 'man4',
|
||||
'type': 'type4'},
|
||||
'suggested_area': 'Garage2',
|
||||
'sensor_list': 688}
|
||||
}
|
||||
}
|
||||
|
||||
def test_default_config():
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
validated = Config.def_config
|
||||
assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
|
||||
'inverters': {
|
||||
'allow_all': False,
|
||||
'R170000000000001': {
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'modbus_polling': False,
|
||||
'monitor_sn': 0,
|
||||
'suggested_area': '',
|
||||
'sensor_list': 688},
|
||||
'Y170000000000001': {
|
||||
'modbus_polling': True,
|
||||
'monitor_sn': 2000000000,
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv3': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv4': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'suggested_area': '',
|
||||
'sensor_list': 688}}}
|
||||
|
||||
def test_full_config(ConfigComplete):
|
||||
cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005},
|
||||
'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']},
|
||||
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': ['AT+SUPDATE']}}},
|
||||
'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000},
|
||||
'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''},
|
||||
'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
|
||||
'inverters': {'allow_all': False,
|
||||
'R170000000000001': {'modbus_polling': False, 'node_id': 'PV-Garage/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}},
|
||||
'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': 'PV-Garage2/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage2', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}, 'pv4': {'type': 'type4', 'manufacturer': 'man4'}}}}
|
||||
try:
|
||||
validated = Config.conf_schema.validate(cnf)
|
||||
except Exception:
|
||||
assert False
|
||||
assert validated == ConfigComplete
|
||||
|
||||
def test_read_empty(ConfigDefault):
|
||||
test_buffer.rd = ""
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadToml("config/config.toml")
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == ConfigDefault
|
||||
|
||||
defcnf = Config.def_config.get('solarman')
|
||||
assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}
|
||||
assert True == Config.is_default('solarman')
|
||||
|
||||
def test_no_file():
|
||||
Config.init(ConfigReadToml("default_config.toml"))
|
||||
err = Config.get_error()
|
||||
assert err == "Config.read: [Errno 2] No such file or directory: 'default_config.toml'"
|
||||
cnf = Config.get()
|
||||
assert cnf == {}
|
||||
defcnf = Config.def_config.get('solarman')
|
||||
assert defcnf == None
|
||||
|
||||
def test_no_file2():
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
assert Config.err == None
|
||||
ConfigReadToml("_no__file__no_")
|
||||
err = Config.get_error()
|
||||
assert err == None
|
||||
|
||||
def test_invalid_filename():
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
assert Config.err == None
|
||||
ConfigReadToml(None)
|
||||
err = Config.get_error()
|
||||
assert err == None
|
||||
|
||||
def test_read_cnf1():
|
||||
test_buffer.rd = "solarman.enabled = false"
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadToml("config/config.toml")
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
|
||||
'inverters': {
|
||||
'allow_all': False,
|
||||
'R170000000000001': {
|
||||
'suggested_area': '',
|
||||
'modbus_polling': False,
|
||||
'monitor_sn': 0,
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'sensor_list': 688
|
||||
},
|
||||
'Y170000000000001': {
|
||||
'modbus_polling': True,
|
||||
'monitor_sn': 2000000000,
|
||||
'suggested_area': '',
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv3': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv4': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'sensor_list': 688
|
||||
}
|
||||
}
|
||||
}
|
||||
cnf = Config.get('solarman')
|
||||
assert cnf == {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}
|
||||
defcnf = Config.def_config.get('solarman')
|
||||
assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}
|
||||
assert False == Config.is_default('solarman')
|
||||
|
||||
def test_read_cnf2():
|
||||
test_buffer.rd = "solarman.enabled = 'FALSE'"
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadToml("config/config.toml")
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
|
||||
'inverters': {
|
||||
'allow_all': False,
|
||||
'R170000000000001': {
|
||||
'suggested_area': '',
|
||||
'modbus_polling': False,
|
||||
'monitor_sn': 0,
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'sensor_list': 688
|
||||
},
|
||||
'Y170000000000001': {
|
||||
'modbus_polling': True,
|
||||
'monitor_sn': 2000000000,
|
||||
'suggested_area': '',
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv3': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv4': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'sensor_list': 688
|
||||
}
|
||||
}
|
||||
}
|
||||
assert True == Config.is_default('solarman')
|
||||
|
||||
def test_read_cnf3(ConfigDefault):
|
||||
test_buffer.rd = "solarman.port = 'FALSE'"
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadToml("config/config.toml")
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == 'error: Key \'solarman\' error:\nKey \'port\' error:\nint(\'FALSE\') raised ValueError("invalid literal for int() with base 10: \'FALSE\'")'
|
||||
cnf = Config.get()
|
||||
assert cnf == ConfigDefault
|
||||
|
||||
def test_read_cnf4():
|
||||
test_buffer.rd = "solarman.port = 5000"
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadToml("config/config.toml")
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'},
|
||||
'inverters': {
|
||||
'allow_all': False,
|
||||
'R170000000000001': {
|
||||
'suggested_area': '',
|
||||
'modbus_polling': False,
|
||||
'monitor_sn': 0,
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-395M'},
|
||||
'sensor_list': 688
|
||||
},
|
||||
'Y170000000000001': {
|
||||
'modbus_polling': True,
|
||||
'monitor_sn': 2000000000,
|
||||
'suggested_area': '',
|
||||
'node_id': '',
|
||||
'pv1': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv2': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv3': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'pv4': {'manufacturer': 'Risen',
|
||||
'type': 'RSM40-8-410M'},
|
||||
'sensor_list': 688
|
||||
}
|
||||
}
|
||||
}
|
||||
assert False == Config.is_default('solarman')
|
||||
|
||||
def test_read_cnf5():
|
||||
test_buffer.rd = "solarman.port = 1023"
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadToml("config/config.toml")
|
||||
err = Config.get_error()
|
||||
assert err != None
|
||||
|
||||
def test_read_cnf6():
|
||||
test_buffer.rd = "solarman.port = 65536"
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadToml("config/config.toml")
|
||||
err = Config.get_error()
|
||||
assert err != None
|
||||
53
app/tests/test_config_read_env.py
Normal file
53
app/tests/test_config_read_env.py
Normal file
@@ -0,0 +1,53 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import os
|
||||
from mock import patch
|
||||
from cnf.config import Config
|
||||
from cnf.config_read_toml import ConfigReadToml
|
||||
from cnf.config_read_env import ConfigReadEnv
|
||||
|
||||
def patch_getenv():
|
||||
def new_getenv(key: str, defval=None):
|
||||
"""Get an environment variable, return None if it doesn't exist.
|
||||
The optional second argument can specify an alternate default. key,
|
||||
default and the result are str."""
|
||||
if key == 'MQTT_PASSWORD':
|
||||
return 'passwd'
|
||||
elif key == 'MQTT_PORT':
|
||||
return 1234
|
||||
elif key == 'MQTT_HOST':
|
||||
return ""
|
||||
return defval
|
||||
|
||||
with patch.object(os, 'getenv', new_getenv) as conn:
|
||||
yield conn
|
||||
|
||||
def test_extend_key():
|
||||
cnf_rd = ConfigReadEnv()
|
||||
|
||||
conf = {}
|
||||
cnf_rd._extend_key(conf, "mqtt.user", "testuser")
|
||||
assert conf == {
|
||||
'mqtt': {
|
||||
'user': 'testuser',
|
||||
},
|
||||
}
|
||||
|
||||
conf = {}
|
||||
cnf_rd._extend_key(conf, "mqtt", "testuser")
|
||||
assert conf == {
|
||||
'mqtt': 'testuser',
|
||||
}
|
||||
|
||||
conf = {}
|
||||
cnf_rd._extend_key(conf, "", "testuser")
|
||||
assert conf == {'': 'testuser'}
|
||||
|
||||
def test_read_env_config():
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
assert Config.get('mqtt') == {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}
|
||||
for _ in patch_getenv():
|
||||
|
||||
ConfigReadEnv()
|
||||
assert Config.get_error() == None
|
||||
assert Config.get('mqtt') == {'host': 'mqtt', 'port': 1234, 'user': None, 'passwd': 'passwd'}
|
||||
411
app/tests/test_config_read_json.py
Normal file
411
app/tests/test_config_read_json.py
Normal file
@@ -0,0 +1,411 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
from mock import patch
|
||||
from cnf.config import Config
|
||||
from cnf.config_read_json import ConfigReadJson
|
||||
from cnf.config_read_toml import ConfigReadToml
|
||||
|
||||
from test_config import ConfigDefault, ConfigComplete
|
||||
|
||||
|
||||
class CnfIfc(ConfigReadJson):
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
|
||||
class FakeBuffer:
|
||||
rd = str()
|
||||
wr = str()
|
||||
|
||||
|
||||
test_buffer = FakeBuffer
|
||||
|
||||
|
||||
class FakeFile():
|
||||
def __init__(self):
|
||||
self.buf = test_buffer
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc, tb):
|
||||
pass
|
||||
|
||||
|
||||
class FakeOptionsFile(FakeFile):
|
||||
def __init__(self, OpenTextMode):
|
||||
super().__init__()
|
||||
self.bin_mode = 'b' in OpenTextMode
|
||||
|
||||
def read(self):
|
||||
print(f"Fake.read: bmode:{self.bin_mode}")
|
||||
if self.bin_mode:
|
||||
return bytearray(self.buf.rd.encode('utf-8')).copy()
|
||||
else:
|
||||
print(f"Fake.read: str:{self.buf.rd}")
|
||||
return self.buf.rd
|
||||
|
||||
def patch_open():
|
||||
def new_open(file: str, OpenTextMode="r"):
|
||||
if file == "_no__file__no_":
|
||||
raise FileNotFoundError
|
||||
return FakeOptionsFile(OpenTextMode)
|
||||
|
||||
with patch('builtins.open', new_open) as conn:
|
||||
yield conn
|
||||
|
||||
@pytest.fixture
|
||||
def ConfigTomlEmpty():
|
||||
return {
|
||||
'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''},
|
||||
'ha': {'auto_conf_prefix': 'homeassistant',
|
||||
'discovery_prefix': 'homeassistant',
|
||||
'entity_prefix': 'tsun',
|
||||
'proxy_node_id': 'proxy',
|
||||
'proxy_unique_id': 'P170000000000001'},
|
||||
'solarman': {
|
||||
'enabled': True,
|
||||
'host': 'iot.talent-monitoring.com',
|
||||
'port': 10000,
|
||||
},
|
||||
'tsun': {
|
||||
'enabled': True,
|
||||
'host': 'logger.talent-monitoring.com',
|
||||
'port': 5005,
|
||||
},
|
||||
'inverters': {
|
||||
'allow_all': False
|
||||
},
|
||||
'gen3plus': {'at_acl': {'tsun': {'allow': [], 'block': []},
|
||||
'mqtt': {'allow': [], 'block': []}}},
|
||||
}
|
||||
|
||||
|
||||
def test_no_config(ConfigDefault):
|
||||
test_buffer.rd = "" # empty buffer, no json
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadJson()
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == 'error: Expecting value: line 1 column 1 (char 0)'
|
||||
cnf = Config.get()
|
||||
assert cnf == ConfigDefault
|
||||
|
||||
def test_no_file(ConfigDefault):
|
||||
test_buffer.rd = "" # empty buffer, no json
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadJson("_no__file__no_")
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == ConfigDefault
|
||||
|
||||
def test_invalid_filename(ConfigDefault):
|
||||
test_buffer.rd = "" # empty buffer, no json
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadJson(None)
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == ConfigDefault
|
||||
|
||||
def test_cnv1():
|
||||
"""test dotted key converting"""
|
||||
tst = {
|
||||
"gen3plus.at_acl.mqtt.block": [
|
||||
"AT+SUPDATE",
|
||||
"AT+"
|
||||
]
|
||||
}
|
||||
|
||||
cnf = ConfigReadJson()
|
||||
obj = cnf.convert_to_obj(tst)
|
||||
assert obj == {
|
||||
'gen3plus': {
|
||||
'at_acl': {
|
||||
'mqtt': {
|
||||
'block': [
|
||||
'AT+SUPDATE',
|
||||
"AT+"
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
def test_cnv2():
|
||||
"""test a valid list with serials in inverters"""
|
||||
tst = {
|
||||
"inverters": [
|
||||
{
|
||||
"serial": "R170000000000001",
|
||||
},
|
||||
{
|
||||
"serial": "Y170000000000001",
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
cnf = ConfigReadJson()
|
||||
obj = cnf.convert_to_obj(tst)
|
||||
assert obj == {
|
||||
'inverters': {
|
||||
'R170000000000001': {},
|
||||
'Y170000000000001': {}
|
||||
},
|
||||
}
|
||||
|
||||
def test_cnv3():
|
||||
"""test the combination of a list and a scalar in inverters"""
|
||||
tst = {
|
||||
"inverters": [
|
||||
{
|
||||
"serial": "R170000000000001",
|
||||
},
|
||||
{
|
||||
"serial": "Y170000000000001",
|
||||
}
|
||||
],
|
||||
"inverters.allow_all": False,
|
||||
}
|
||||
|
||||
cnf = ConfigReadJson()
|
||||
obj = cnf.convert_to_obj(tst)
|
||||
assert obj == {
|
||||
'inverters': {
|
||||
'R170000000000001': {},
|
||||
'Y170000000000001': {},
|
||||
'allow_all': False,
|
||||
},
|
||||
}
|
||||
|
||||
def test_cnv4():
|
||||
tst = {
|
||||
"inverters": [
|
||||
{
|
||||
"serial": "R170000000000001",
|
||||
"node_id": "PV-Garage/",
|
||||
"suggested_area": "Garage",
|
||||
"modbus_polling": False,
|
||||
"pv1.manufacturer": "man1",
|
||||
"pv1.type": "type1",
|
||||
"pv2.manufacturer": "man2",
|
||||
"pv2.type": "type2",
|
||||
"sensor_list": 688
|
||||
},
|
||||
{
|
||||
"serial": "Y170000000000001",
|
||||
"monitor_sn": 2000000000,
|
||||
"node_id": "PV-Garage2/",
|
||||
"suggested_area": "Garage2",
|
||||
"modbus_polling": True,
|
||||
"client_mode.host": "InverterIP",
|
||||
"client_mode.port": 1234,
|
||||
"client_mode.forward": True,
|
||||
"pv1.manufacturer": "man1",
|
||||
"pv1.type": "type1",
|
||||
"pv2.manufacturer": "man2",
|
||||
"pv2.type": "type2",
|
||||
"pv3.manufacturer": "man3",
|
||||
"pv3.type": "type3",
|
||||
"pv4.manufacturer": "man4",
|
||||
"pv4.type": "type4",
|
||||
"sensor_list": 688
|
||||
}
|
||||
],
|
||||
"tsun.enabled": True,
|
||||
"solarman.enabled": True,
|
||||
"inverters.allow_all": False,
|
||||
"gen3plus.at_acl.tsun.allow": [
|
||||
"AT+Z",
|
||||
"AT+UPURL",
|
||||
"AT+SUPDATE"
|
||||
],
|
||||
"gen3plus.at_acl.tsun.block": [
|
||||
"AT+SUPDATE"
|
||||
],
|
||||
"gen3plus.at_acl.mqtt.allow": [
|
||||
"AT+"
|
||||
],
|
||||
"gen3plus.at_acl.mqtt.block": [
|
||||
"AT+SUPDATE"
|
||||
]
|
||||
}
|
||||
|
||||
cnf = ConfigReadJson()
|
||||
obj = cnf.convert_to_obj(tst)
|
||||
assert obj == {
|
||||
'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']},
|
||||
'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'],
|
||||
'block': ['AT+SUPDATE']}}},
|
||||
'inverters': {'R170000000000001': {'modbus_polling': False,
|
||||
'node_id': 'PV-Garage/',
|
||||
'pv1': {
|
||||
'manufacturer': 'man1',
|
||||
'type': 'type1'},
|
||||
'pv2': {
|
||||
'manufacturer': 'man2',
|
||||
'type': 'type2'},
|
||||
'sensor_list': 688,
|
||||
'suggested_area': 'Garage'},
|
||||
'Y170000000000001': {'client_mode': {
|
||||
'host': 'InverterIP',
|
||||
'port': 1234,
|
||||
'forward': True},
|
||||
'modbus_polling': True,
|
||||
'monitor_sn': 2000000000,
|
||||
'node_id': 'PV-Garage2/',
|
||||
'pv1': {
|
||||
'manufacturer': 'man1',
|
||||
'type': 'type1'},
|
||||
'pv2': {
|
||||
'manufacturer': 'man2',
|
||||
'type': 'type2'},
|
||||
'pv3': {
|
||||
'manufacturer': 'man3',
|
||||
'type': 'type3'},
|
||||
'pv4': {
|
||||
'manufacturer': 'man4',
|
||||
'type': 'type4'},
|
||||
'sensor_list': 688,
|
||||
'suggested_area': 'Garage2'},
|
||||
'allow_all': False},
|
||||
'solarman': {'enabled': True},
|
||||
'tsun': {'enabled': True}
|
||||
}
|
||||
|
||||
def test_cnv5():
|
||||
"""test a invalid list with missing serials"""
|
||||
tst = {
|
||||
"inverters": [
|
||||
{
|
||||
"node_id": "PV-Garage1/",
|
||||
},
|
||||
{
|
||||
"serial": "Y170000000000001",
|
||||
"node_id": "PV-Garage2/",
|
||||
}
|
||||
],
|
||||
}
|
||||
cnf = ConfigReadJson()
|
||||
obj = cnf.convert_to_obj(tst)
|
||||
assert obj == {
|
||||
'inverters': {
|
||||
'Y170000000000001': {'node_id': 'PV-Garage2/'}
|
||||
},
|
||||
}
|
||||
|
||||
def test_cnv6():
|
||||
"""test overwritting a value in inverters"""
|
||||
tst = {
|
||||
"inverters": [{
|
||||
"serial": "Y170000000000001",
|
||||
"node_id": "PV-Garage2/",
|
||||
}],
|
||||
}
|
||||
tst2 = {
|
||||
"inverters": [{
|
||||
"serial": "Y170000000000001",
|
||||
"node_id": "PV-Garden/",
|
||||
}],
|
||||
}
|
||||
cnf = ConfigReadJson()
|
||||
conf = {}
|
||||
for key, val in tst.items():
|
||||
cnf.convert_inv_arr(conf, key, val)
|
||||
|
||||
assert conf == {
|
||||
'inverters': {
|
||||
'Y170000000000001': {'node_id': 'PV-Garage2/'}
|
||||
},
|
||||
}
|
||||
|
||||
for key, val in tst2.items():
|
||||
cnf.convert_inv_arr(conf, key, val)
|
||||
|
||||
assert conf == {
|
||||
'inverters': {
|
||||
'Y170000000000001': {'node_id': 'PV-Garden/'}
|
||||
},
|
||||
}
|
||||
|
||||
def test_empty_config(ConfigDefault):
|
||||
test_buffer.rd = "{}" # empty json
|
||||
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadJson()
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == ConfigDefault
|
||||
|
||||
|
||||
def test_full_config(ConfigComplete):
|
||||
test_buffer.rd = """
|
||||
{
|
||||
"inverters": [
|
||||
{
|
||||
"serial": "R170000000000001",
|
||||
"node_id": "PV-Garage/",
|
||||
"suggested_area": "Garage",
|
||||
"modbus_polling": false,
|
||||
"pv1.manufacturer": "man1",
|
||||
"pv1.type": "type1",
|
||||
"pv2.manufacturer": "man2",
|
||||
"pv2.type": "type2",
|
||||
"sensor_list": 688
|
||||
},
|
||||
{
|
||||
"serial": "Y170000000000001",
|
||||
"monitor_sn": 2000000000,
|
||||
"node_id": "PV-Garage2/",
|
||||
"suggested_area": "Garage2",
|
||||
"modbus_polling": true,
|
||||
"pv1.manufacturer": "man1",
|
||||
"pv1.type": "type1",
|
||||
"pv2.manufacturer": "man2",
|
||||
"pv2.type": "type2",
|
||||
"pv3.manufacturer": "man3",
|
||||
"pv3.type": "type3",
|
||||
"pv4.manufacturer": "man4",
|
||||
"pv4.type": "type4",
|
||||
"sensor_list": 688
|
||||
}
|
||||
],
|
||||
"tsun.enabled": true,
|
||||
"solarman.enabled": true,
|
||||
"inverters.allow_all": false,
|
||||
"gen3plus.at_acl.tsun.allow": [
|
||||
"AT+Z",
|
||||
"AT+UPURL",
|
||||
"AT+SUPDATE"
|
||||
],
|
||||
"gen3plus.at_acl.tsun.block": [
|
||||
"AT+SUPDATE"
|
||||
],
|
||||
"gen3plus.at_acl.mqtt.allow": [
|
||||
"AT+"
|
||||
],
|
||||
"gen3plus.at_acl.mqtt.block": [
|
||||
"AT+SUPDATE"
|
||||
]
|
||||
}
|
||||
"""
|
||||
Config.init(ConfigReadToml("app/config/default_config.toml"))
|
||||
for _ in patch_open():
|
||||
ConfigReadJson()
|
||||
err = Config.get_error()
|
||||
|
||||
assert err == None
|
||||
cnf = Config.get()
|
||||
assert cnf == ConfigComplete
|
||||
@@ -1,312 +1,9 @@
|
||||
# test_with_pytest.py
|
||||
import pytest, json
|
||||
from app.src.infos import Infos
|
||||
|
||||
@pytest.fixture
|
||||
def ContrDataSeq(): # Get Time Request message
|
||||
msg = b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
|
||||
msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
|
||||
msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
|
||||
msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
|
||||
msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
|
||||
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def InvDataSeq(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
|
||||
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def InvalidDataSeq(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x64\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
|
||||
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def InvDataSeq2(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
|
||||
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
|
||||
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
|
||||
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
|
||||
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
|
||||
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
|
||||
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
|
||||
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
|
||||
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
|
||||
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
|
||||
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
|
||||
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
|
||||
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
|
||||
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
|
||||
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
|
||||
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
|
||||
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
|
||||
msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
|
||||
msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
|
||||
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
|
||||
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
|
||||
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
|
||||
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
|
||||
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
|
||||
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
|
||||
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
|
||||
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
|
||||
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
|
||||
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
|
||||
msg += b'\x53\x00\x00'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def InvDataSeq2_Zero(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
|
||||
msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
|
||||
msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
|
||||
msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
|
||||
msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
|
||||
msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
|
||||
msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
|
||||
msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
|
||||
msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
|
||||
msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
|
||||
msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
|
||||
msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
|
||||
msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
|
||||
msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x00\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x00\x00\x00\x00\x06'
|
||||
msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x00\x00\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
|
||||
msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x00\x00\x00\x00\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
|
||||
msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x0b\xb8\x46\x00\x00\x00\x00\x00\x00\x0c\x1c\x46\x00\x00\x00\x00\x00\x00\x0c\x80\x46\x00\x00\x00\x00\x00\x00\x0c\xe4\x46\x00\x00\x00\x00\x00\x00\x0d\x48\x46'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
|
||||
msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
|
||||
msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
|
||||
msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
|
||||
msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
|
||||
msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
|
||||
msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
|
||||
msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
|
||||
msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
|
||||
msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
|
||||
msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
|
||||
msg += b'\x53\x00\x00'
|
||||
return msg
|
||||
|
||||
|
||||
def test_parse_control(ContrDataSeq):
|
||||
i = Infos()
|
||||
for key, result in i.parse (ContrDataSeq):
|
||||
pass
|
||||
|
||||
assert json.dumps(i.db) == json.dumps(
|
||||
{"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300}})
|
||||
|
||||
def test_parse_inverter(InvDataSeq):
|
||||
i = Infos()
|
||||
for key, result in i.parse (InvDataSeq):
|
||||
pass
|
||||
|
||||
assert json.dumps(i.db) == json.dumps(
|
||||
{"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
|
||||
|
||||
def test_parse_cont_and_invert(ContrDataSeq, InvDataSeq):
|
||||
i = Infos()
|
||||
for key, result in i.parse (ContrDataSeq):
|
||||
pass
|
||||
|
||||
for key, result in i.parse (InvDataSeq):
|
||||
pass
|
||||
|
||||
assert json.dumps(i.db) == json.dumps(
|
||||
{
|
||||
"collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com", "No_Inputs": 2}, "controller": {"Signal_Strength": 100, "Power_On_Time": 29, "Data_Up_Interval": 300},
|
||||
"inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T17E7307021D006A", "Equipment_Model": "TSOL-MS600"}})
|
||||
|
||||
|
||||
def test_build_ha_conf1(ContrDataSeq):
|
||||
i = Infos()
|
||||
i.static_init() # initialize counter
|
||||
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False):
|
||||
|
||||
if id == 'out_power_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'daily_gen_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv1_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv2_123':
|
||||
assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!
|
||||
|
||||
|
||||
elif id == 'signal_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "via_device": "proxy", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
elif id == 'inv_count_456':
|
||||
assert False
|
||||
|
||||
assert tests==4
|
||||
|
||||
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True):
|
||||
|
||||
if id == 'out_power_123':
|
||||
assert False
|
||||
elif id == 'daily_gen_123':
|
||||
assert False
|
||||
elif id == 'power_pv1_123':
|
||||
assert False
|
||||
elif id == 'power_pv2_123':
|
||||
assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!
|
||||
|
||||
elif id == 'signal_123':
|
||||
assert False
|
||||
elif id == 'inv_count_456':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
assert tests==5
|
||||
|
||||
def test_build_ha_conf2(ContrDataSeq, InvDataSeq):
|
||||
i = Infos()
|
||||
for key, result in i.parse (ContrDataSeq):
|
||||
pass
|
||||
|
||||
for key, result in i.parse (InvDataSeq):
|
||||
pass
|
||||
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False, sug_area = 'roof'):
|
||||
|
||||
if id == 'out_power_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
if id == 'daily_gen_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv1_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1 - roof", "sa": "Module PV1 - roof", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv2_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV2 - roof", "sa": "Module PV2 - roof", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'signal_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
assert tests==5
|
||||
|
||||
def test_must_incr_total(InvDataSeq2, InvDataSeq2_Zero):
|
||||
i = Infos()
|
||||
tests = 0
|
||||
for key, update in i.parse (InvDataSeq2):
|
||||
if key == 'total':
|
||||
assert update == True
|
||||
tests +=1
|
||||
elif key == 'env':
|
||||
assert update == True
|
||||
tests +=1
|
||||
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
|
||||
tests = 0
|
||||
for key, update in i.parse (InvDataSeq2):
|
||||
if key == 'total':
|
||||
assert update == False
|
||||
tests +=1
|
||||
elif key == 'env':
|
||||
assert update == False
|
||||
tests +=1
|
||||
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
|
||||
|
||||
tests = 0
|
||||
for key, update in i.parse (InvDataSeq2_Zero):
|
||||
if key == 'total':
|
||||
assert update == False
|
||||
tests +=1
|
||||
elif key == 'env':
|
||||
assert update == True
|
||||
tests +=1
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
|
||||
|
||||
def test_must_incr_total2(InvDataSeq2, InvDataSeq2_Zero):
|
||||
i = Infos()
|
||||
tests = 0
|
||||
for key, update in i.parse (InvDataSeq2_Zero):
|
||||
if key == 'total':
|
||||
assert update == False
|
||||
tests +=1
|
||||
elif key == 'env':
|
||||
assert update == True
|
||||
tests +=1
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
|
||||
|
||||
tests = 0
|
||||
for key, update in i.parse (InvDataSeq2_Zero):
|
||||
if key == 'total':
|
||||
assert update == False
|
||||
tests +=1
|
||||
elif key == 'env':
|
||||
assert update == False
|
||||
tests +=1
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 0, "Rated_Power": 0})
|
||||
|
||||
tests = 0
|
||||
for key, update in i.parse (InvDataSeq2):
|
||||
if key == 'total':
|
||||
assert update == True
|
||||
tests +=1
|
||||
elif key == 'env':
|
||||
assert update == True
|
||||
tests +=1
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Temp": 23, "Rated_Power": 600})
|
||||
|
||||
import pytest
|
||||
import json, math
|
||||
import logging
|
||||
from infos import Register, ClrAtMidnight
|
||||
from infos import Infos, Fmt
|
||||
|
||||
def test_statistic_counter():
|
||||
i = Infos()
|
||||
@@ -316,22 +13,22 @@ def test_statistic_counter():
|
||||
val = i.dev_value(0xffffffff) # invalid addr
|
||||
assert val == None
|
||||
|
||||
val = i.dev_value(0xffffff00) # valid addr but not initiliazed
|
||||
val = i.dev_value(Register.INVERTER_CNT) # valid addr but not initiliazed
|
||||
assert val == None or val == 0
|
||||
|
||||
i.static_init() # initialize counter
|
||||
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0}})
|
||||
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}})
|
||||
|
||||
val = i.dev_value(0xffffff00) # valid and initiliazed addr
|
||||
val = i.dev_value(Register.INVERTER_CNT) # valid and initiliazed addr
|
||||
assert val == 0
|
||||
|
||||
i.inc_counter('Inverter_Cnt')
|
||||
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0}})
|
||||
val = i.dev_value(0xffffff00)
|
||||
assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}})
|
||||
val = i.dev_value(Register.INVERTER_CNT)
|
||||
assert val == 1
|
||||
|
||||
i.dec_counter('Inverter_Cnt')
|
||||
val = i.dev_value(0xffffff00)
|
||||
val = i.dev_value(Register.INVERTER_CNT)
|
||||
assert val == 0
|
||||
|
||||
def test_dep_rules():
|
||||
@@ -345,97 +42,238 @@ def test_dep_rules():
|
||||
assert res == True
|
||||
|
||||
i.inc_counter('Inverter_Cnt') # is 1
|
||||
val = i.dev_value(0xffffff00)
|
||||
val = i.dev_value(Register.INVERTER_CNT)
|
||||
assert val == 1
|
||||
res = i.ignore_this_device({'reg':0xffffff00})
|
||||
res = i.ignore_this_device({'reg': Register.INVERTER_CNT})
|
||||
assert res == True
|
||||
res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})
|
||||
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'less_eq': 2})
|
||||
assert res == False
|
||||
res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})
|
||||
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'gte': 2})
|
||||
assert res == True
|
||||
|
||||
i.inc_counter('Inverter_Cnt') # is 2
|
||||
res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})
|
||||
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'less_eq': 2})
|
||||
assert res == False
|
||||
res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})
|
||||
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'gte': 2})
|
||||
assert res == False
|
||||
|
||||
i.inc_counter('Inverter_Cnt') # is 3
|
||||
res = i.ignore_this_device({'reg':0xffffff00, 'less_eq': 2})
|
||||
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'less_eq': 2})
|
||||
assert res == True
|
||||
res = i.ignore_this_device({'reg':0xffffff00, 'gte': 2})
|
||||
res = i.ignore_this_device({'reg': Register.INVERTER_CNT, 'gte': 2})
|
||||
assert res == False
|
||||
|
||||
def test_table_definition():
|
||||
i = Infos()
|
||||
i.static_init() # initialize counter
|
||||
|
||||
val = i.dev_value(0xffffff04) # check internal error counter
|
||||
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
|
||||
assert val == 0
|
||||
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False, sug_area = 'roof'):
|
||||
pass
|
||||
# for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'):
|
||||
# pass
|
||||
for reg in Register:
|
||||
i.ha_conf(reg, ha_prfx="tsun/", node_id="garagendach/", snr='123', singleton=False, sug_area = 'roof') # noqa: E501
|
||||
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
|
||||
pass
|
||||
|
||||
val = i.dev_value(0xffffff04) # check internal error counter
|
||||
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
|
||||
pass # sideeffect is calling generator i.ha_proxy_confs()
|
||||
|
||||
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
|
||||
assert val == 0
|
||||
|
||||
# test missing 'fmt' value
|
||||
Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test1'], 'singleton': True, 'ha':{'dev':'proxy', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test1_'}}
|
||||
i.info_defs[Register.TEST_REG1] = {'name':['proxy', 'Internal_Test1'], 'singleton': True, 'ha':{'dev':'proxy', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test1_'}}
|
||||
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
|
||||
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
|
||||
if id == 'intern_test1_456':
|
||||
tests +=1
|
||||
|
||||
assert tests == 1
|
||||
|
||||
val = i.dev_value(0xffffff04) # check internal error counter
|
||||
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
|
||||
assert val == 1
|
||||
|
||||
# test missing 'dev' value
|
||||
Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
|
||||
i.info_defs[Register.TEST_REG1] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
|
||||
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
|
||||
if id == 'intern_test2_456':
|
||||
tests +=1
|
||||
|
||||
assert tests == 1
|
||||
|
||||
val = i.dev_value(0xffffff04) # check internal error counter
|
||||
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
|
||||
assert val == 2
|
||||
|
||||
|
||||
|
||||
# test invalid 'via' value
|
||||
Infos._Infos__info_devs['test_dev'] = {'via':'xyz', 'name':'Module PV1'}
|
||||
i.info_devs['test_dev'] = {'via':'xyz', 'name':'Module PV1'}
|
||||
|
||||
Infos._Infos__info_defs[0xfffffffe] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev':'test_dev', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
|
||||
i.info_defs[Register.TEST_REG1] = {'name':['proxy', 'Internal_Test2'], 'singleton': True, 'ha':{'dev':'test_dev', 'dev_cla': None, 'stat_cla': None, 'id':'intern_test2_', 'fmt':'| int'}}
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456', singleton=True, sug_area = 'roof'):
|
||||
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
|
||||
if id == 'intern_test2_456':
|
||||
tests +=1
|
||||
|
||||
assert tests == 1
|
||||
|
||||
val = i.dev_value(0xffffff04) # check internal error counter
|
||||
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
|
||||
assert val == 3
|
||||
|
||||
|
||||
def test_invalid_data_type(InvalidDataSeq):
|
||||
def test_table_remove():
|
||||
i = Infos()
|
||||
i.static_init() # initialize counter
|
||||
|
||||
val = i.dev_value(0xffffff03) # check invalid data type counter
|
||||
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
|
||||
assert val == 0
|
||||
|
||||
# for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'):
|
||||
# pass
|
||||
test = 0
|
||||
for reg in Register:
|
||||
res = i.ha_remove(reg, node_id="garagendach/", snr='123') # noqa: E501
|
||||
if reg == Register.INVERTER_STATUS:
|
||||
test += 1
|
||||
assert res == ('{}', 'sensor', 'garagendach/', 'inv_status_123')
|
||||
elif reg == Register.COLLECT_INTERVAL:
|
||||
test += 1
|
||||
assert res == ('{}', 'sensor', 'garagendach/', 'data_collect_intval_123')
|
||||
|
||||
assert test == 2
|
||||
val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
|
||||
assert val == 0
|
||||
|
||||
|
||||
for key, result in i.parse (InvalidDataSeq):
|
||||
pass
|
||||
assert json.dumps(i.db) == json.dumps({"inverter": {"Product_Name": "Microinv"}})
|
||||
def test_clr_at_midnight():
|
||||
i = Infos()
|
||||
i.static_init() # initialize counter
|
||||
i.set_db_def_value(Register.NO_INPUTS, 2)
|
||||
val = i.dev_value(Register.NO_INPUTS) # valid addr but not initiliazed
|
||||
assert val == 2
|
||||
i.info_defs[Register.TEST_REG1] = { # add a entry with incomplete ha definition
|
||||
'name': ['test', 'grp', 'REG_1'], 'ha': {'dev_cla': None }
|
||||
}
|
||||
i.reg_clr_at_midnight('tsun/inv_1/')
|
||||
# tsun/inv_2/input
|
||||
assert json.dumps(ClrAtMidnight.db['tsun/inv_1/total']) == json.dumps({'Daily_Generation': 0})
|
||||
assert json.dumps(ClrAtMidnight.db['tsun/inv_1/input']) == json.dumps({"pv1": {"Daily_Generation": 0}, "pv2": {"Daily_Generation": 0}})
|
||||
|
||||
val = i.dev_value(0xffffff03) # check invalid data type counter
|
||||
assert val == 1
|
||||
i.reg_clr_at_midnight('tsun/inv_1/')
|
||||
assert json.dumps(ClrAtMidnight.db['tsun/inv_1/total']) == json.dumps({'Daily_Generation': 0})
|
||||
assert json.dumps(ClrAtMidnight.db['tsun/inv_1/input']) == json.dumps({"pv1": {"Daily_Generation": 0}, "pv2": {"Daily_Generation": 0}})
|
||||
|
||||
test = 0
|
||||
for key, data in ClrAtMidnight.elm():
|
||||
if key == 'tsun/inv_1/total':
|
||||
assert json.dumps(data) == json.dumps({'Daily_Generation': 0})
|
||||
test += 1
|
||||
elif key == 'tsun/inv_1/input':
|
||||
assert json.dumps(data) == json.dumps({"pv1": {"Daily_Generation": 0}, "pv2": {"Daily_Generation": 0}})
|
||||
test += 1
|
||||
assert test == 2
|
||||
assert json.dumps(ClrAtMidnight.db) == json.dumps({})
|
||||
|
||||
i.reg_clr_at_midnight('tsun/inv_1/')
|
||||
|
||||
def test_pv_module_config():
|
||||
i = Infos()
|
||||
# i.set_db_def_value(Register.NO_INPUTS, 2)
|
||||
|
||||
dt = {
|
||||
'pv1':{'manufacturer':'TSUN1','type': 'Module 100W'},
|
||||
'pv2':{'manufacturer':'TSUN2'},
|
||||
'pv3':{'manufacturer':'TSUN3','type': 'Module 300W'},
|
||||
'pv4':{'type': 'Module 400W'},
|
||||
'pv5':{},
|
||||
}
|
||||
i.set_pv_module_details(dt)
|
||||
assert 'TSUN1' == i.dev_value(Register.PV1_MANUFACTURER)
|
||||
assert 'TSUN2' == i.dev_value(Register.PV2_MANUFACTURER)
|
||||
assert 'TSUN3' == i.dev_value(Register.PV3_MANUFACTURER)
|
||||
assert None == i.dev_value(Register.PV4_MANUFACTURER)
|
||||
assert None == i.dev_value(Register.PV5_MANUFACTURER)
|
||||
assert 'Module 100W' == i.dev_value(Register.PV1_MODEL)
|
||||
assert None == i.dev_value(Register.PV2_MODEL)
|
||||
assert 'Module 300W' == i.dev_value(Register.PV3_MODEL)
|
||||
assert 'Module 400W' == i.dev_value(Register.PV4_MODEL)
|
||||
assert None == i.dev_value(Register.PV5_MODEL)
|
||||
|
||||
def test_broken_info_defs():
|
||||
i = Infos()
|
||||
val = i.get_db_value(Register.NO_INPUTS, 666)
|
||||
assert val == 666
|
||||
i.info_defs[Register.TEST_REG1] = 'test' # add a string instead of a dict
|
||||
val = i.get_db_value(Register.TEST_REG1, 666)
|
||||
assert val == 666
|
||||
i.set_db_def_value(Register.TEST_REG1, 2)
|
||||
del i.info_defs[Register.TEST_REG1] # delete the broken entry
|
||||
|
||||
def test_get_value():
|
||||
i = Infos()
|
||||
assert None == i.get_db_value(Register.PV1_VOLTAGE, None)
|
||||
assert None == i.get_db_value(Register.PV2_VOLTAGE, None)
|
||||
|
||||
i.set_db_def_value(Register.PV1_VOLTAGE, 30)
|
||||
assert 30 == i.get_db_value(Register.PV1_VOLTAGE, None)
|
||||
assert None == i.get_db_value(Register.PV2_VOLTAGE, None)
|
||||
|
||||
i.set_db_def_value(Register.PV2_VOLTAGE, 30.3)
|
||||
assert 30 == i.get_db_value(Register.PV1_VOLTAGE, None)
|
||||
assert math.isclose(30.3,i.get_db_value(Register.PV2_VOLTAGE, None), rel_tol=1e-09, abs_tol=1e-09)
|
||||
|
||||
def test_update_value():
|
||||
i = Infos()
|
||||
assert None == i.get_db_value(Register.PV1_VOLTAGE, None)
|
||||
|
||||
keys = i.info_defs[Register.PV1_VOLTAGE]['name']
|
||||
_, update = i.update_db(keys, True, 30)
|
||||
assert update == True
|
||||
assert 30 == i.get_db_value(Register.PV1_VOLTAGE, None)
|
||||
|
||||
keys = i.info_defs[Register.PV1_VOLTAGE]['name']
|
||||
_, update = i.update_db(keys, True, 30)
|
||||
assert update == False
|
||||
assert 30 == i.get_db_value(Register.PV1_VOLTAGE, None)
|
||||
|
||||
keys = i.info_defs[Register.PV1_VOLTAGE]['name']
|
||||
_, update = i.update_db(keys, False, 29)
|
||||
assert update == True
|
||||
assert 29 == i.get_db_value(Register.PV1_VOLTAGE, None)
|
||||
|
||||
def test_key_obj():
|
||||
i = Infos()
|
||||
keys, level, unit, must_incr = i._key_obj(Register.PV1_VOLTAGE)
|
||||
assert keys == ['input', 'pv1', 'Voltage']
|
||||
assert level == logging.DEBUG
|
||||
assert unit == 'V'
|
||||
assert must_incr == False
|
||||
|
||||
keys, level, unit, must_incr = i._key_obj(Register.PV1_DAILY_GENERATION)
|
||||
assert keys == ['input', 'pv1', 'Daily_Generation']
|
||||
assert level == logging.DEBUG
|
||||
assert unit == 'kWh'
|
||||
assert must_incr == True
|
||||
|
||||
def test_hex4_cnv():
|
||||
tst_val = (0x12ef, )
|
||||
string = Fmt.hex4(tst_val)
|
||||
assert string == '12ef'
|
||||
val = Fmt.hex4(string, reverse=True)
|
||||
assert val == tst_val[0]
|
||||
|
||||
def test_mac_cnv():
|
||||
tst_val = (0x12, 0x34, 0x67, 0x89, 0xcd, 0xef)
|
||||
string = Fmt.mac(tst_val)
|
||||
assert string == '12:34:67:89:cd:ef'
|
||||
val = Fmt.mac(string, reverse=True)
|
||||
assert val == tst_val
|
||||
|
||||
def test_version_cnv():
|
||||
tst_val = (0x123f, )
|
||||
string = Fmt.version(tst_val)
|
||||
assert string == 'V1.2.3F'
|
||||
val = Fmt.version(string, reverse=True)
|
||||
assert val == tst_val[0]
|
||||
|
||||
522
app/tests/test_infos_g3.py
Normal file
522
app/tests/test_infos_g3.py
Normal file
@@ -0,0 +1,522 @@
|
||||
# test_with_pytest.py
|
||||
import pytest, json, math
|
||||
from infos import Register
|
||||
from gen3.infos_g3 import InfosG3, RegisterMap
|
||||
|
||||
@pytest.fixture
|
||||
def contr_data_seq(): # Get Time Request message
|
||||
msg = b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
|
||||
msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
|
||||
msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
|
||||
msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
|
||||
msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
|
||||
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def contr2_data_seq(): # Get Time Request message
|
||||
msg = b'\x00\x00\x00\x39\x00\x09\x2b\xa8\x54\x10\x52'
|
||||
msg += b'\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x32\x30\x00'
|
||||
msg += b'\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f\x6e\x00\x09\x2f\x90\x54'
|
||||
msg += b'\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88'
|
||||
msg += b'\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f'
|
||||
msg += b'\x6d\x00\x09\x5a\xec\x54\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61'
|
||||
msg += b'\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e'
|
||||
msg += b'\x63\x6f\x6d\x00\x0d\x2f\x00\x54\x10\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x32\xe8\x54\x10\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
|
||||
msg += b'\x0d\x36\xd0\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x3a\xb8\x54\x10\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x3e\xa0\x54'
|
||||
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\x00\x0d\x42\x88\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x46\x70\x54\x10\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x4a'
|
||||
msg += b'\x58\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\x00\x0d\x4e\x40\x54\x10\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x52\x28\x54\x10\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
|
||||
msg += b'\x0d\x56\x10\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x59\xf8\x54\x10\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x5d\xe0\x54'
|
||||
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\x00\x0d\x61\xc8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x65\xb0\x54\x10\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x69'
|
||||
msg += b'\x98\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\x00\x0d\x6d\x80\x54\x10\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x71\x68\x54\x10\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
|
||||
msg += b'\x0d\x75\x50\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x79\x38\x54\x10\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x7d\x20\x54'
|
||||
msg += b'\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\x00\x0d\x81\x08\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x84\xf0\x54\x10\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x88'
|
||||
msg += b'\xd8\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\x00\x0d\x8c\xc0\x54\x10\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x90\xa8\x54\x10\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00'
|
||||
msg += b'\x0d\x94\x90\x54\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\x00\x0d\x98\x78\x54\x10\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x0d\x9c\x60\x54'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
|
||||
msg += b'\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00\x00'
|
||||
msg += b'\x00\x10\x00\x0c\x96\xa8\x49\x00\x00\x01\x4e\x00\x0c\x7f\x38\x49'
|
||||
msg += b'\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8'
|
||||
msg += b'\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49\x00\x00\x00\x00\x00'
|
||||
msg += b'\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00'
|
||||
msg += b'\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00'
|
||||
msg += b'\x00\x13\x8d\x00\x09\x5b\x50\x49\x00\x00\x00\x02\x00\x0d\x04\x08'
|
||||
msg += b'\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c'
|
||||
msg += b'\x50\x59\x49\x00\x00\x00\x33\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
|
||||
msg += b'\x00\x0d\x23\x48\x49\xff\xff\xff\xff\x00\x0d\x27\x30\x49\xff\xff'
|
||||
msg += b'\xff\xff\x00\x0d\x2b\x18\x4c\x00\x00\x00\x00\x00\x00\xff\xff\x00'
|
||||
msg += b'\x0c\xa2\x60\x49\x00\x00\x00\x00\x00\x0d\xa0\x48\x49\x00\x00\x00'
|
||||
msg += b'\x00\x00\x0d\xa4\x30\x49\x00\x00\x00\x00\x00\x0d\xa8\x18\x49\x00'
|
||||
msg += b'\x00\x00\x00'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def inv_data_seq(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
|
||||
msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def invalid_data_seq(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x64\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
|
||||
msg += b'\x54\x10T170000000000001\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
def inv_data_seq2():  # Data indication from the controller
    """Full inverter measurement frame (register/type/value records).

    Opaque binary capture; the expected decoded values are pinned by the
    assertions in test_must_incr_total (e.g. Daily_Generation 1.7,
    Total_Generation 17.36, Inverter_Temp 23) — do not edit bytes here
    without updating those tests.
    """
    msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
    msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
    msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
    msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
    msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
    msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
    msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
    msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
    msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
    msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
    msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
    msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
    msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
    msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x17\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x02\x58\x00\x00\x06'
    msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x42\x81\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
    msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x42\x36\xcc\xcd\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
    msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x3f\xd9\x99\x9a'
    msg += b'\x00\x00\x0b\xb8\x46\x41\x8a\xe1\x48\x00\x00\x0c\x1c\x46\x3f\x8a\x3d\x71\x00\x00\x0c\x80\x46\x41\x1b\xd7\x0a\x00\x00\x0c\xe4\x46\x3f\x1e\xb8\x52\x00\x00\x0d\x48\x46'
    msg += b'\x40\xf3\xd7\x0a\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
    msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
    msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
    msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
    msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
    msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
    msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
    msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
    msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
    msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
    msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
    msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
    msg += b'\x53\x00\x00'
    return msg
|
||||
|
||||
@pytest.fixture
def inv_data_new():  # Data indication from DSP V5.0.17
    """Measurement frame as sent by the newer DSP firmware V5.0.17.

    Opaque binary capture with a different record layout than
    ``inv_data_seq2``; bytes must stay exactly as recorded from the
    device (no test in this chunk asserts its decoded values — verify
    against the consuming tests before changing).
    """
    msg = b'\x00\x00\x00\xa3\x00\x00\x00\x00\x53\x00\x00'
    msg += b'\x00\x00\x00\x80\x53\x00\x00\x00\x00\x01\x04\x53\x00\x00\x00\x00'
    msg += b'\x01\x90\x41\x00\x00\x01\x91\x53\x00\x00\x00\x00\x01\x90\x53\x00'
    msg += b'\x00\x00\x00\x01\x91\x53\x00\x00\x00\x00\x01\x90\x53\x00\x00\x00'
    msg += b'\x00\x01\x91\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01'
    msg += b'\x95\x53\x00\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53'
    msg += b'\x00\x00\x00\x00\x01\x80\x53\x00\x00\x00\x00\x01\x90\x41\x00\x00'
    msg += b'\x01\x94\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x96'
    msg += b'\x53\x00\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\xa0\x53\x00'
    msg += b'\x00\x00\x00\x01\xf0\x41\x00\x00\x01\xf1\x53\x00\x00\x00\x00\x01'
    msg += b'\xf4\x53\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00\x01\xf8\x53'
    msg += b'\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x00\x00\x53\x00\x00'
    msg += b'\x00\x00\x00\x01\x53\x00\x00\x00\x00\x00\x00\x53\x00\x00\x00\x00'
    msg += b'\x00\x01\x53\x00\x00\x00\x00\x00\x04\x53\x00\x00\x00\x00\x00\x58'
    msg += b'\x41\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00'
    msg += b'\x00\x02\x02\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02'
    msg += b'\x04\x53\x00\x00\x00\x00\x02\x58\x41\x00\x00\x02\x59\x53\x00\x00'
    msg += b'\x00\x00\x02\x40\x53\x00\x00\x00\x00\x02\x41\x53\x00\x00\x00\x00'
    msg += b'\x02\x40\x53\x00\x00\x00\x00\x02\x41\x53\x00\x00\x00\x00\x02\x44'
    msg += b'\x53\x00\x00\x00\x00\x02\x45\x53\x00\x00\x00\x00\x02\x60\x53\x00'
    msg += b'\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x60\x53\x00\x00\x00'
    msg += b'\x00\x02\x20\x41\x00\x00\x02\x24\x53\x00\x00\x00\x00\x02\x24\x53'
    msg += b'\x00\x00\x00\x00\x02\x26\x53\x00\x00\x00\x00\x02\x40\x53\x00\x00'
    msg += b'\x00\x00\x02\x40\x53\x00\x00\x00\x00\x02\x80\x41\x00\x00\x02\x81'
    msg += b'\x53\x00\x00\x00\x00\x02\x84\x53\x00\x00\x00\x00\x02\x85\x53\x00'
    msg += b'\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00'
    msg += b'\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02'
    msg += b'\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc4\x53'
    msg += b'\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x80\x53\x00\x00'
    msg += b'\x00\x00\x02\xc8\x42\x00\x00\x00\x00\x48\x42\x00\x00\x00\x00\x80'
    msg += b'\x42\x00\x00\x00\x00\x04\x53\x00\x00\x00\x00\x01\x20\x53\x00\x00'
    msg += b'\x00\x00\x01\x84\x53\x00\x10\x00\x00\x02\x40\x46\x00\x00\x00\x00'
    msg += b'\x00\x00\x04\x04\x46\x02\x00\x46\x02\x00\x00\x04\x00\x46\x00\x00'
    msg += b'\x00\x00\x00\x00\x05\x04\x42\x00\x00\x00\x05\x50\x42\x00\x00\x00'
    msg += b'\x00\x14\x42\x00\x00\x00\x00\x00\x46\x00\x00\x00\x00\x00\x00\x00'
    msg += b'\xa4\x46\x00\x00\x00\x00\x00\x00\x01\x00\x46\x00\x00\x00\x00\x00'
    msg += b'\x00\x01\x44\x46\x00\x00\x00\x00\x00\x00\x02\x00\x46\x00\x00\x00'
    msg += b'\x00\x00\x00\x08\x04\x46\x00\x00\x00\x00\x00\x00\x08\x90\x46\x00'
    msg += b'\x00\x00\x00\x00\x00\x08\x54\x46\x00\x00\x00\x00\x00\x00\x09\x20'
    msg += b'\x46\x00\x00\x00\x00\x00\x00\x08\x04\x46\x00\x00\x00\x00\x00\x00'
    msg += b'\x08\x00\x46\x00\x00\x00\x00\x00\x00\x08\x84\x46\x00\x00\x00\x00'
    msg += b'\x00\x00\x08\x40\x46\x00\x00\x00\x00\x00\x00\x09\x04\x46\x00\x00'
    msg += b'\x00\x00\x00\x00\x0a\x10\x46\x00\x00\x00\x00\x00\x00\x0c\x14\x46'
    msg += b'\x00\x00\x00\x00\x00\x00\x0c\x80\x46\x00\x00\x00\x00\x00\x00\x0c'
    msg += b'\x24\x42\x00\x00\x00\x0d\x00\x42\x00\x00\x00\x00\x04\x42\x00\x00'
    msg += b'\x00\x00\x00\x42\x00\x00\x00\x00\x44\x42\x00\x00\x00\x00\x10\x42'
    msg += b'\x00\x00\x00\x01\x14\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00'
    msg += b'\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x40\x53\x00\x00\x00\x00\x10'
    msg += b'\x04\x53\x00\x00\x00\x00\x11\x00\x53\x00\x00\x00\x00\x11\x84\x53'
    msg += b'\x00\x00\x00\x00\x10\x50\x53\xff\xff\x00\x00\x10\x14\x53\x03\x20'
    msg += b'\x00\x00\x10\x00\x53\x00\x00\x00\x00\x11\x24\x53\x00\x00\x00\x00'
    msg += b'\x03\x00\x53\x00\x00\x00\x00\x03\x64\x53\x00\x00\x00\x00\x04\x50'
    msg += b'\x53\x00\x00\x00\x00\x00\x34\x53\x00\x00\x00\x00\x00\x00\x42\x02'
    msg += b'\x00\x00\x01\x04\x42\x00\x00\x00\x21\x00\x42\x00\x00\x00\x21\x44'
    msg += b'\x42\x00\x00\x00\x22\x10\x53\x00\x00\x00\x00\x28\x14\x42\x01\x00'
    msg += b'\x00\x28\xa0\x46\x42\x48\x00\x00\x00\x00\x29\x04\x42\x00\x00\x00'
    msg += b'\x29\x40\x42\x00\x00\x00\x28\x04\x46\x42\x10\x00\x00\x00\x00\x28'
    msg += b'\x00\x42\x00\x00\x00\x28\x84\x42\x00\x00\x00\x28\x50\x42\x00\x00'
    msg += b'\x00\x29\x14\x42\x00\x00\x00\x2a\x00\x42\x00\x00\x00\x2c\x24\x46'
    msg += b'\x42\x10\x00\x00\x00\x00\x2c\x80\x42\x00\x00\x00\x2c\x44\x53\x00'
    msg += b'\x02\x00\x00\x2d\x00\x42\x00\x00\x00\x20\x04\x46\x42\x4d\x00\x00'
    msg += b'\x00\x00\x20\x10\x42\x00\x00\x00\x20\x54\x42\x00\x00\x00\x20\x20'
    msg += b'\x42\x00\x00\x00\x21\x04\x53\x00\x01\x00\x00\x22\x00\x42\x00\x00'
    msg += b'\x00\x30\x04\x42\x00\x00\x00\x30\x40\x53\x00\x00\x00\x00\x30\x04'
    msg += b'\x53\x00\x00\x00\x00\x31\x10\x42\x00\x00\x00\x31\x94\x53\x00\x04'
    msg += b'\x00\x00\x30\x00\x53\x00\x00\x00\x00\x30\x24\x53\x00\x00\x00\x00'
    msg += b'\x30\x00\x53\x00\x00\x00\x00\x31\x04\x53\x00\x00\x00\x00\x31\x80'
    msg += b'\x53\x00\x00\x00\x00\x32\x44\x53\x00\x00\x00\x00\x30\x00\x53\x00'
    msg += b'\x00\x00\x00\x30\x80\x53\x00\x00\x00\x00\x30\x00\x53\x00\x00\x00'
    msg += b'\x00\x30\x80\x53\x00\x00\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00'
    msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x03\x00'
    msg += b'\x00\x00\x00\x00'
    return msg
|
||||
|
||||
@pytest.fixture
def inv_data_seq2_zero():  # Data indication from the controller
    """Variant of ``inv_data_seq2`` with power/energy values zeroed.

    Used by the must-increment-total tests: totals in this frame are
    zero, so the parser must NOT overwrite previously seen non-zero
    Daily/Total_Generation values. Opaque binary capture — keep bytes
    in sync with the expectations in test_must_incr_total*.
    """
    msg = b'\x00\x00\x00\xa3\x00\x00\x00\x64\x53\x00\x01\x00\x00\x00\xc8\x53\x00\x02\x00\x00\x01\x2c\x53\x00\x00\x00\x00\x01\x90\x49\x00\x00\x00\x00\x00\x00\x01\x91\x53\x00\x00'
    msg += b'\x00\x00\x01\x92\x53\x00\x00\x00\x00\x01\x93\x53\x00\x00\x00\x00\x01\x94\x53\x00\x00\x00\x00\x01\x95\x53\x00\x00\x00\x00\x01\x96\x53\x00\x00\x00\x00\x01\x97\x53\x00'
    msg += b'\x00\x00\x00\x01\x98\x53\x00\x00\x00\x00\x01\x99\x53\x00\x00\x00\x00\x01\x9a\x53\x00\x00\x00\x00\x01\x9b\x53\x00\x00\x00\x00\x01\x9c\x53\x00\x00\x00\x00\x01\x9d\x53'
    msg += b'\x00\x00\x00\x00\x01\x9e\x53\x00\x00\x00\x00\x01\x9f\x53\x00\x00\x00\x00\x01\xa0\x53\x00\x00\x00\x00\x01\xf4\x49\x00\x00\x00\x00\x00\x00\x01\xf5\x53\x00\x00\x00\x00'
    msg += b'\x01\xf6\x53\x00\x00\x00\x00\x01\xf7\x53\x00\x00\x00\x00\x01\xf8\x53\x00\x00\x00\x00\x01\xf9\x53\x00\x00\x00\x00\x01\xfa\x53\x00\x00\x00\x00\x01\xfb\x53\x00\x00\x00'
    msg += b'\x00\x01\xfc\x53\x00\x00\x00\x00\x01\xfd\x53\x00\x00\x00\x00\x01\xfe\x53\x00\x00\x00\x00\x01\xff\x53\x00\x00\x00\x00\x02\x00\x53\x00\x00\x00\x00\x02\x01\x53\x00\x00'
    msg += b'\x00\x00\x02\x02\x53\x00\x00\x00\x00\x02\x03\x53\x00\x00\x00\x00\x02\x04\x53\x00\x00\x00\x00\x02\x58\x49\x00\x00\x00\x00\x00\x00\x02\x59\x53\x00\x00\x00\x00\x02\x5a'
    msg += b'\x53\x00\x00\x00\x00\x02\x5b\x53\x00\x00\x00\x00\x02\x5c\x53\x00\x00\x00\x00\x02\x5d\x53\x00\x00\x00\x00\x02\x5e\x53\x00\x00\x00\x00\x02\x5f\x53\x00\x00\x00\x00\x02'
    msg += b'\x60\x53\x00\x00\x00\x00\x02\x61\x53\x00\x00\x00\x00\x02\x62\x53\x00\x00\x00\x00\x02\x63\x53\x00\x00\x00\x00\x02\x64\x53\x00\x00\x00\x00\x02\x65\x53\x00\x00\x00\x00'
    msg += b'\x02\x66\x53\x00\x00\x00\x00\x02\x67\x53\x00\x00\x00\x00\x02\x68\x53\x00\x00\x00\x00\x02\xbc\x49\x00\x00\x00\x00\x00\x00\x02\xbd\x53\x00\x00\x00\x00\x02\xbe\x53\x00'
    msg += b'\x00\x00\x00\x02\xbf\x53\x00\x00\x00\x00\x02\xc0\x53\x00\x00\x00\x00\x02\xc1\x53\x00\x00\x00\x00\x02\xc2\x53\x00\x00\x00\x00\x02\xc3\x53\x00\x00\x00\x00\x02\xc4\x53'
    msg += b'\x00\x00\x00\x00\x02\xc5\x53\x00\x00\x00\x00\x02\xc6\x53\x00\x00\x00\x00\x02\xc7\x53\x00\x00\x00\x00\x02\xc8\x53\x00\x00\x00\x00\x02\xc9\x53\x00\x00\x00\x00\x02\xca'
    msg += b'\x53\x00\x00\x00\x00\x02\xcb\x53\x00\x00\x00\x00\x02\xcc\x53\x00\x00\x00\x00\x03\x20\x53\x00\x00\x00\x00\x03\x84\x53\x50\x11\x00\x00\x03\xe8\x46\x43\x61\x66\x66\x00'
    msg += b'\x00\x04\x4c\x46\x3e\xeb\x85\x1f\x00\x00\x04\xb0\x46\x42\x48\x14\x7b\x00\x00\x05\x14\x53\x00\x00\x00\x00\x05\x78\x53\x00\x00\x00\x00\x05\xdc\x53\x00\x00\x00\x00\x06'
    msg += b'\x40\x46\x42\xd3\x66\x66\x00\x00\x06\xa4\x46\x42\x06\x66\x66\x00\x00\x07\x08\x46\x3f\xf4\x7a\xe1\x00\x00\x07\x6c\x46\x00\x00\x00\x00\x00\x00\x07\xd0\x46\x42\x06\x00'
    msg += b'\x00\x00\x00\x08\x34\x46\x3f\xae\x14\x7b\x00\x00\x08\x98\x46\x00\x00\x00\x00\x00\x00\x08\xfc\x46\x00\x00\x00\x00\x00\x00\x09\x60\x46\x00\x00\x00\x00\x00\x00\x09\xc4'
    msg += b'\x46\x00\x00\x00\x00\x00\x00\x0a\x28\x46\x00\x00\x00\x00\x00\x00\x0a\x8c\x46\x00\x00\x00\x00\x00\x00\x0a\xf0\x46\x00\x00\x00\x00\x00\x00\x0b\x54\x46\x00\x00\x00\x00'
    msg += b'\x00\x00\x0b\xb8\x46\x00\x00\x00\x00\x00\x00\x0c\x1c\x46\x00\x00\x00\x00\x00\x00\x0c\x80\x46\x00\x00\x00\x00\x00\x00\x0c\xe4\x46\x00\x00\x00\x00\x00\x00\x0d\x48\x46'
    msg += b'\x00\x00\x00\x00\x00\x00\x0d\xac\x46\x00\x00\x00\x00\x00\x00\x0e\x10\x46\x00\x00\x00\x00\x00\x00\x0e\x74\x46\x00\x00\x00\x00\x00\x00\x0e\xd8\x46\x00\x00\x00\x00\x00'
    msg += b'\x00\x0f\x3c\x53\x00\x00\x00\x00\x0f\xa0\x53\x00\x00\x00\x00\x10\x04\x53\x55\xaa\x00\x00\x10\x68\x53\x00\x00\x00\x00\x10\xcc\x53\x00\x00\x00\x00\x11\x30\x53\x00\x00'
    msg += b'\x00\x00\x11\x94\x53\x00\x00\x00\x00\x11\xf8\x53\xff\xff\x00\x00\x12\x5c\x53\xff\xff\x00\x00\x12\xc0\x53\x00\x02\x00\x00\x13\x24\x53\xff\xff\x00\x00\x13\x88\x53\xff'
    msg += b'\xff\x00\x00\x13\xec\x53\xff\xff\x00\x00\x14\x50\x53\xff\xff\x00\x00\x14\xb4\x53\xff\xff\x00\x00\x15\x18\x53\xff\xff\x00\x00\x15\x7c\x53\x00\x00\x00\x00\x27\x10\x53'
    msg += b'\x00\x02\x00\x00\x27\x74\x53\x00\x3c\x00\x00\x27\xd8\x53\x00\x68\x00\x00\x28\x3c\x53\x05\x00\x00\x00\x28\xa0\x46\x43\x79\x00\x00\x00\x00\x29\x04\x46\x43\x48\x00\x00'
    msg += b'\x00\x00\x29\x68\x46\x42\x48\x33\x33\x00\x00\x29\xcc\x46\x42\x3e\x3d\x71\x00\x00\x2a\x30\x53\x00\x01\x00\x00\x2a\x94\x46\x43\x37\x00\x00\x00\x00\x2a\xf8\x46\x42\xce'
    msg += b'\x00\x00\x00\x00\x2b\x5c\x53\x00\x96\x00\x00\x2b\xc0\x53\x00\x10\x00\x00\x2c\x24\x46\x43\x90\x00\x00\x00\x00\x2c\x88\x46\x43\x95\x00\x00\x00\x00\x2c\xec\x53\x00\x06'
    msg += b'\x00\x00\x2d\x50\x53\x00\x06\x00\x00\x2d\xb4\x46\x43\x7d\x00\x00\x00\x00\x2e\x18\x46\x42\x3d\xeb\x85\x00\x00\x2e\x7c\x46\x42\x3d\xeb\x85\x00\x00\x2e\xe0\x53\x00\x03'
    msg += b'\x00\x00\x2f\x44\x53\x00\x03\x00\x00\x2f\xa8\x46\x42\x4d\xeb\x85\x00\x00\x30\x0c\x46\x42\x4d\xeb\x85\x00\x00\x30\x70\x53\x00\x03\x00\x00\x30\xd4\x53\x00\x03\x00\x00'
    msg += b'\x31\x38\x46\x42\x08\x00\x00\x00\x00\x31\x9c\x53\x00\x05\x00\x00\x32\x00\x53\x04\x00\x00\x00\x32\x64\x53\x00\x01\x00\x00\x32\xc8\x53\x13\x9c\x00\x00\x33\x2c\x53\x0f'
    msg += b'\xa0\x00\x00\x33\x90\x53\x00\x4f\x00\x00\x33\xf4\x53\x00\x66\x00\x00\x34\x58\x53\x03\xe8\x00\x00\x34\xbc\x53\x04\x00\x00\x00\x35\x20\x53\x00\x00\x00\x00\x35\x84\x53'
    msg += b'\x00\x00\x00\x00\x35\xe8\x53\x00\x00\x00\x00\x36\x4c\x53\x00\x00\x00\x01\x38\x80\x53\x00\x02\x00\x01\x38\x81\x53\x00\x01\x00\x01\x38\x82\x53\x00\x01\x00\x01\x38\x83'
    msg += b'\x53\x00\x00'
    return msg
|
||||
|
||||
|
||||
def test_parse_control(contr_data_seq):
    """One controller frame fills the collector and controller DB sections."""
    infos = InfosG3()
    # Drain the generator completely; parsing updates infos.db as a side effect.
    for _ in infos.parse(contr_data_seq):
        pass

    expected = {
        "collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"},
        "controller": {"Collect_Interval": 1, "Signal_Strength": 100, "Power_On_Time": 29, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300},
    }
    # Serialize both sides so nested ordering/typing is compared exactly.
    assert json.dumps(infos.db) == json.dumps(expected)
|
||||
|
||||
def test_parse_control2(contr2_data_seq):
    """A second controller frame (newer fw) yields matching DB values."""
    infos = InfosG3()
    # Drain the generator completely; parsing updates infos.db as a side effect.
    for _ in infos.parse(contr2_data_seq):
        pass

    expected = {
        "collector": {"Collector_Fw_Version": "RSW_400_V1.00.20", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"},
        "controller": {"Collect_Interval": 1, "Signal_Strength": 16, "Power_On_Time": 334, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300},
    }
    # Serialize both sides so nested ordering/typing is compared exactly.
    assert json.dumps(infos.db) == json.dumps(expected)
|
||||
|
||||
def test_parse_inverter(inv_data_seq):
    """An inverter data frame fills the inverter section of the DB."""
    infos = InfosG3()
    # Drain the generator completely; parsing updates infos.db as a side effect.
    for _ in infos.parse(inv_data_seq):
        pass

    expected = {
        "inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"},
    }
    # Serialize both sides so nested ordering/typing is compared exactly.
    assert json.dumps(infos.db) == json.dumps(expected)
|
||||
|
||||
def test_parse_cont_and_invert(contr_data_seq, inv_data_seq):
    """Controller and inverter frames parsed back-to-back merge into one DB."""
    infos = InfosG3()
    # Feed both frames through the same parser instance, draining each
    # generator fully (parsing updates infos.db as a side effect).
    for frame in (contr_data_seq, inv_data_seq):
        for _ in infos.parse(frame):
            pass

    expected = {
        "collector": {"Collector_Fw_Version": "RSW_400_V1.00.06", "Chip_Type": "Raymon", "Chip_Model": "RSW-1-10001", "Trace_URL": "t.raymoniot.com", "Logger_URL": "logger.talent-monitoring.com"},
        "controller": {"Collect_Interval": 1, "Signal_Strength": 100, "Power_On_Time": 29, "Communication_Type": 1, "Connect_Count": 1, "Data_Up_Interval": 300},
        "inverter": {"Product_Name": "Microinv", "Manufacturer": "TSUN", "Version": "V5.0.11", "Serial_Number": "T170000000000001", "Equipment_Model": "TSOL-MS600"},
    }
    # Serialize both sides so nested ordering/typing is compared exactly.
    assert json.dumps(infos.db) == json.dumps(expected)
|
||||
|
||||
|
||||
def test_build_ha_conf1(contr_data_seq):
    """Without any parsed data, ha_confs registers only the default entities.

    Expects exactly 4 matching configs (out_power, daily_gen, power_pv1,
    signal); pv2 must NOT appear because the number of inputs is unknown
    until a control data message has been parsed.
    """
    i = InfosG3()
    i.static_init() # initialize counter

    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123'):

        if id == 'out_power_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

        elif id == 'daily_gen_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

        elif id == 'power_pv1_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

        elif id == 'power_pv2_123':
            assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!

        elif id == 'signal_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "via_device": "proxy", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
        elif id == 'inv_count_456':
            # inv_count belongs to the proxy config set, not device configs.
            assert False

    assert tests==4
|
||||
|
||||
def test_build_ha_conf2(contr_data_seq):
    """ha_proxy_confs yields only the proxy-level entities (inv_count).

    None of the device-level ids (out_power, daily_gen, pv*, signal) may
    appear; exactly one matching config is expected.
    """
    i = InfosG3()
    i.static_init() # initialize counter

    tests = 0
    for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):

        if id == 'out_power_123':
            assert False
        elif id == 'daily_gen_123':
            assert False
        elif id == 'power_pv1_123':
            assert False
        elif id == 'power_pv2_123':
            assert False # if we haven't received and parsed a control data msg, we don't know the number of inputs. In this case we only register the first one!!

        elif id == 'signal_123':
            assert False
        elif id == 'inv_count_456':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

    assert tests==1
|
||||
|
||||
def test_build_ha_conf3(contr_data_seq, inv_data_seq, inv_data_seq2):
    """After parsing controller + inverter + measurement frames, ha_confs
    emits fully-populated device configs.

    With full device info available, pv2 IS registered (two inputs) and
    the device blocks carry model/manufacturer/fw/serial plus the
    ``sug_area`` suffix (" - roof"). Exactly 5 matching configs expected.
    """
    i = InfosG3()
    # Drain each parse generator; the DB update is a side effect of iteration.
    for key, result in i.parse (contr_data_seq):
        pass # side effect in calling i.parse()
    for key, result in i.parse (inv_data_seq):
        pass # side effect in calling i.parse()
    for key, result in i.parse (inv_data_seq2):
        pass # side effect in calling i.parse()

    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'):

        if id == 'out_power_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "sn": "T170000000000001", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

        # Fix: was a stray `if`, breaking the elif chain convention used by
        # every sibling branch (behavior unchanged — ids are mutually
        # exclusive — but the second chain was re-evaluated needlessly).
        elif id == 'daily_gen_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter - roof", "sa": "Micro Inverter - roof", "via_device": "controller_123", "mdl": "TSOL-MS600", "mf": "TSUN", "sw": "V5.0.11", "sn": "T170000000000001", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

        elif id == 'power_pv1_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1 - roof", "sa": "Module PV1 - roof", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

        elif id == 'power_pv2_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV2 - roof", "sa": "Module PV2 - roof", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1

        elif id == 'signal_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
    assert tests==5
|
||||
|
||||
def test_build_ha_conf4(contr_data_seq, inv_data_seq):
    """MAC_ADDR from the DB is exposed as a "cns" (connections) entry on the
    controller device, normalized to colon-separated form.

    First pass: raw hex string "00a057123456" must be rendered as
    "00:a0:57:12:34:56". Second pass: an already-colon-separated value is
    taken over as-is.
    """
    i = InfosG3()
    # Drain each parse generator; the DB update is a side effect of iteration.
    for key, result in i.parse (contr_data_seq):
        pass # side effect in calling i.parse()
    for key, result in i.parse (inv_data_seq):
        pass # side effect in calling i.parse()
    i.set_db_def_value(Register.MAC_ADDR, "00a057123456")

    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'):
        if id == 'signal_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"], "cns": [["mac", "00:a0:57:12:34:56"]]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
    assert tests==1

    # Overwrite with an already-formatted MAC; the new value must win.
    i.set_db_def_value(Register.MAC_ADDR, "00:a0:57:12:34:57")

    tests = 0
    for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123', sug_area = 'roof'):
        if id == 'signal_123':
            assert comp == 'sensor'
            assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller - roof", "sa": "Controller - roof", "via_device": "proxy", "mdl": "RSW-1-10001", "mf": "Raymon", "sw": "RSW_400_V1.00.06", "ids": ["controller_123"], "cns": [["mac", "00:a0:57:12:34:57"]]}, "o": {"name": "proxy", "sw": "unknown"}})
            tests +=1
    assert tests==1
|
||||
|
||||
def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero):
    """Total/energy values must never decrease between frames.

    Pass 1: first measurement frame -> all 'total'/'inverter'/'env' keys
    report update=True. Pass 2: identical frame -> 'total'/'env' report
    update=False and DB values are unchanged. Pass 3: zeroed frame -> the
    zero totals must NOT overwrite the stored non-zero generation values
    (only instantaneous Power and Inverter_Temp drop to 0).
    """
    i = InfosG3()
    tests = 0
    for key, update in i.parse (inv_data_seq2):
        if key == 'total' or key == 'inverter' or key == 'env':
            assert update == True
            tests +=1
    assert tests==12
    assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 23})
    # Pass 2: re-parsing the identical frame must not report updates.
    tests = 0
    for key, update in i.parse (inv_data_seq2):
        if key == 'total' or key == 'env':
            assert update == False
            tests +=1

    assert tests==4
    assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 23})
    assert json.dumps(i.db['inverter']) == json.dumps({"Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Work_Mode": 0, "Max_Designed_Power": -1, "Input_Coefficient": -0.1, "Output_Coefficient": 100.0, "No_Inputs": 2})

    # Pass 3: zeroed frame — stored generation totals must survive.
    tests = 0
    for key, update in i.parse (inv_data_seq2_zero):
        if key == 'total':
            assert update == False
            tests +=1
        elif key == 'env':
            tests +=1

    assert tests==4
    assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
    assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
    assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 0})
|
||||
|
||||
def test_must_incr_total2(inv_data_seq2, inv_data_seq2_zero):
|
||||
i = InfosG3()
|
||||
tests = 0
|
||||
for key, update in i.parse (inv_data_seq2_zero):
|
||||
if key == 'total':
|
||||
assert update == False
|
||||
tests +=1
|
||||
elif key == 'env':
|
||||
assert update == True
|
||||
tests +=1
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 0})
|
||||
|
||||
tests = 0
|
||||
for key, update in i.parse (inv_data_seq2_zero):
|
||||
if key == 'total' or key == 'env':
|
||||
assert update == False
|
||||
tests +=1
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 0.0}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 0.0}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 0})
|
||||
|
||||
tests = 0
|
||||
for key, update in i.parse (inv_data_seq2):
|
||||
if key == 'total' or key == 'env':
|
||||
tests +=1
|
||||
|
||||
assert tests==4
|
||||
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}})
|
||||
|
||||
def test_new_data_types(inv_data_new):
|
||||
i = InfosG3()
|
||||
tests = 0
|
||||
for key, update in i.parse (inv_data_new):
|
||||
if key == 'events':
|
||||
tests +=1
|
||||
elif key == 'inverter':
|
||||
assert update == True
|
||||
tests +=1
|
||||
elif key == 'input':
|
||||
assert update == False
|
||||
tests +=1
|
||||
else:
|
||||
assert False
|
||||
|
||||
assert tests==7
|
||||
assert json.dumps(i.db['inverter']) == json.dumps({"Manufacturer": 0, "DSP_STATUS": 0})
|
||||
assert json.dumps(i.db['input']) == json.dumps({"pv1": {}})
|
||||
assert json.dumps(i.db['events']) == json.dumps({"Inverter_Alarm": 0, "Inverter_Fault": 0})
|
||||
|
||||
def test_invalid_data_type(invalid_data_seq):
|
||||
i = InfosG3()
|
||||
i.static_init() # initialize counter
|
||||
|
||||
val = i.dev_value(Register.INVALID_DATA_TYPE) # check invalid data type counter
|
||||
assert val == 0
|
||||
|
||||
|
||||
for key, result in i.parse (invalid_data_seq):
|
||||
pass # side effect in calling i.parse()
|
||||
assert json.dumps(i.db) == json.dumps({"inverter": {"Product_Name": "Microinv"}})
|
||||
|
||||
val = i.dev_value(Register.INVALID_DATA_TYPE) # check invalid data type counter
|
||||
assert val == 1
|
||||
338
app/tests/test_infos_g3p.py
Normal file
338
app/tests/test_infos_g3p.py
Normal file
@@ -0,0 +1,338 @@
|
||||
|
||||
# test_with_pytest.py
|
||||
import pytest, json, math, random
|
||||
from infos import Register
|
||||
from gen3plus.infos_g3p import InfosG3P
|
||||
from gen3plus.infos_g3p import RegisterMap
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def str_test_ip():
|
||||
ip = ".".join(str(random.randint(1, 254)) for _ in range(4))
|
||||
print(f'random_ip: {ip}')
|
||||
return ip
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def bytes_test_ip(str_test_ip):
|
||||
ip = bytes(str.encode(str_test_ip))
|
||||
l = len(ip)
|
||||
if l < 16:
|
||||
ip = ip + bytearray(16-l)
|
||||
print(f'random_ip: {ip}')
|
||||
return ip
|
||||
|
||||
@pytest.fixture
|
||||
def device_data(bytes_test_ip): # 0x4110 ftype: 0x02
|
||||
msg = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xba\xd2\x00\x00'
|
||||
msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53'
|
||||
msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e'
|
||||
msg += b'\x30\x35\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x40\x2a\x8f\x4f\x51\x54' + bytes_test_ip
|
||||
msg += b'\x0f\x00\x01\xb0'
|
||||
msg += b'\x02\x0f\x00\xff\x56\x31\x2e\x31\x2e\x30\x30\x2e\x30\x42\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x41\x6c\x6c\x69\x75\x73\x2d\x48\x6f'
|
||||
msg += b'\x6d\x65\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def inverter_data(): # 0x4210 ftype: 0x01
|
||||
msg = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xb0\x02\xbc\xc8'
|
||||
msg += b'\x24\x32\x6c\x1f\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
|
||||
msg += b'\x59\x31\x37\x45\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x45'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x36\x00\x00\x02\x58\x06\x7a'
|
||||
msg += b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd'
|
||||
msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04'
|
||||
msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75'
|
||||
|
||||
msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\xff\xff\x07\xd0\x00\x03\x04\x00\x04\x00\x04\x00'
|
||||
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
|
||||
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
|
||||
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
|
||||
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
|
||||
msg += b'\x04\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
|
||||
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00'
|
||||
return msg
|
||||
|
||||
|
||||
def test_default_db():
|
||||
i = InfosG3P(client_mode=False)
|
||||
|
||||
assert json.dumps(i.db) == json.dumps({
|
||||
"inverter": {"Manufacturer": "TSUN", "Equipment_Model": "TSOL-MSxx00", "No_Inputs": 4},
|
||||
"collector": {"Chip_Type": "IGEN TECH"},
|
||||
})
|
||||
|
||||
def test_parse_4110(str_test_ip, device_data: bytes):
|
||||
i = InfosG3P(client_mode=False)
|
||||
i.db.clear()
|
||||
for key, update in i.parse (device_data, 0x41, 2):
|
||||
pass # side effect is calling generator i.parse()
|
||||
|
||||
assert json.dumps(i.db) == json.dumps({
|
||||
'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": str_test_ip, "Sensor_List": "02b0", "WiFi_SSID": "Allius-Home"},
|
||||
'collector': {"Chip_Model": "LSW5BLE_17_02B0_1.05", "MAC-Addr": "40:2a:8f:4f:51:54", "Collector_Fw_Version": "V1.1.00.0B"},
|
||||
})
|
||||
|
||||
def test_build_4110(str_test_ip, device_data: bytes):
|
||||
i = InfosG3P(client_mode=False)
|
||||
i.db.clear()
|
||||
for key, update in i.parse (device_data, 0x41, 2):
|
||||
pass # side effect is calling generator i.parse()
|
||||
|
||||
build_msg = i.build(len(device_data), 0x41, 2)
|
||||
for i in range(11, 20):
|
||||
build_msg[i] = device_data[i]
|
||||
assert device_data == build_msg
|
||||
|
||||
def test_parse_4210(inverter_data: bytes):
|
||||
i = InfosG3P(client_mode=False)
|
||||
i.db.clear()
|
||||
|
||||
for key, update in i.parse (inverter_data, 0x42, 1):
|
||||
pass # side effect is calling generator i.parse()
|
||||
|
||||
assert json.dumps(i.db) == json.dumps({
|
||||
"controller": {"Sensor_List": "02b0", "Power_On_Time": 2051},
|
||||
"inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Work_Mode": 0, "Max_Designed_Power": 2000, "Input_Coefficient": 100.0, "Output_Coefficient": 100.0},
|
||||
"env": {"Inverter_Status": 1, "Detect_Status_1": 2, "Detect_Status_2": 0, "Inverter_Temp": 14},
|
||||
"events": {"Inverter_Alarm": 0, "Inverter_Fault": 0, "Inverter_Bitfield_1": 0, "Inverter_bitfield_2": 0},
|
||||
"grid": {"Voltage": 224.8, "Current": 0.73, "Frequency": 50.05, "Output_Power": 165.8},
|
||||
"input": {"pv1": {"Voltage": 35.3, "Current": 1.68, "Power": 59.6, "Daily_Generation": 0.04, "Total_Generation": 30.76},
|
||||
"pv2": {"Voltage": 34.6, "Current": 1.38, "Power": 48.4, "Daily_Generation": 0.03, "Total_Generation": 27.91},
|
||||
"pv3": {"Voltage": 34.6, "Current": 1.89, "Power": 65.5, "Daily_Generation": 0.05, "Total_Generation": 31.89},
|
||||
"pv4": {"Voltage": 1.7, "Current": 0.01, "Power": 0.0, "Total_Generation": 15.58}},
|
||||
"total": {"Daily_Generation": 0.11, "Total_Generation": 101.36},
|
||||
"inv_unknown": {"Unknown_1": 512},
|
||||
"other": {"Output_Shutdown": 65535, "Rated_Level": 3, "Grid_Volt_Cal_Coef": 1024, "Prod_Compliance_Type": 6}
|
||||
})
|
||||
|
||||
def test_build_4210(inverter_data: bytes):
|
||||
i = InfosG3P(client_mode=False)
|
||||
i.db.clear()
|
||||
|
||||
for key, update in i.parse (inverter_data, 0x42, 1):
|
||||
pass # side effect is calling generator i.parse()
|
||||
|
||||
build_msg = i.build(len(inverter_data), 0x42, 1)
|
||||
for i in range(11, 31):
|
||||
build_msg[i] = inverter_data[i]
|
||||
assert inverter_data == build_msg
|
||||
|
||||
def test_build_ha_conf1():
|
||||
i = InfosG3P(client_mode=False)
|
||||
i.static_init() # initialize counter
|
||||
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123'):
|
||||
|
||||
if id == 'out_power_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "mdl": "TSOL-MSxx00", "mf": "TSUN", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'daily_gen_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "mdl": "TSOL-MSxx00", "mf": "TSUN", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv1_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv2_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV2", "sa": "Module PV2", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv3_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv3_123", "val_tpl": "{{ (value_json['pv3']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV3", "sa": "Module PV3", "via_device": "inverter_123", "ids": ["input_pv3_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv4_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv4_123", "val_tpl": "{{ (value_json['pv4']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV4", "sa": "Module PV4", "via_device": "inverter_123", "ids": ["input_pv4_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'signal_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Signal Strength", "stat_t": "tsun/garagendach/controller", "dev_cla": None, "stat_cla": "measurement", "uniq_id": "signal_123", "val_tpl": "{{value_json[\'Signal_Strength\'] | int}}", "unit_of_meas": "%", "ic": "mdi:wifi", "dev": {"name": "Controller", "sa": "Controller", "via_device": "proxy", "mf": "IGEN TECH", "ids": ["controller_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
elif id == 'inv_count_456':
|
||||
assert False
|
||||
|
||||
assert tests==7
|
||||
|
||||
def test_build_ha_conf2():
|
||||
i = InfosG3P(client_mode=False)
|
||||
i.static_init() # initialize counter
|
||||
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
|
||||
|
||||
if id == 'out_power_123':
|
||||
assert False
|
||||
elif id == 'daily_gen_123':
|
||||
assert False
|
||||
elif id == 'power_pv1_123':
|
||||
assert False
|
||||
elif id == 'power_pv2_123':
|
||||
assert False
|
||||
elif id == 'power_pv3_123':
|
||||
assert False
|
||||
elif id == 'power_pv4_123':
|
||||
assert False
|
||||
elif id == 'signal_123':
|
||||
assert False
|
||||
elif id == 'inv_count_456':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
assert tests==1
|
||||
|
||||
def test_build_ha_conf3():
|
||||
i = InfosG3P(client_mode=True)
|
||||
i.static_init() # initialize counter
|
||||
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_confs(ha_prfx="tsun/", node_id="garagendach/", snr='123'):
|
||||
|
||||
if id == 'out_power_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/grid", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "out_power_123", "val_tpl": "{{value_json['Output_Power'] | float}}", "unit_of_meas": "W", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "mdl": "TSOL-MSxx00", "mf": "TSUN", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'daily_gen_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Daily Generation", "stat_t": "tsun/garagendach/total", "dev_cla": "energy", "stat_cla": "total_increasing", "uniq_id": "daily_gen_123", "val_tpl": "{{value_json['Daily_Generation'] | float}}", "unit_of_meas": "kWh", "ic": "mdi:solar-power-variant", "dev": {"name": "Micro Inverter", "sa": "Micro Inverter", "via_device": "controller_123", "mdl": "TSOL-MSxx00", "mf": "TSUN", "ids": ["inverter_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv1_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv1_123", "val_tpl": "{{ (value_json['pv1']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV1", "sa": "Module PV1", "via_device": "inverter_123", "ids": ["input_pv1_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv2_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv2_123", "val_tpl": "{{ (value_json['pv2']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV2", "sa": "Module PV2", "via_device": "inverter_123", "ids": ["input_pv2_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv3_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv3_123", "val_tpl": "{{ (value_json['pv3']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV3", "sa": "Module PV3", "via_device": "inverter_123", "ids": ["input_pv3_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'power_pv4_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Power", "stat_t": "tsun/garagendach/input", "dev_cla": "power", "stat_cla": "measurement", "uniq_id": "power_pv4_123", "val_tpl": "{{ (value_json['pv4']['Power'] | float)}}", "unit_of_meas": "W", "dev": {"name": "Module PV4", "sa": "Module PV4", "via_device": "inverter_123", "ids": ["input_pv4_123"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
elif id == 'signal_123':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({})
|
||||
tests +=1
|
||||
elif id == 'inv_count_456':
|
||||
assert False
|
||||
|
||||
assert tests==7
|
||||
|
||||
def test_build_ha_conf4():
|
||||
i = InfosG3P(client_mode=True)
|
||||
i.static_init() # initialize counter
|
||||
|
||||
tests = 0
|
||||
for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
|
||||
|
||||
if id == 'out_power_123':
|
||||
assert False
|
||||
elif id == 'daily_gen_123':
|
||||
assert False
|
||||
elif id == 'power_pv1_123':
|
||||
assert False
|
||||
elif id == 'power_pv2_123':
|
||||
assert False
|
||||
elif id == 'power_pv3_123':
|
||||
assert False
|
||||
elif id == 'power_pv4_123':
|
||||
assert False
|
||||
elif id == 'signal_123':
|
||||
assert False
|
||||
elif id == 'inv_count_456':
|
||||
assert comp == 'sensor'
|
||||
assert d_json == json.dumps({"name": "Active Inverter Connections", "stat_t": "tsun/proxy/proxy", "dev_cla": None, "stat_cla": None, "uniq_id": "inv_count_456", "val_tpl": "{{value_json['Inverter_Cnt'] | int}}", "ic": "mdi:counter", "dev": {"name": "Proxy", "sa": "Proxy", "mdl": "proxy", "mf": "Stefan Allius", "sw": "unknown", "ids": ["proxy"]}, "o": {"name": "proxy", "sw": "unknown"}})
|
||||
tests +=1
|
||||
|
||||
assert tests==1
|
||||
|
||||
def test_exception_and_calc(inverter_data: bytes):
|
||||
|
||||
# patch table to convert temperature from °F to °C
|
||||
ofs = RegisterMap.map[0x420100d8]['offset']
|
||||
RegisterMap.map[0x420100d8]['quotient'] = 1.8
|
||||
RegisterMap.map[0x420100d8]['offset'] = -32/1.8
|
||||
# map PV1_VOLTAGE to invalid register
|
||||
RegisterMap.map[0x420100e0]['reg'] = Register.TEST_REG2
|
||||
# set invalid maping entry for OUTPUT_POWER (string instead of dict type)
|
||||
backup = RegisterMap.map[0x420100de]
|
||||
RegisterMap.map[0x420100de] = 'invalid_entry'
|
||||
|
||||
i = InfosG3P(client_mode=False)
|
||||
i.db.clear()
|
||||
|
||||
for key, update in i.parse (inverter_data, 0x42, 1):
|
||||
pass # side effect is calling generator i.parse()
|
||||
assert math.isclose(12.2222, round (i.get_db_value(Register.INVERTER_TEMP, 0),4), rel_tol=1e-09, abs_tol=1e-09)
|
||||
|
||||
build_msg = i.build(len(inverter_data), 0x42, 1)
|
||||
assert build_msg[32:0xde] == inverter_data[32:0xde]
|
||||
assert build_msg[0xde:0xe2] == b'\x00\x00\x00\x00'
|
||||
assert build_msg[0xe2:-1] == inverter_data[0xe2:-1]
|
||||
|
||||
|
||||
# remove a table entry and test parsing and building
|
||||
del RegisterMap.map[0x420100d8]['quotient']
|
||||
del RegisterMap.map[0x420100d8]['offset']
|
||||
|
||||
i.db.clear()
|
||||
|
||||
for key, update in i.parse (inverter_data, 0x42, 1):
|
||||
pass # side effect is calling generator i.parse()
|
||||
assert 54 == i.get_db_value(Register.INVERTER_TEMP, 0)
|
||||
|
||||
build_msg = i.build(len(inverter_data), 0x42, 1)
|
||||
assert build_msg[32:0xd8] == inverter_data[32:0xd8]
|
||||
assert build_msg[0xd8:0xe2] == b'\x006\x00\x00\x02X\x00\x00\x00\x00'
|
||||
assert build_msg[0xe2:-1] == inverter_data[0xe2:-1]
|
||||
|
||||
# test restore table
|
||||
RegisterMap.map[0x420100d8]['offset'] = ofs
|
||||
RegisterMap.map[0x420100e0]['reg'] = Register.PV1_VOLTAGE # reset mapping
|
||||
RegisterMap.map[0x420100de] = backup # reset mapping
|
||||
|
||||
# test orginial table
|
||||
i.db.clear()
|
||||
for key, update in i.parse (inverter_data, 0x42, 1):
|
||||
pass # side effect is calling generator i.parse()
|
||||
assert 14 == i.get_db_value(Register.INVERTER_TEMP, 0)
|
||||
|
||||
build_msg = i.build(len(inverter_data), 0x42, 1)
|
||||
assert build_msg[32:-1] == inverter_data[32:-1]
|
||||
417
app/tests/test_inverter_base.py
Normal file
417
app/tests/test_inverter_base.py
Normal file
@@ -0,0 +1,417 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
import gc
|
||||
|
||||
from mock import patch
|
||||
from enum import Enum
|
||||
from infos import Infos
|
||||
from cnf.config import Config
|
||||
from gen3.talent import Talent
|
||||
from inverter_base import InverterBase
|
||||
from singleton import Singleton
|
||||
from async_stream import AsyncStream, AsyncStreamClient
|
||||
|
||||
from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
# initialize the proxy statistics
|
||||
Infos.static_init()
|
||||
|
||||
@pytest.fixture
|
||||
def config_conn():
|
||||
Config.act_config = {
|
||||
'mqtt':{
|
||||
'host': test_hostname,
|
||||
'port': test_port,
|
||||
'user': '',
|
||||
'passwd': ''
|
||||
},
|
||||
'ha':{
|
||||
'auto_conf_prefix': 'homeassistant',
|
||||
'discovery_prefix': 'homeassistant',
|
||||
'entity_prefix': 'tsun',
|
||||
'proxy_node_id': 'test_1',
|
||||
'proxy_unique_id': ''
|
||||
},
|
||||
'tsun':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234}, 'inverters':{'allow_all':True}
|
||||
}
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def module_init():
|
||||
Singleton._instances.clear()
|
||||
yield
|
||||
|
||||
class FakeReader():
|
||||
def __init__(self):
|
||||
self.on_recv = asyncio.Event()
|
||||
async def read(self, max_len: int):
|
||||
await self.on_recv.wait()
|
||||
return b''
|
||||
def feed_eof(self):
|
||||
return
|
||||
|
||||
|
||||
class FakeWriter():
|
||||
peer = ('47.1.2.3', 10000)
|
||||
def write(self, buf: bytes):
|
||||
return
|
||||
def get_extra_info(self, sel: str):
|
||||
if sel == 'peername':
|
||||
return self.peer
|
||||
elif sel == 'sockname':
|
||||
return 'sock:1234'
|
||||
assert False
|
||||
def is_closing(self):
|
||||
return False
|
||||
def close(self):
|
||||
return
|
||||
async def wait_closed(self):
|
||||
return
|
||||
|
||||
class MockType(Enum):
|
||||
RD_TEST_0_BYTES = 1
|
||||
RD_TEST_TIMEOUT = 2
|
||||
RD_TEST_EXCEPT = 3
|
||||
|
||||
|
||||
test = MockType.RD_TEST_0_BYTES
|
||||
|
||||
@pytest.fixture
|
||||
def patch_open_connection():
|
||||
async def new_conn(conn):
|
||||
await asyncio.sleep(0)
|
||||
return FakeReader(), FakeWriter()
|
||||
|
||||
def new_open(host: str, port: int):
|
||||
global test
|
||||
if test == MockType.RD_TEST_TIMEOUT:
|
||||
raise ConnectionRefusedError
|
||||
elif test == MockType.RD_TEST_EXCEPT:
|
||||
raise ValueError("Value cannot be negative") # Compliant
|
||||
return new_conn(None)
|
||||
|
||||
with patch.object(asyncio, 'open_connection', new_open) as conn:
|
||||
yield conn
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def patch_healthy():
|
||||
with patch.object(AsyncStream, 'healthy') as conn:
|
||||
yield conn
|
||||
|
||||
@pytest.fixture
|
||||
def patch_unhealthy():
|
||||
def new_healthy(self):
|
||||
return False
|
||||
with patch.object(AsyncStream, 'healthy', new_healthy) as conn:
|
||||
yield conn
|
||||
@pytest.fixture
|
||||
def patch_unhealthy_remote():
|
||||
def new_healthy(self):
|
||||
return False
|
||||
with patch.object(AsyncStreamClient, 'healthy', new_healthy) as conn:
|
||||
yield conn
|
||||
|
||||
def test_inverter_iter():
|
||||
InverterBase._registry.clear()
|
||||
cnt = 0
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
for inv in InverterBase:
|
||||
assert inv == inverter
|
||||
cnt += 1
|
||||
del inv
|
||||
del inverter
|
||||
assert cnt == 1
|
||||
|
||||
for inv in InverterBase:
|
||||
assert False
|
||||
|
||||
def test_method_calls(patch_healthy):
|
||||
spy = patch_healthy
|
||||
InverterBase._registry.clear()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
# call healthy inside the contexter manager
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy()
|
||||
del inv
|
||||
spy.assert_called_once()
|
||||
|
||||
# outside context manager the health function of AsyncStream is not reachable
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy()
|
||||
cnt += 1
|
||||
del inv
|
||||
assert cnt == 1
|
||||
spy.assert_called_once() # counter don't increase and keep one!
|
||||
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
def test_unhealthy(patch_unhealthy):
|
||||
_ = patch_unhealthy
|
||||
InverterBase._registry.clear()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
# call healthy inside the contexter manager
|
||||
assert not inverter.healthy()
|
||||
|
||||
# outside context manager the unhealth AsyncStream is released
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy() # inverter is healthy again (without the unhealty AsyncStream)
|
||||
cnt += 1
|
||||
del inv
|
||||
assert cnt == 1
|
||||
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
def test_unhealthy_remote(patch_unhealthy_remote):
|
||||
_ = patch_unhealthy
|
||||
InverterBase._registry.clear()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
# call healthy inside the contexter manager
|
||||
assert not inverter.healthy()
|
||||
|
||||
# outside context manager the unhealth AsyncStream is released
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy() # inverter is healthy again (without the unhealty AsyncStream)
|
||||
cnt += 1
|
||||
del inv
|
||||
assert cnt == 1
|
||||
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_conn(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream
|
||||
assert inverter.remote.ifc
|
||||
# call healthy inside the contexter manager
|
||||
assert inverter.healthy()
|
||||
|
||||
# call healthy outside the contexter manager (__exit__() was called)
|
||||
assert inverter.healthy()
|
||||
del inverter
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_conn_to_private(config_conn, patch_open_connection):
|
||||
'''check DNS resolving of the TSUN FQDN to a local address'''
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
InverterBase._registry.clear()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
FakeWriter.peer = ("192.168.0.1", 10000)
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert not Config.act_config['tsun']['enabled']
|
||||
assert inverter.remote.stream
|
||||
assert inverter.remote.ifc
|
||||
assert inverter.local.ifc.healthy()
|
||||
|
||||
# outside context manager the unhealth AsyncStream is released
|
||||
FakeWriter.peer = ("47.1.2.3", 10000)
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy() # inverter is healthy again (without the unhealty AsyncStream)
|
||||
cnt += 1
|
||||
del inv
|
||||
assert cnt == 1
|
||||
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_conn_to_loopback(config_conn, patch_open_connection):
|
||||
'''check DNS resolving of the TSUN FQDN to the loopback address'''
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
InverterBase._registry.clear()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
FakeWriter.peer = ("127.0.0.1", 10000)
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert not Config.act_config['tsun']['enabled']
|
||||
assert inverter.remote.stream
|
||||
assert inverter.remote.ifc
|
||||
assert inverter.local.ifc.healthy()
|
||||
|
||||
# outside context manager the unhealth AsyncStream is released
|
||||
FakeWriter.peer = ("47.1.2.3", 10000)
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy() # inverter is healthy again (without the unhealty AsyncStream)
|
||||
cnt += 1
|
||||
del inv
|
||||
assert cnt == 1
|
||||
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_conn_to_None(config_conn, patch_open_connection):
|
||||
'''check if get_extra_info() return None in case of an error'''
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
InverterBase._registry.clear()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
FakeWriter.peer = None
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert Config.act_config['tsun']['enabled']
|
||||
assert inverter.remote.stream
|
||||
assert inverter.remote.ifc
|
||||
assert inverter.local.ifc.healthy()
|
||||
|
||||
# outside context manager the unhealth AsyncStream is released
|
||||
FakeWriter.peer = ("47.1.2.3", 10000)
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy() # inverter is healthy again (without the unhealty AsyncStream)
|
||||
cnt += 1
|
||||
del inv
|
||||
assert cnt == 1
|
||||
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_unhealthy_remote(config_conn, patch_open_connection, patch_unhealthy_remote):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
_ = patch_unhealthy_remote
|
||||
assert asyncio.get_running_loop()
|
||||
InverterBase._registry.clear()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream
|
||||
assert inverter.remote.ifc
|
||||
assert inverter.local.ifc.healthy()
|
||||
assert not inverter.remote.ifc.healthy()
|
||||
# call healthy inside the contexter manager
|
||||
assert not inverter.healthy()
|
||||
|
||||
# outside context manager the unhealth AsyncStream is released
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
assert inv.healthy() # inverter is healthy again (without the unhealty AsyncStream)
|
||||
cnt += 1
|
||||
del inv
|
||||
assert cnt == 1
|
||||
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_disc(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
|
||||
with InverterBase(reader, writer, 'tsun', Talent) as inverter:
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream
|
||||
# call disc inside the contexter manager
|
||||
await inverter.disc()
|
||||
|
||||
# call disc outside the contexter manager (__exit__() was called)
|
||||
await inverter.disc()
|
||||
del inverter
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
226
app/tests/test_inverter_g3.py
Normal file
226
app/tests/test_inverter_g3.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
import sys,gc
|
||||
|
||||
from mock import patch
|
||||
from enum import Enum
|
||||
from infos import Infos
|
||||
from cnf.config import Config
|
||||
from proxy import Proxy
|
||||
from inverter_base import InverterBase
|
||||
from singleton import Singleton
|
||||
from gen3.inverter_g3 import InverterG3
|
||||
from async_stream import AsyncStream
|
||||
|
||||
from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
# initialize the proxy statistics
|
||||
Infos.static_init()
|
||||
|
||||
@pytest.fixture
|
||||
def config_conn():
|
||||
Config.act_config = {
|
||||
'mqtt':{
|
||||
'host': test_hostname,
|
||||
'port': test_port,
|
||||
'user': '',
|
||||
'passwd': ''
|
||||
},
|
||||
'ha':{
|
||||
'auto_conf_prefix': 'homeassistant',
|
||||
'discovery_prefix': 'homeassistant',
|
||||
'entity_prefix': 'tsun',
|
||||
'proxy_node_id': 'test_1',
|
||||
'proxy_unique_id': ''
|
||||
},
|
||||
'tsun':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234}, 'inverters':{'allow_all':True}
|
||||
}
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def module_init():
|
||||
Singleton._instances.clear()
|
||||
yield
|
||||
|
||||
class FakeReader():
|
||||
def __init__(self):
|
||||
self.on_recv = asyncio.Event()
|
||||
async def read(self, max_len: int):
|
||||
await self.on_recv.wait()
|
||||
return b''
|
||||
def feed_eof(self):
|
||||
return
|
||||
|
||||
|
||||
class FakeWriter():
|
||||
def write(self, buf: bytes):
|
||||
return
|
||||
def get_extra_info(self, sel: str):
|
||||
if sel == 'peername':
|
||||
return ('47.1.2.3', 10000)
|
||||
elif sel == 'sockname':
|
||||
return 'sock:1234'
|
||||
assert False
|
||||
def is_closing(self):
|
||||
return False
|
||||
def close(self):
|
||||
return
|
||||
async def wait_closed(self):
|
||||
return
|
||||
|
||||
class MockType(Enum):
|
||||
RD_TEST_0_BYTES = 1
|
||||
RD_TEST_TIMEOUT = 2
|
||||
RD_TEST_EXCEPT = 3
|
||||
|
||||
|
||||
test = MockType.RD_TEST_0_BYTES
|
||||
|
||||
@pytest.fixture
|
||||
def patch_open_connection():
|
||||
async def new_conn(conn):
|
||||
await asyncio.sleep(0)
|
||||
return FakeReader(), FakeWriter()
|
||||
|
||||
def new_open(host: str, port: int):
|
||||
global test
|
||||
if test == MockType.RD_TEST_TIMEOUT:
|
||||
raise ConnectionRefusedError
|
||||
elif test == MockType.RD_TEST_EXCEPT:
|
||||
raise ValueError("Value cannot be negative") # Compliant
|
||||
return new_conn(None)
|
||||
|
||||
with patch.object(asyncio, 'open_connection', new_open) as conn:
|
||||
yield conn
|
||||
|
||||
@pytest.fixture
|
||||
def patch_healthy():
|
||||
with patch.object(AsyncStream, 'healthy') as conn:
|
||||
yield conn
|
||||
|
||||
def test_method_calls(patch_healthy):
|
||||
spy = patch_healthy
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
InverterBase._registry.clear()
|
||||
|
||||
with InverterG3(reader, writer) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
for inv in InverterBase:
|
||||
inv.healthy()
|
||||
del inv
|
||||
spy.assert_called_once()
|
||||
del inverter
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_conn(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
with InverterG3(FakeReader(), FakeWriter()) as inverter:
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream
|
||||
del inverter
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_except(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
global test
|
||||
test = MockType.RD_TEST_TIMEOUT
|
||||
|
||||
with InverterG3(FakeReader(), FakeWriter()) as inverter:
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream==None
|
||||
|
||||
test = MockType.RD_TEST_EXCEPT
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream==None
|
||||
del inverter
|
||||
|
||||
cnt = 0
|
||||
for inv in InverterBase:
|
||||
print(f'InverterBase refs:{gc.get_referrers(inv)}')
|
||||
cnt += 1
|
||||
assert cnt == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_publish(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
Proxy.class_init()
|
||||
|
||||
with InverterG3(FakeReader(), FakeWriter()) as inverter:
|
||||
stream = inverter.local.stream
|
||||
await inverter.async_publ_mqtt() # check call with invalid unique_id
|
||||
stream._Talent__set_serial_no(serial_no= "123344")
|
||||
|
||||
stream.new_data['inverter'] = True
|
||||
stream.db.db['inverter'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['inverter'] == False
|
||||
|
||||
stream.new_data['env'] = True
|
||||
stream.db.db['env'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['env'] == False
|
||||
|
||||
Infos.new_stat_data['proxy'] = True
|
||||
await inverter.async_publ_mqtt()
|
||||
assert Infos.new_stat_data['proxy'] == False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
_ = patch_mqtt_err
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
Proxy.class_init()
|
||||
|
||||
with InverterG3(FakeReader(), FakeWriter()) as inverter:
|
||||
stream = inverter.local.stream
|
||||
stream._Talent__set_serial_no(serial_no= "123344")
|
||||
stream.new_data['inverter'] = True
|
||||
stream.db.db['inverter'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['inverter'] == True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
_ = patch_mqtt_except
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
Proxy.class_init()
|
||||
|
||||
with InverterG3(FakeReader(), FakeWriter()) as inverter:
|
||||
stream = inverter.local.stream
|
||||
stream._Talent__set_serial_no(serial_no= "123344")
|
||||
|
||||
stream.new_data['inverter'] = True
|
||||
stream.db.db['inverter'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['inverter'] == True
|
||||
197
app/tests/test_inverter_g3p.py
Normal file
197
app/tests/test_inverter_g3p.py
Normal file
@@ -0,0 +1,197 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
|
||||
from mock import patch
|
||||
from enum import Enum
|
||||
from infos import Infos
|
||||
from cnf.config import Config
|
||||
from proxy import Proxy
|
||||
from inverter_base import InverterBase
|
||||
from singleton import Singleton
|
||||
from gen3plus.inverter_g3p import InverterG3P
|
||||
|
||||
from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname
|
||||
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
# initialize the proxy statistics
|
||||
Infos.static_init()
|
||||
|
||||
@pytest.fixture
|
||||
def config_conn():
|
||||
Config.act_config = {
|
||||
'mqtt':{
|
||||
'host': test_hostname,
|
||||
'port': test_port,
|
||||
'user': '',
|
||||
'passwd': ''
|
||||
},
|
||||
'ha':{
|
||||
'auto_conf_prefix': 'homeassistant',
|
||||
'discovery_prefix': 'homeassistant',
|
||||
'entity_prefix': 'tsun',
|
||||
'proxy_node_id': 'test_1',
|
||||
'proxy_unique_id': ''
|
||||
},
|
||||
'solarman':{'enabled': True, 'host': 'test_cloud.local', 'port': 1234}, 'inverters':{'allow_all':True}
|
||||
}
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def module_init():
|
||||
Singleton._instances.clear()
|
||||
yield
|
||||
|
||||
class FakeReader():
|
||||
def __init__(self):
|
||||
self.on_recv = asyncio.Event()
|
||||
async def read(self, max_len: int):
|
||||
await self.on_recv.wait()
|
||||
return b''
|
||||
def feed_eof(self):
|
||||
return
|
||||
|
||||
|
||||
class FakeWriter():
|
||||
def write(self, buf: bytes):
|
||||
return
|
||||
def get_extra_info(self, sel: str):
|
||||
if sel == 'peername':
|
||||
return ('47.1.2.3', 10000)
|
||||
elif sel == 'sockname':
|
||||
return 'sock:1234'
|
||||
assert False
|
||||
def is_closing(self):
|
||||
return False
|
||||
def close(self):
|
||||
return
|
||||
async def wait_closed(self):
|
||||
return
|
||||
|
||||
class MockType(Enum):
|
||||
RD_TEST_0_BYTES = 1
|
||||
RD_TEST_TIMEOUT = 2
|
||||
RD_TEST_EXCEPT = 3
|
||||
|
||||
|
||||
test = MockType.RD_TEST_0_BYTES
|
||||
|
||||
@pytest.fixture
|
||||
def patch_open_connection():
|
||||
async def new_conn(conn):
|
||||
await asyncio.sleep(0)
|
||||
return FakeReader(), FakeWriter()
|
||||
|
||||
def new_open(host: str, port: int):
|
||||
global test
|
||||
if test == MockType.RD_TEST_TIMEOUT:
|
||||
raise ConnectionRefusedError
|
||||
elif test == MockType.RD_TEST_EXCEPT:
|
||||
raise ValueError("Value cannot be negative") # Compliant
|
||||
return new_conn(None)
|
||||
|
||||
with patch.object(asyncio, 'open_connection', new_open) as conn:
|
||||
yield conn
|
||||
|
||||
def test_method_calls(config_conn):
|
||||
_ = config_conn
|
||||
reader = FakeReader()
|
||||
writer = FakeWriter()
|
||||
InverterBase._registry.clear()
|
||||
|
||||
with InverterG3P(reader, writer, client_mode=False) as inverter:
|
||||
assert inverter.local.stream
|
||||
assert inverter.local.ifc
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_conn(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter:
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remote_except(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
global test
|
||||
test = MockType.RD_TEST_TIMEOUT
|
||||
|
||||
with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter:
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream==None
|
||||
|
||||
test = MockType.RD_TEST_EXCEPT
|
||||
await inverter.create_remote()
|
||||
await asyncio.sleep(0)
|
||||
assert inverter.remote.stream==None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_publish(config_conn, patch_open_connection):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
Proxy.class_init()
|
||||
|
||||
with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter:
|
||||
stream = inverter.local.stream
|
||||
await inverter.async_publ_mqtt() # check call with invalid unique_id
|
||||
stream._set_serial_no(snr= 123344)
|
||||
|
||||
stream.new_data['inverter'] = True
|
||||
stream.db.db['inverter'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['inverter'] == False
|
||||
|
||||
stream.new_data['env'] = True
|
||||
stream.db.db['env'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['env'] == False
|
||||
|
||||
Infos.new_stat_data['proxy'] = True
|
||||
await inverter.async_publ_mqtt()
|
||||
assert Infos.new_stat_data['proxy'] == False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
_ = patch_mqtt_err
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
Proxy.class_init()
|
||||
|
||||
with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter:
|
||||
stream = inverter.local.stream
|
||||
stream._set_serial_no(snr= 123344)
|
||||
stream.new_data['inverter'] = True
|
||||
stream.db.db['inverter'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['inverter'] == True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except):
|
||||
_ = config_conn
|
||||
_ = patch_open_connection
|
||||
_ = patch_mqtt_except
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
Proxy.class_init()
|
||||
|
||||
with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter:
|
||||
stream = inverter.local.stream
|
||||
stream._set_serial_no(snr= 123344)
|
||||
|
||||
stream.new_data['inverter'] = True
|
||||
stream.db.db['inverter'] = {}
|
||||
await inverter.async_publ_mqtt()
|
||||
assert stream.new_data['inverter'] == True
|
||||
@@ -1,538 +0,0 @@
|
||||
# test_with_pytest.py
|
||||
import pytest, logging
|
||||
from app.src.messages import Message, Control
|
||||
from app.src.config import Config
|
||||
from app.src.infos import Infos
|
||||
|
||||
# initialize the proxy statistics
|
||||
Infos.static_init()
|
||||
|
||||
tracer = logging.getLogger('tracer')
|
||||
|
||||
class MemoryStream(Message):
|
||||
def __init__(self, msg, chunks = (0,), server_side: bool = True):
|
||||
super().__init__(server_side)
|
||||
self.__msg = msg
|
||||
self.__msg_len = len(msg)
|
||||
self.__chunks = chunks
|
||||
self.__offs = 0
|
||||
self.__chunk_idx = 0
|
||||
self.msg_count = 0
|
||||
self.addr = 'Test: SrvSide'
|
||||
|
||||
def append_msg(self, msg):
|
||||
self.__msg += msg
|
||||
self.__msg_len += len(msg)
|
||||
|
||||
def _read(self) -> int:
|
||||
copied_bytes = 0
|
||||
try:
|
||||
if (self.__offs < self.__msg_len):
|
||||
len = self.__chunks[self.__chunk_idx]
|
||||
self.__chunk_idx += 1
|
||||
if len!=0:
|
||||
self._recv_buffer += self.__msg[self.__offs:len]
|
||||
copied_bytes = len - self.__offs
|
||||
self.__offs = len
|
||||
else:
|
||||
self._recv_buffer += self.__msg[self.__offs:]
|
||||
copied_bytes = self.__msg_len - self.__offs
|
||||
self.__offs = self.__msg_len
|
||||
except:
|
||||
pass
|
||||
return copied_bytes
|
||||
|
||||
|
||||
def _Message__flush_recv_msg(self) -> None:
|
||||
super()._Message__flush_recv_msg()
|
||||
self.msg_count += 1
|
||||
return
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def MsgContactInfo(): # Contact Info message
|
||||
Config.config = {'tsun':{'enabled': True}}
|
||||
return b'\x00\x00\x00\x2c\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub\x40123456'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgContactInfo_LongId(): # Contact Info message with longer ID
|
||||
Config.config = {'tsun':{'enabled': True}}
|
||||
return b'\x00\x00\x00\x2d\x11R1700000000000011\x91\x00\x08solarhub\x0fsolarhub\x40123456'
|
||||
|
||||
@pytest.fixture
|
||||
def Msg2ContactInfo(): # two Contact Info messages
|
||||
return b'\x00\x00\x00\x2c\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub\x40123456\x00\x00\x00\x2c\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub\x40123456'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgContactResp(): # Contact Response message
|
||||
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x00\x01'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgContactInvalid(): # Contact Response message
|
||||
return b'\x00\x00\x00\x14\x10R170000000000001\x93\x00\x01'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgGetTime(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x13\x10R170000000000001\x91\x22'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgTimeResp(): # Get Time Resonse message
|
||||
return b'\x00\x00\x00\x1b\x10R170000000000001\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgTimeInvalid(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x13\x10R170000000000001\x94\x22'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgControllerInd(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x01\x2f\x10R170000000000001\x91\x71\x0e\x10\x00\x00\x10R170000000000001'
|
||||
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x55\x50'
|
||||
msg += b'\x00\x00\x00\x15\x00\x09\x2b\xa8\x54\x10\x52\x53\x57\x5f\x34\x30\x30\x5f\x56\x31\x2e\x30\x30\x2e\x30\x36\x00\x09\x27\xc0\x54\x06\x52\x61\x79\x6d\x6f'
|
||||
msg += b'\x6e\x00\x09\x2f\x90\x54\x0b\x52\x53\x57\x2d\x31\x2d\x31\x30\x30\x30\x31\x00\x09\x5a\x88\x54\x0f\x74\x2e\x72\x61\x79\x6d\x6f\x6e\x69\x6f\x74\x2e\x63\x6f\x6d\x00\x09\x5a\xec\x54'
|
||||
msg += b'\x1c\x6c\x6f\x67\x67\x65\x72\x2e\x74\x61\x6c\x65\x6e\x74\x2d\x6d\x6f\x6e\x69\x74\x6f\x72\x69\x6e\x67\x2e\x63\x6f\x6d\x00\x0d\x00\x20\x49\x00\x00\x00\x01\x00\x0c\x35\x00\x49\x00'
|
||||
msg += b'\x00\x00\x64\x00\x0c\x96\xa8\x49\x00\x00\x00\x1d\x00\x0c\x7f\x38\x49\x00\x00\x00\x01\x00\x0c\xfc\x38\x49\x00\x00\x00\x01\x00\x0c\xf8\x50\x49\x00\x00\x01\x2c\x00\x0c\x63\xe0\x49'
|
||||
msg += b'\x00\x00\x00\x00\x00\x0c\x67\xc8\x49\x00\x00\x00\x00\x00\x0c\x50\x58\x49\x00\x00\x00\x01\x00\x09\x5e\x70\x49\x00\x00\x13\x8d\x00\x09\x5e\xd4\x49\x00\x00\x13\x8d\x00\x09\x5b\x50'
|
||||
msg += b'\x49\x00\x00\x00\x02\x00\x0d\x04\x08\x49\x00\x00\x00\x00\x00\x07\xa1\x84\x49\x00\x00\x00\x01\x00\x0c\x50\x59\x49\x00\x00\x00\x4c\x00\x0d\x1f\x60\x49\x00\x00\x00\x00'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def MsgControllerAck(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x71\x01'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgControllerInvalid(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x14\x10R170000000000001\x92\x71\x01'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgInverterInd(): # Data indication from the controller
|
||||
msg = b'\x00\x00\x00\x8b\x10R170000000000001\x91\x04\x01\x90\x00\x01\x10R170000000000001'
|
||||
msg += b'\x01\x00\x00\x01\x89\xc6\x63\x61\x08'
|
||||
msg += b'\x00\x00\x00\x06\x00\x00\x00\x0a\x54\x08\x4d\x69\x63\x72\x6f\x69\x6e\x76\x00\x00\x00\x14\x54\x04\x54\x53\x55\x4e\x00\x00\x00\x1E\x54\x07\x56\x35\x2e\x30\x2e\x31\x31\x00\x00\x00\x28'
|
||||
msg += b'\x54\x10\x54\x31\x37\x45\x37\x33\x30\x37\x30\x32\x31\x44\x30\x30\x36\x41\x00\x00\x00\x32\x54\x0a\x54\x53\x4f\x4c\x2d\x4d\x53\x36\x30\x30\x00\x00\x00\x3c\x54\x05\x41\x2c\x42\x2c\x43'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def MsgInverterAck(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x14\x10R170000000000001\x99\x04\x01'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgInverterInvalid(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x14\x10R170000000000001\x92\x04\x01'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgUnknown(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x17\x10R170000000000001\x91\x17\x01\x02\x03\x04'
|
||||
|
||||
@pytest.fixture
|
||||
def MsgGetTime(): # Get Time Request message
|
||||
return b'\x00\x00\x00\x13\x10R170000000000001\x91\x22'
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def MsgTimeResp(): # Get Time Resonse message
|
||||
return b'\x00\x00\x00\x1b\x10R170000000000001\x99\x22\x00\x00\x01\x89\xc6\x63\x4d\x80'
|
||||
|
||||
@pytest.fixture
|
||||
def ConfigTsunAllowAll():
|
||||
Config.config = {'tsun':{'enabled': True}, 'inverters':{'allow_all':True}}
|
||||
|
||||
@pytest.fixture
|
||||
def ConfigNoTsunInv1():
|
||||
Config.config = {'tsun':{'enabled': False},'inverters':{'R170000000000001':{'node_id':'inv1','suggested_area':'roof'}}}
|
||||
|
||||
@pytest.fixture
|
||||
def ConfigTsunInv1():
|
||||
Config.config = {'tsun':{'enabled': True},'inverters':{'R170000000000001':{'node_id':'inv1','suggested_area':'roof'}}}
|
||||
|
||||
def test_read_message(MsgContactInfo):
|
||||
m = MemoryStream(MsgContactInfo, (0,))
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 1
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == None
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==0
|
||||
assert m.header_len==23
|
||||
assert m.data_len==25
|
||||
assert m._forward_buffer==b''
|
||||
m.close()
|
||||
|
||||
def test_read_message_twice(ConfigNoTsunInv1, MsgInverterInd):
|
||||
ConfigNoTsunInv1
|
||||
m = MemoryStream(MsgInverterInd, (0,))
|
||||
m.append_msg(MsgInverterInd)
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 1
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == 'R170000000000001'
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==4
|
||||
assert m.header_len==23
|
||||
assert m.data_len==120
|
||||
assert m._forward_buffer==b''
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 2
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == 'R170000000000001'
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==4
|
||||
assert m.header_len==23
|
||||
assert m.data_len==120
|
||||
assert m._forward_buffer==b''
|
||||
m.close()
|
||||
|
||||
def test_read_message_long_id(MsgContactInfo_LongId):
|
||||
m = MemoryStream(MsgContactInfo_LongId, (23,24))
|
||||
m.read() # read 23 bytes, one is missing
|
||||
assert not m.header_valid # must be invalid, since header not complete
|
||||
assert m.msg_count == 0
|
||||
m.read() # read the missing byte
|
||||
assert m.header_valid # must be valid, since header is complete but not the msg
|
||||
assert m.msg_count == 0
|
||||
assert m.id_str == b"R1700000000000011"
|
||||
assert m.unique_id == 0
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==0
|
||||
assert m.header_len==24
|
||||
assert m.data_len==25
|
||||
m.read() # try to read rest of message, but there is no chunk available
|
||||
assert m.header_valid # must be valid, since header is complete but not the msg
|
||||
assert m.msg_count == 0
|
||||
m.close()
|
||||
|
||||
|
||||
def test_read_message_in_chunks(MsgContactInfo):
|
||||
m = MemoryStream(MsgContactInfo, (4,23,0))
|
||||
m.read() # read 4 bytes, header incomplere
|
||||
assert not m.header_valid # must be invalid, since header not complete
|
||||
assert m.msg_count == 0
|
||||
m.read() # read missing bytes for complete header
|
||||
assert m.header_valid # must be valid, since header is complete but not the msg
|
||||
assert m.msg_count == 0
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == 0
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==0
|
||||
assert m.header_len==23
|
||||
assert m.data_len==25
|
||||
m.read() # read rest of message
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 1
|
||||
m.close()
|
||||
|
||||
def test_read_message_in_chunks2(MsgContactInfo):
|
||||
m = MemoryStream(MsgContactInfo, (4,10,0))
|
||||
m.read() # read 4 bytes, header incomplere
|
||||
assert not m.header_valid
|
||||
assert m.msg_count == 0
|
||||
m.read() # read 6 more bytes, header incomplere
|
||||
assert not m.header_valid
|
||||
assert m.msg_count == 0
|
||||
m.read() # read rest of message
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.header_len==23
|
||||
assert m.data_len==25
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == None
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==0
|
||||
assert m.msg_count == 1
|
||||
while m.read(): # read rest of message
|
||||
pass
|
||||
assert m.msg_count == 1
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
m.close()
|
||||
|
||||
def test_read_two_messages(ConfigTsunAllowAll, Msg2ContactInfo):
|
||||
ConfigTsunAllowAll
|
||||
m = MemoryStream(Msg2ContactInfo, (0,))
|
||||
m.db.stat['proxy']['Unknown_Ctrl'] = 0
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 1
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == 'R170000000000001'
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==0
|
||||
assert m.header_len==23
|
||||
assert m.data_len==25
|
||||
assert m._forward_buffer==b'\x00\x00\x00,\x10R170000000000001\x91\x00\x08solarhub\x0fsolarhub@123456'
|
||||
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
|
||||
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 2
|
||||
assert m.id_str == b"R170000000000002"
|
||||
assert m.unique_id == 'R170000000000002'
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==0
|
||||
assert m.header_len==23
|
||||
assert m.data_len==25
|
||||
assert m._forward_buffer==b'\x00\x00\x00,\x10R170000000000002\x91\x00\x08solarhub\x0fsolarhub@123456'
|
||||
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
|
||||
m.close()
|
||||
|
||||
def test_msg_contact_resp(ConfigTsunInv1, MsgContactResp):
|
||||
ConfigTsunInv1
|
||||
m = MemoryStream(MsgContactResp, (0,), False)
|
||||
m.db.stat['proxy']['Unknown_Ctrl'] = 0
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 1
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == 'R170000000000001'
|
||||
assert int(m.ctrl)==153
|
||||
assert m.msg_id==0
|
||||
assert m.header_len==23
|
||||
assert m.data_len==1
|
||||
assert m._forward_buffer==b''
|
||||
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
|
||||
m.close()
|
||||
|
||||
def test_msg_contact_invalid(ConfigTsunInv1, MsgContactInvalid):
|
||||
ConfigTsunInv1
|
||||
m = MemoryStream(MsgContactInvalid, (0,))
|
||||
m.db.stat['proxy']['Unknown_Ctrl'] = 0
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 1
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == 'R170000000000001'
|
||||
assert int(m.ctrl)==147
|
||||
assert m.msg_id==0
|
||||
assert m.header_len==23
|
||||
assert m.data_len==1
|
||||
assert m._forward_buffer==MsgContactInvalid
|
||||
assert m._send_buffer==b''
|
||||
assert m.db.stat['proxy']['Unknown_Ctrl'] == 1
|
||||
m.close()
|
||||
|
||||
def test_msg_get_time(ConfigTsunInv1, MsgGetTime):
|
||||
ConfigTsunInv1
|
||||
m = MemoryStream(MsgGetTime, (0,))
|
||||
m.db.stat['proxy']['Unknown_Ctrl'] = 0
|
||||
m.read() # read complete msg, and dispatch msg
|
||||
assert not m.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert m.msg_count == 1
|
||||
assert m.id_str == b"R170000000000001"
|
||||
assert m.unique_id == 'R170000000000001'
|
||||
assert int(m.ctrl)==145
|
||||
assert m.msg_id==34
|
||||
assert m.header_len==23
|
||||
assert m.data_len==0
|
||||
assert m._forward_buffer==MsgGetTime
|
||||
assert m.db.stat['proxy']['Unknown_Ctrl'] == 0
|
||||
m.close()
|
||||
|
||||
def test_msg_time_resp(ConfigTsunInv1, MsgTimeResp):
    """A time response (client side) is consumed and not forwarded."""
    _ = ConfigTsunInv1
    stream = MemoryStream(MsgTimeResp, (0,), False)
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 153
    assert stream.msg_id == 34
    assert stream.header_len == 23
    assert stream.data_len == 8
    assert stream._forward_buffer == b''
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 0
    stream.close()
|
||||
|
||||
def test_msg_time_invalid(ConfigTsunInv1, MsgTimeInvalid):
    """A time msg with an unknown ctrl byte is forwarded unchanged and counted."""
    _ = ConfigTsunInv1
    stream = MemoryStream(MsgTimeInvalid, (0,), False)
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 148
    assert stream.msg_id == 34
    assert stream.header_len == 23
    assert stream.data_len == 0
    # unknown ctrl: forward untouched, answer nothing, count the event
    assert stream._forward_buffer == MsgTimeInvalid
    assert stream._send_buffer == b''
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 1
    stream.close()
|
||||
|
||||
|
||||
def test_msg_cntrl_ind(ConfigTsunInv1, MsgControllerInd, MsgControllerAck):
    """A controller indication is forwarded and answered with an ack."""
    _ = ConfigTsunInv1
    stream = MemoryStream(MsgControllerInd, (0,))
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 145
    assert stream.msg_id == 113
    assert stream.header_len == 23
    assert stream.data_len == 284
    assert stream._forward_buffer == MsgControllerInd
    assert stream._send_buffer == MsgControllerAck
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 0
    stream.close()
|
||||
|
||||
def test_msg_cntrl_ack(ConfigTsunInv1, MsgControllerAck):
    """A controller ack (client side) is consumed; nothing forwarded or sent."""
    _ = ConfigTsunInv1
    stream = MemoryStream(MsgControllerAck, (0,), False)
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 153
    assert stream.msg_id == 113
    assert stream.header_len == 23
    assert stream.data_len == 1
    assert stream._forward_buffer == b''
    assert stream._send_buffer == b''
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 0
    stream.close()
|
||||
|
||||
def test_msg_cntrl_invalid(ConfigTsunInv1, MsgControllerInvalid):
    """A controller msg with unknown ctrl byte is forwarded and counted."""
    _ = ConfigTsunInv1
    stream = MemoryStream(MsgControllerInvalid, (0,))
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 146
    assert stream.msg_id == 113
    assert stream.header_len == 23
    assert stream.data_len == 1
    # unknown ctrl: forward untouched, answer nothing, count the event
    assert stream._forward_buffer == MsgControllerInvalid
    assert stream._send_buffer == b''
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 1
    stream.close()
|
||||
|
||||
def test_msg_inv_ind(ConfigTsunInv1, MsgInverterInd, MsgInverterAck):
    """An inverter indication is forwarded and answered with an ack."""
    _ = ConfigTsunInv1
    tracer.setLevel(logging.DEBUG)  # exercise the debug-trace code path
    stream = MemoryStream(MsgInverterInd, (0,))
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 145
    assert stream.msg_id == 4
    assert stream.header_len == 23
    assert stream.data_len == 120
    assert stream._forward_buffer == MsgInverterInd
    assert stream._send_buffer == MsgInverterAck
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 0
    stream.close()
|
||||
|
||||
def test_msg_inv_ack(ConfigTsunInv1, MsgInverterAck):
    """An inverter ack (client side) is consumed; nothing forwarded or sent."""
    _ = ConfigTsunInv1
    tracer.setLevel(logging.ERROR)  # restore the quieter trace level

    stream = MemoryStream(MsgInverterAck, (0,), False)
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 153
    assert stream.msg_id == 4
    assert stream.header_len == 23
    assert stream.data_len == 1
    assert stream._forward_buffer == b''
    assert stream._send_buffer == b''
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 0
    stream.close()
|
||||
|
||||
def test_msg_inv_invalid(ConfigTsunInv1, MsgInverterInvalid):
    """An inverter msg with unknown ctrl byte is forwarded and counted."""
    _ = ConfigTsunInv1
    stream = MemoryStream(MsgInverterInvalid, (0,), False)
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 146
    assert stream.msg_id == 4
    assert stream.header_len == 23
    assert stream.data_len == 1
    # unknown ctrl: forward untouched, answer nothing, count the event
    assert stream._forward_buffer == MsgInverterInvalid
    assert stream._send_buffer == b''
    assert stream.db.stat['proxy']['Unknown_Ctrl'] == 1
    stream.close()
|
||||
|
||||
def test_msg_unknown(ConfigTsunInv1, MsgUnknown):
    """An unknown msg id is forwarded unchanged and counted as Unknown_Msg."""
    _ = ConfigTsunInv1
    stream = MemoryStream(MsgUnknown, (0,), False)
    stream.db.stat['proxy']['Unknown_Ctrl'] = 0
    stream.read()  # read complete msg, and dispatch msg
    # header must be invalid, since msg was handled and buffer flushed
    assert not stream.header_valid
    assert stream.msg_count == 1
    assert stream.id_str == b"R170000000000001"
    assert stream.unique_id == 'R170000000000001'
    assert int(stream.ctrl) == 145
    assert stream.msg_id == 23
    assert stream.header_len == 23
    assert stream.data_len == 4
    assert stream._forward_buffer == MsgUnknown
    assert stream._send_buffer == b''
    # note: the 'Unknown_Msg' counter is incremented, not 'Unknown_Ctrl'
    assert 1 == stream.db.stat['proxy']['Unknown_Msg']
    stream.close()
|
||||
|
||||
def test_ctrl_byte():
    """Ctrl byte 0x91 is an indication, 0x99 a response — never both."""
    ind = Control(0x91)
    assert ind.is_ind()
    assert not ind.is_resp()

    resp = Control(0x99)
    assert not resp.is_ind()
    assert resp.is_resp()
|
||||
|
||||
|
||||
def test_msg_iterator():
    """Iterating over Message yields each live instance exactly once.

    A closed and deleted instance must not appear in the iteration.
    """
    m1 = Message(server_side=True)
    m2 = Message(server_side=True)
    m3 = Message(server_side=True)
    m3.close()
    del m3  # closed + deleted -> must be gone from the registry

    hits_m1 = 0
    hits_m2 = 0
    for inst in Message:
        if inst == m1:
            hits_m1 += 1
        elif inst == m2:
            hits_m2 += 1
        else:
            assert False  # unexpected instance yielded
    assert hits_m1 == 1
    assert hits_m2 == 1
|
||||
|
||||
def test_proxy_counter():
    """inc_counter/dec_counter update the stat db and set the pending flag."""
    msg = Message(server_side=True)
    assert msg.new_data == {}
    msg.db.stat['proxy']['Unknown_Msg'] = 0
    msg.new_stat_data['proxy'] = False

    msg.inc_counter('Unknown_Msg')
    assert msg.new_data == {}
    assert msg.new_stat_data == {'proxy': True}  # marked for publishing
    assert 1 == msg.db.stat['proxy']['Unknown_Msg']

    msg.new_stat_data['proxy'] = False
    msg.dec_counter('Unknown_Msg')
    assert msg.new_data == {}
    assert msg.new_stat_data == {'proxy': True}  # marked again
    assert 0 == msg.db.stat['proxy']['Unknown_Msg']
    msg.close()
|
||||
404
app/tests/test_modbus.py
Normal file
404
app/tests/test_modbus.py
Normal file
@@ -0,0 +1,404 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
from modbus import Modbus
|
||||
from infos import Infos, Register
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
class ModbusTestHelper(Modbus):
    """Modbus subclass that records outgoing PDUs instead of sending them."""

    def __init__(self):
        # register our own send_cb so build_msg()/retransmits land in self.pdu
        super().__init__(self.send_cb)
        self.db = Infos()           # value database used by recv_resp()
        self.pdu = None             # last PDU passed to send_cb()
        self.send_calls = 0         # number of send_cb() invocations
        self.recv_responses = 0     # number of resp_handler() invocations

    def send_cb(self, pdu: bytearray, log_lvl: int, state: str):
        """Capture the PDU that Modbus would transmit."""
        self.pdu = pdu
        self.send_calls += 1

    def resp_handler(self):
        """Count responses dispatched for requests queued via recv_req()."""
        self.recv_responses += 1
|
||||
|
||||
def test_modbus_crc():
    '''Check CRC-16 calculation'''
    mb = Modbus(None)
    calc = mb._Modbus__calc_crc
    check = mb._Modbus__check_crc

    # a frame including its own (little-endian) CRC yields a CRC of 0
    assert calc(b'\x01\x06\x20\x08\x00\x04') == 0x0b02
    assert calc(b'\x01\x06\x20\x08\x00\x04\x02\x0b') == 0
    assert check(b'\x01\x06\x20\x08\x00\x04\x02\x0b')

    assert calc(b'\x01\x06\x20\x08\x00\x00') == 0xc803
    assert calc(b'\x01\x06\x20\x08\x00\x00\x03\xc8') == 0
    assert check(b'\x01\x06\x20\x08\x00\x00\x03\xc8')

    assert calc(b'\x01\x03\x08\x01\x2c\x00\x2c\x02\x2c\x2c\x46') == 0x5c75
    msg = (b'\x01\x03\x28\x51'
           b'\x0e\x08\xd3\x00\x29\x13\x87\x00\x3e\x00\x00\x01\x2c\x03\xb4\x00'
           b'\x08\x00\x00\x00\x00\x01\x59\x01\x21\x03\xe6\x00\x00\x00\x00\x00'
           b'\x00\x00\x00\x00\x00\x00\x00\xe6\xef')
    assert calc(msg) == 0
|
||||
|
||||
def test_build_modbus_pdu():
    '''Check building and sending a MODBUS RTU'''
    mb = ModbusTestHelper()
    mb.build_msg(1, 6, 0x2000, 0x12)
    # PDU is sent via send_cb and carries a valid CRC
    assert mb.pdu == b'\x01\x06\x20\x00\x00\x12\x02\x07'
    assert mb._Modbus__check_crc(mb.pdu)
    # request bookkeeping reflects what was built
    assert mb.last_addr == 1
    assert mb.last_fcode == 6
    assert mb.last_reg == 0x2000
    assert mb.last_len == 18
    assert mb.err == 0
|
||||
|
||||
def test_recv_req():
    '''Receive a valid request, which must be transmitted'''
    mb = ModbusTestHelper()
    frame = b'\x01\x06\x20\x00\x00\x12\x02\x07'
    assert mb.recv_req(frame)
    # request bookkeeping is taken from the received frame
    assert mb.last_fcode == 6
    assert mb.last_reg == 0x2000
    assert mb.last_len == 0x12
    assert mb.err == 0
|
||||
|
||||
def test_recv_req_crc_err():
    '''Receive a request with invalid CRC, which must be dropped'''
    mb = ModbusTestHelper()
    # last CRC byte is off by one -> recv_req() must reject the frame
    assert not mb.recv_req(b'\x01\x06\x20\x00\x00\x12\x02\x08')
    assert mb.pdu is None       # nothing was handed to send_cb (PEP 8: is None)
    assert mb.last_fcode == 0   # bookkeeping stays untouched
    assert mb.last_reg == 0
    assert mb.last_len == 0
    assert mb.err == 1          # CRC error counted
|
||||
|
||||
def test_recv_resp_crc_err():
    '''Receive a response with invalid CRC, which must be dropped'''
    mb = ModbusTestHelper()
    # simulate a transmitted request
    mb.req_pend = True
    mb.last_addr = 1
    mb.last_fcode = 3
    mb.last_reg = 0x300e
    mb.last_len = 2
    mb.set_node_id('test')

    # matching response, but the CRC is wrong
    yielded = 0
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf3'):
        yielded += 1
    assert mb.err == 1          # CRC error counted
    assert 0 == yielded         # generator must not yield any value
    assert mb.req_pend == True  # request still pending, no valid response

    # cleanup queue
    mb._Modbus__stop_timer()
    assert not mb.req_pend
|
||||
|
||||
def test_recv_resp_invalid_addr():
    '''Receive a response with wrong server addr, which must be dropped'''
    mb = ModbusTestHelper()
    mb.req_pend = True
    # simulate a transmitted request
    mb.last_addr = 1
    mb.last_fcode = 3
    mb.last_reg = 0x300e
    mb.last_len = 2
    mb.set_node_id('test')

    # check not matching response, with wrong server addr
    call = 0
    # recv_resp() yields 3-tuples everywhere else in this file; the loop only
    # worked with a 2-tuple target because it never iterates here — use the
    # consistent 3-tuple unpacking
    for key, update, _val in mb.recv_resp(mb.db, b'\x02\x03\x04\x01\x2c\x00\x46\x88\xf4'):
        call += 1
    assert mb.err == 2          # wrong-address error counted
    assert 0 == call            # generator must not yield any value
    assert mb.req_pend          # request still pending
    assert mb.que.qsize() == 0

    # cleanup queue
    mb._Modbus__stop_timer()
    assert not mb.req_pend
|
||||
|
||||
def test_recv_recv_fcode():
    '''Receive a response with wrong function code, which must be dropped'''
    mb = ModbusTestHelper()
    mb.build_msg(1, 4, 0x300e, 2)   # request uses fcode 4
    assert mb.que.qsize() == 0
    assert mb.req_pend

    # the response carries fcode 3 instead of 4
    yielded = 0
    mb.set_node_id('test')
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'):
        yielded += 1

    assert mb.err == 3          # wrong-fcode error counted
    assert 0 == yielded
    assert mb.req_pend == True
    assert mb.que.qsize() == 0

    # cleanup queue
    mb._Modbus__stop_timer()
    assert not mb.req_pend
|
||||
|
||||
def test_recv_resp_len():
    '''Receive a response with wrong data length, which must be dropped'''
    mb = ModbusTestHelper()
    mb.build_msg(1, 3, 0x300e, 3)   # request 3 registers
    assert mb.que.qsize() == 0
    assert mb.req_pend
    assert mb.last_len == 3

    # response contains only 4 data bytes (2 registers)
    yielded = 0
    mb.set_node_id('test')
    for key, update, _ in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'):
        yielded += 1

    assert mb.err == 4          # wrong-length error counted
    assert 0 == yielded
    assert mb.req_pend == True
    assert mb.que.qsize() == 0

    # cleanup queue
    mb._Modbus__stop_timer()
    assert not mb.req_pend
|
||||
|
||||
def test_recv_unexpect_resp():
    '''Receive a response when we haven't sent a request'''
    mb = ModbusTestHelper()
    assert not mb.req_pend

    # check unexpected response, which must be dropped
    call = 0
    mb.set_node_id('test')
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'):
        call += 1

    assert mb.err == 5          # unexpected-response error counted
    assert 0 == call            # generator must not yield any value
    assert mb.req_pend == False
    assert mb.que.qsize() == 0
|
||||
|
||||
def test_parse_resp():
    '''Receive matching response and parse the values'''
    mb = ModbusTestHelper()
    mb.build_msg(1, 3, 0x3007, 6)
    assert mb.que.qsize() == 0
    assert mb.req_pend

    mb.set_node_id('test')
    expected = ['V0.0.2C', 4.4, 0.7, 0.7, 30]
    resp = b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'
    idx = 0
    for key, update, val in mb.recv_resp(mb.db, resp):
        # every yielded value belongs to one of these groups and is flagged
        # as updated
        if key == 'grid':
            assert update == True
        elif key == 'inverter':
            assert update == True
        elif key == 'env':
            assert update == True
        else:
            assert False
        assert expected[idx] == val
        idx += 1
    assert 0 == mb.err
    assert 5 == idx
    assert mb.que.qsize() == 0
    assert not mb.req_pend
|
||||
|
||||
def test_queue():
    '''A single build_msg() is transmitted immediately; the queue stays empty'''
    mb = ModbusTestHelper()
    mb.build_msg(1, 3, 0x3022, 4)
    assert mb.que.qsize() == 0
    assert mb.req_pend

    assert mb.send_calls == 1
    assert mb.pdu == b'\x01\x030"\x00\x04\xeb\x03'
    mb.pdu = None
    # no further transmission without a response or timeout
    assert mb.send_calls == 1
    assert mb.pdu is None       # PEP 8: identity check for None

    assert mb.que.qsize() == 0

    # cleanup queue
    mb._Modbus__stop_timer()
    assert not mb.req_pend
|
||||
|
||||
def test_queue2():
    '''Check queue handling for build_msg() calls'''
    mb = ModbusTestHelper()
    # first request goes out immediately; the next two are queued
    mb.build_msg(1,3,0x3007,6)
    mb.build_msg(1,6,0x2008,4)
    assert mb.que.qsize() == 1
    assert mb.req_pend
    mb.build_msg(1,3,0x3007,6)
    assert mb.que.qsize() == 2
    assert mb.req_pend

    assert mb.send_calls == 1
    assert mb.pdu == b'\x01\x030\x07\x00\x06{\t'
    call = 0
    mb.set_node_id('test')
    exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30]
    # answer the first request -> values are parsed and yielded
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'):
        if key == 'grid':
            assert update == True
        elif key == 'inverter':
            assert update == True
        elif key == 'env':
            assert update == True
        else:
            assert False
        assert exp_result[call] == val
        call += 1
    assert 0 == mb.err
    assert 5 == call

    # the response triggers transmission of the next queued request
    assert mb.que.qsize() == 1
    assert mb.send_calls == 2
    assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b'

    # answer the write request (echo); yields nothing
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b'):
        pass # call generator mb.recv_resp()

    # last queued request is now transmitted
    assert mb.que.qsize() == 0
    assert mb.send_calls == 3
    assert mb.pdu == b'\x01\x030\x07\x00\x06{\t'
    call = 0
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'):
        call += 1
    assert 0 == mb.err
    assert 5 == call

    assert mb.que.qsize() == 0
    assert not mb.req_pend
|
||||
|
||||
def test_queue3():
    '''Check queue handling for recv_req() calls'''
    mb = ModbusTestHelper()
    # first request goes out immediately; the next two are queued;
    # the first two register resp_handler as their response callback
    assert mb.recv_req(b'\x01\x03\x30\x07\x00\x06{\t', mb.resp_handler)
    assert mb.recv_req(b'\x01\x06\x20\x08\x00\x04\x02\x0b', mb.resp_handler)
    assert mb.que.qsize() == 1
    assert mb.req_pend
    assert mb.recv_req(b'\x01\x03\x30\x07\x00\x06{\t')
    assert mb.que.qsize() == 2
    assert mb.req_pend

    assert mb.send_calls == 1
    assert mb.pdu == b'\x01\x030\x07\x00\x06{\t'
    assert mb.recv_responses == 0

    call = 0
    mb.set_node_id('test')
    exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30]
    # answer the first request -> values parsed, resp_handler invoked
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'):
        if key == 'grid':
            assert update == True
        elif key == 'inverter':
            assert update == True
        elif key == 'env':
            assert update == True
        else:
            assert False
        assert exp_result[call] == val
        call += 1
    assert 0 == mb.err
    assert 5 == call
    assert mb.recv_responses == 1

    # response triggers transmission of the next queued request
    assert mb.que.qsize() == 1
    assert mb.send_calls == 2
    assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b'

    for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b'):
        pass  # no code in loop is OK; calling the generator is the purpose
    assert 0 == mb.err
    assert mb.recv_responses == 2

    assert mb.que.qsize() == 0
    assert mb.send_calls == 3
    assert mb.pdu == b'\x01\x030\x07\x00\x06{\t'
    call = 0
    # third request had no callback -> recv_responses stays at 2
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'):
        call += 1
    assert 0 == mb.err
    assert mb.recv_responses == 2
    assert 5 == call

    assert mb.que.qsize() == 0
    assert not mb.req_pend
|
||||
|
||||
@pytest.mark.asyncio
async def test_timeout():
    '''Test MODBUS response timeout and RTU retransmitting'''
    assert asyncio.get_running_loop()
    mb = ModbusTestHelper()
    mb.max_retries = 2
    mb.timeout = 0.1 # 100ms timeout for fast testing, expect a time resolution of at least 10ms
    assert asyncio.get_running_loop() == mb.loop
    mb.build_msg(1,3,0x3007,6)
    mb.build_msg(1,6,0x2008,4)

    assert mb.que.qsize() == 1
    assert mb.req_pend
    assert mb.retry_cnt == 0
    assert mb.send_calls == 1
    assert mb.pdu == b'\x01\x030\x07\x00\x06{\t'

    mb.pdu = None
    await asyncio.sleep(0.11) # wait for first timeout and retransmission
    assert mb.que.qsize() == 1
    assert mb.req_pend
    assert mb.retry_cnt == 1
    assert mb.send_calls == 2
    assert mb.pdu == b'\x01\x030\x07\x00\x06{\t'  # same PDU sent again

    mb.pdu = None
    await asyncio.sleep(0.11) # wait for second timeout and retransmission
    assert mb.que.qsize() == 1
    assert mb.req_pend
    assert mb.retry_cnt == 2
    assert mb.send_calls == 3
    assert mb.pdu == b'\x01\x030\x07\x00\x06{\t'

    mb.pdu = None
    await asyncio.sleep(0.11) # wait for third timeout and next pdu
    # retries exhausted -> the queued request is transmitted instead
    assert mb.que.qsize() == 0
    assert mb.req_pend
    assert mb.retry_cnt == 0
    assert mb.send_calls == 4
    assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b'

    mb.max_retries = 0 # next pdu without retransmission
    await asyncio.sleep(0.11) # wait for fourth timeout
    assert mb.que.qsize() == 0
    assert not mb.req_pend      # no retries and no queued request left
    assert mb.retry_cnt == 0
    assert mb.send_calls == 4
|
||||
|
||||
def test_recv_unknown_data():
    '''Receive a response with an unknown register'''
    mb = ModbusTestHelper()
    assert 0x9000 not in mb.mb_reg_mapping
    # map the unknown register range to a test-only register for this test
    mb.mb_reg_mapping[0x9000] = {'reg': Register.TEST_REG1, 'fmt': '!H', 'ratio': 1}

    mb.build_msg(1,3,0x9000,2)

    # matching response with valid CRC; the generator yields nothing for the
    # test-only register and no error is counted
    call = 0
    mb.set_node_id('test')
    for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'):
        call += 1
    assert mb.err == 0
    assert 0 == call
    assert not mb.req_pend

    # restore the mapping so other tests are unaffected
    del mb.mb_reg_mapping[0x9000]
|
||||
|
||||
def test_close():
    '''close() must drop all queued requests and empty the queue'''
    mb = ModbusTestHelper()
    # first request is transmitted; the next two are queued
    mb.build_msg(1,3,0x3007,6)
    mb.build_msg(1,6,0x2008,4)
    assert mb.que.qsize() == 1
    mb.build_msg(1,3,0x3007,6)
    assert mb.que.qsize() == 2
    assert mb.que.empty() == False
    mb.close()
    # the queue is drained on close
    assert mb.que.qsize() == 0
    assert mb.que.empty() == True
|
||||
386
app/tests/test_modbus_tcp.py
Normal file
386
app/tests/test_modbus_tcp.py
Normal file
@@ -0,0 +1,386 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
from aiomqtt import MqttCodeError
|
||||
|
||||
from mock import patch
|
||||
from enum import Enum
|
||||
from singleton import Singleton
|
||||
from cnf.config import Config
|
||||
from infos import Infos
|
||||
from mqtt import Mqtt
|
||||
from inverter_base import InverterBase
|
||||
from messages import Message, State
|
||||
from proxy import Proxy
|
||||
from modbus_tcp import ModbusConn, ModbusTcp
|
||||
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
# initialize the proxy statistics
|
||||
Infos.static_init()
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
def module_init():
    # reset all singletons once per module so tests start from a clean state
    Singleton._instances.clear()
    yield
|
||||
|
||||
@pytest.fixture(scope="module")
def test_port():
    # MQTT broker port used by config_conn
    return 1883
|
||||
|
||||
@pytest.fixture(scope="module")
def test_hostname():
    # MQTT broker hostname used by config_conn; the commented code kept a
    # CI-specific broker selection and may be re-enabled later
    # if getenv("GITHUB_ACTIONS") == "true":
    #     return 'mqtt'
    # else:
    return 'test.mosquitto.org'
|
||||
|
||||
@pytest.fixture
def config_conn(test_hostname, test_port):
    # install a complete proxy configuration: MQTT broker, Home Assistant
    # settings, solarman cloud endpoint and two inverters; 'inv_2' runs in
    # client mode so ModbusTcp opens an outgoing connection for it
    Config.act_config = {
        'mqtt':{
            'host': test_hostname,
            'port': test_port,
            'user': '',
            'passwd': ''
        },
        'ha':{
            'auto_conf_prefix': 'homeassistant',
            'discovery_prefix': 'homeassistant',
            'entity_prefix': 'tsun',
            'proxy_node_id': 'test_1',
            'proxy_unique_id': ''
        },
        'solarman':{
            'host': 'access1.solarmanpv.com',
            'port': 10000
        },
        'inverters':{
            'allow_all': True,
            "R170000000000001":{
                'node_id': 'inv_1'
            },
            "Y170000000000001":{
                'node_id': 'inv_2',
                'monitor_sn': 2000000000,
                'modbus_polling': True,
                'suggested_area': "",
                'sensor_list': 0x2b0,
                'client_mode':{
                    'host': '192.168.0.1',
                    'port': 8899,
                    'forward': True
                }
            }
        }
    }
|
||||
|
||||
|
||||
class FakeReader():
    """Minimal asyncio StreamReader stand-in; read() behavior via self.test."""

    RD_TEST_0_BYTES = 1    # read() returns b'' (connection closed)
    RD_TEST_TIMEOUT = 2    # read() raises TimeoutError
    RD_TEST_13_BYTES = 3   # read() returns 13 bytes once, then b''
    RD_TEST_SW_EXCEPT = 4  # read() raises a software exception (see below)
    RD_TEST_OS_ERROR = 5   # read() raises ConnectionRefusedError once

    def __init__(self):
        # the test unblocks a pending read() by setting this event
        self.on_recv = asyncio.Event()
        self.test = self.RD_TEST_0_BYTES

    async def read(self, max_len: int):
        """Block until on_recv is set, then act according to self.test."""
        print(f'fakeReader test: {self.test}')
        await self.on_recv.wait()
        if self.test == self.RD_TEST_0_BYTES:
            return b''
        elif self.test == self.RD_TEST_13_BYTES:
            print('fakeReader return 13 bytes')
            self.test = self.RD_TEST_0_BYTES
            return b'test-data-req'
        elif self.test == self.RD_TEST_TIMEOUT:
            raise TimeoutError
        elif self.test == self.RD_TEST_SW_EXCEPT:
            self.test = self.RD_TEST_0_BYTES
            # deliberate: 'unknown_var' does not exist, so this raises
            # AttributeError to simulate an unexpected software fault
            self.unknown_var += 1
        elif self.test == self.RD_TEST_OS_ERROR:
            self.test = self.RD_TEST_0_BYTES
            raise ConnectionRefusedError

    def feed_eof(self):
        # StreamReader API compatibility; nothing to do here
        return
|
||||
|
||||
|
||||
class FakeWriter():
    """Minimal asyncio StreamWriter stand-in for connection tests."""

    def __init__(self, conn='remote.intern'):
        self.conn = conn        # reported as 'peername'
        self.closing = False    # flipped by close()

    def write(self, buf: bytes):
        # discard all outgoing data
        return

    async def drain(self):
        await asyncio.sleep(0)

    def get_extra_info(self, sel: str):
        # only the two selectors used by the code under test are supported
        info = {'peername': self.conn, 'sockname': 'sock:1234'}
        assert sel in info
        return info[sel]

    def is_closing(self):
        return self.closing

    def close(self):
        self.closing = True

    async def wait_closed(self):
        await asyncio.sleep(0)
|
||||
|
||||
|
||||
@pytest.fixture
def patch_open():
    # replace asyncio.open_connection with a fake that returns a
    # FakeReader/FakeWriter pair instead of opening a real socket
    async def new_conn(conn):
        await asyncio.sleep(0)
        return FakeReader(), FakeWriter(conn)

    def new_open(host: str, port: int):
        return new_conn(f'{host}:{port}')

    with patch.object(asyncio, 'open_connection', new_open) as conn:
        yield conn
|
||||
|
||||
@pytest.fixture
def patch_open_timeout():
    """Patch asyncio.open_connection so every connect raises TimeoutError."""
    def failing_open(host: str, port: int):
        raise TimeoutError

    with patch.object(asyncio, 'open_connection', failing_open) as mocked:
        yield mocked
|
||||
|
||||
@pytest.fixture
def patch_open_value_error():
    """Patch asyncio.open_connection so every connect raises ValueError."""
    def failing_open(host: str, port: int):
        raise ValueError

    with patch.object(asyncio, 'open_connection', failing_open) as mocked:
        yield mocked
|
||||
|
||||
@pytest.fixture
def patch_open_conn_abort():
    """Patch asyncio.open_connection to raise ConnectionAbortedError."""
    def failing_open(host: str, port: int):
        raise ConnectionAbortedError

    with patch.object(asyncio, 'open_connection', failing_open) as mocked:
        yield mocked
|
||||
|
||||
@pytest.fixture
def patch_no_mqtt():
    # stub out Mqtt.publish so no broker connection is attempted
    with patch.object(Mqtt, 'publish') as conn:
        yield conn
|
||||
|
||||
@pytest.fixture
def patch_mqtt_err():
    # make Mqtt.publish fail with an aiomqtt error code
    def new_publish(self, key, data):
        raise MqttCodeError(None)

    with patch.object(Mqtt, 'publish', new_publish) as conn:
        yield conn
|
||||
|
||||
@pytest.fixture
def patch_mqtt_except():
    # make Mqtt.publish fail with a generic (non-MQTT) exception
    def new_publish(self, key, data):
        raise ValueError("Test")

    with patch.object(Mqtt, 'publish', new_publish) as conn:
        yield conn
|
||||
|
||||
@pytest.mark.asyncio
async def test_modbus_conn(patch_open):
    """ModbusConn context manager creates and releases one inverter."""
    _ = patch_open
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0

    async with ModbusConn('test.local', 1234) as inverter:
        stream = inverter.local.stream
        assert stream.node_id == 'G3P'
        assert stream.addr == ('test.local:1234')
        # the patched open_connection delivered our fakes
        assert type(stream.ifc._reader) is FakeReader
        assert type(stream.ifc._writer) is FakeWriter
        assert Infos.stat['proxy']['Inverter_Cnt'] == 1
        del inverter

    # no inverter instance may be left behind
    for _ in InverterBase:
        assert False

    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
async def test_modbus_no_cnf():
    """Without an applied config fixture, ModbusTcp creates no connection."""
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
    # use the running loop directly: asyncio.get_event_loop() inside a
    # coroutine is deprecated, and this file already uses get_running_loop()
    loop = asyncio.get_running_loop()
    ModbusTcp(loop)
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
async def test_modbus_timeout(config_conn, patch_open_timeout):
    """A TimeoutError on connect must leave no inverter connection behind."""
    _ = config_conn
    _ = patch_open_timeout
    assert asyncio.get_running_loop()
    Proxy.class_init()

    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
    # NOTE(review): asyncio.get_event_loop() in a coroutine is deprecated;
    # get_running_loop() would be the modern equivalent
    loop = asyncio.get_event_loop()
    ModbusTcp(loop)
    await asyncio.sleep(0.01)
    # no client connection for 'inv_2' may have been created
    for m in Message:
        if (m.node_id == 'inv_2'):
            assert False

    await asyncio.sleep(0.01)
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
async def test_modbus_value_err(config_conn, patch_open_value_error):
    """A ValueError on connect must leave no inverter connection behind."""
    _ = config_conn
    _ = patch_open_value_error
    assert asyncio.get_running_loop()
    Proxy.class_init()

    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
    loop = asyncio.get_event_loop()
    ModbusTcp(loop)
    await asyncio.sleep(0.01)
    # no client connection for 'inv_2' may have been created
    for m in Message:
        if (m.node_id == 'inv_2'):
            assert False

    await asyncio.sleep(0.01)
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
async def test_modbus_conn_abort(config_conn, patch_open_conn_abort):
    """A ConnectionAbortedError on connect leaves no inverter behind."""
    _ = config_conn
    _ = patch_open_conn_abort
    assert asyncio.get_running_loop()
    Proxy.class_init()

    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
    loop = asyncio.get_event_loop()
    ModbusTcp(loop)
    await asyncio.sleep(0.01)
    # no client connection for 'inv_2' may have been created
    for m in Message:
        if (m.node_id == 'inv_2'):
            assert False

    await asyncio.sleep(0.01)
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
async def test_modbus_cnf2(config_conn, patch_no_mqtt, patch_open):
    """A configured client-mode inverter ('inv_2') is connected once."""
    _ = config_conn
    _ = patch_open
    _ = patch_no_mqtt
    assert asyncio.get_running_loop()
    Proxy.class_init()

    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
    ModbusTcp(asyncio.get_event_loop())
    await asyncio.sleep(0.01)
    test = 0
    for m in Message:
        if (m.node_id == 'inv_2'):
            test += 1
            assert Infos.stat['proxy']['Inverter_Cnt'] == 1
            m.shutdown_started = True    # suppress automatic reconnect
            # unblock FakeReader.read(); default mode returns b''
            m.ifc._reader.on_recv.set()
            del m

    assert 1 == test
    await asyncio.sleep(0.01)
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
async def test_modbus_cnf3(config_conn, patch_no_mqtt, patch_open):
    """With tim_restart=0 a closed connection is re-established once."""
    _ = config_conn
    _ = patch_open
    _ = patch_no_mqtt
    assert asyncio.get_running_loop()
    Proxy.class_init()

    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
    ModbusTcp(asyncio.get_event_loop(), tim_restart= 0)
    await asyncio.sleep(0.01)
    test = 0
    for m in Message:
        if (m.node_id == 'inv_2'):
            assert Infos.stat['proxy']['Inverter_Cnt'] == 1
            test += 1
            if test == 1:
                # first connection: let it close and wait for the restart
                m.shutdown_started = False
                m.ifc._reader.on_recv.set()
                await asyncio.sleep(0.1)
                assert m.state == State.closed
                await asyncio.sleep(0.1)
            else:
                # second (restarted) connection: shut down for good
                m.shutdown_started = True
                m.ifc._reader.on_recv.set()
            del m

    assert 2 == test
    await asyncio.sleep(0.01)
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
async def test_mqtt_err(config_conn, patch_mqtt_err, patch_open):
    """An MqttCodeError during publish must not leak inverter connections."""
    _ = config_conn
    _ = patch_open
    _ = patch_mqtt_err
    assert asyncio.get_running_loop()
    Proxy.class_init()

    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
    ModbusTcp(asyncio.get_event_loop(), tim_restart= 0)
    await asyncio.sleep(0.01)
    test = 0
    for m in Message:
        if (m.node_id == 'inv_2'):
            assert Infos.stat['proxy']['Inverter_Cnt'] == 1
            test += 1
            if test == 1:
                # first connection: let it close and wait for the restart
                m.shutdown_started = False
                m.ifc._reader.on_recv.set()
                await asyncio.sleep(0.1)
                assert m.state == State.closed
                await asyncio.sleep(0.1)
                await asyncio.sleep(0.1)
            else:
                # second (restarted) connection: shut down for good
                m.shutdown_started = True
                m.ifc._reader.on_recv.set()
            del m

    await asyncio.sleep(0.01)
    assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_except(config_conn, patch_mqtt_except, patch_open):
|
||||
_ = config_conn
|
||||
_ = patch_open
|
||||
_ = patch_mqtt_except
|
||||
assert asyncio.get_running_loop()
|
||||
Proxy.class_init()
|
||||
|
||||
assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
ModbusTcp(asyncio.get_event_loop(), tim_restart= 0)
|
||||
await asyncio.sleep(0.01)
|
||||
test = 0
|
||||
for m in Message:
|
||||
if (m.node_id == 'inv_2'):
|
||||
assert Infos.stat['proxy']['Inverter_Cnt'] == 1
|
||||
test += 1
|
||||
if test == 1:
|
||||
m.shutdown_started = False
|
||||
m.ifc._reader.on_recv.set()
|
||||
await asyncio.sleep(0.1)
|
||||
assert m.state == State.closed
|
||||
await asyncio.sleep(0.1)
|
||||
else:
|
||||
m.shutdown_started = True
|
||||
m.ifc._reader.on_recv.set()
|
||||
del m
|
||||
|
||||
await asyncio.sleep(0.01)
|
||||
assert Infos.stat['proxy']['Inverter_Cnt'] == 0
|
||||
270
app/tests/test_mqtt.py
Normal file
270
app/tests/test_mqtt.py
Normal file
@@ -0,0 +1,270 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
import aiomqtt
|
||||
import logging
|
||||
|
||||
from mock import patch, Mock
|
||||
from async_stream import AsyncIfcImpl
|
||||
from singleton import Singleton
|
||||
from mqtt import Mqtt
|
||||
from modbus import Modbus
|
||||
from gen3plus.solarman_v5 import SolarmanV5
|
||||
from cnf.config import Config
|
||||
|
||||
NO_MOSQUITTO_TEST = False
|
||||
'''disable all tests with connections to test.mosquitto.org'''
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def module_init():
|
||||
Singleton._instances.clear()
|
||||
yield
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def test_port():
|
||||
return 1883
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def test_hostname():
|
||||
# if getenv("GITHUB_ACTIONS") == "true":
|
||||
# return 'mqtt'
|
||||
# else:
|
||||
return 'test.mosquitto.org'
|
||||
|
||||
@pytest.fixture
|
||||
def config_mqtt_conn(test_hostname, test_port):
|
||||
Config.act_config = {'mqtt':{'host': test_hostname, 'port': test_port, 'user': '', 'passwd': ''},
|
||||
'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'}
|
||||
}
|
||||
|
||||
@pytest.fixture
|
||||
def config_no_conn(test_port):
|
||||
Config.act_config = {'mqtt':{'host': "", 'port': test_port, 'user': '', 'passwd': ''},
|
||||
'ha':{'auto_conf_prefix': 'homeassistant','discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun'}
|
||||
}
|
||||
|
||||
@pytest.fixture
|
||||
def spy_at_cmd():
|
||||
conn = SolarmanV5(('test.local', 1234), server_side=True, client_mode= False, ifc=AsyncIfcImpl())
|
||||
conn.node_id = 'inv_2/'
|
||||
with patch.object(conn, 'send_at_cmd', wraps=conn.send_at_cmd) as wrapped_conn:
|
||||
yield wrapped_conn
|
||||
conn.close()
|
||||
|
||||
@pytest.fixture
|
||||
def spy_modbus_cmd():
|
||||
conn = SolarmanV5(('test.local', 1234), server_side=True, client_mode= False, ifc=AsyncIfcImpl())
|
||||
conn.node_id = 'inv_1/'
|
||||
with patch.object(conn, 'send_modbus_cmd', wraps=conn.send_modbus_cmd) as wrapped_conn:
|
||||
yield wrapped_conn
|
||||
conn.close()
|
||||
|
||||
@pytest.fixture
|
||||
def spy_modbus_cmd_client():
|
||||
conn = SolarmanV5(('test.local', 1234), server_side=False, client_mode= False, ifc=AsyncIfcImpl())
|
||||
conn.node_id = 'inv_1/'
|
||||
with patch.object(conn, 'send_modbus_cmd', wraps=conn.send_modbus_cmd) as wrapped_conn:
|
||||
yield wrapped_conn
|
||||
conn.close()
|
||||
|
||||
def test_native_client(test_hostname, test_port):
|
||||
"""Sanity check: Make sure the paho-mqtt client can connect to the test
|
||||
MQTT server. Otherwise the test set NO_MOSQUITTO_TEST to True and disable
|
||||
all test cases which depends on the test.mosquitto.org server
|
||||
"""
|
||||
global NO_MOSQUITTO_TEST
|
||||
if NO_MOSQUITTO_TEST:
|
||||
pytest.skip('skipping, since Mosquitto is not reliable at the moment')
|
||||
|
||||
import paho.mqtt.client as mqtt
|
||||
import threading
|
||||
|
||||
c = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
|
||||
c.loop_start()
|
||||
try:
|
||||
# Just make sure the client connects successfully
|
||||
on_connect = threading.Event()
|
||||
c.on_connect = Mock(side_effect=lambda *_: on_connect.set())
|
||||
c.connect_async(test_hostname, test_port)
|
||||
if not on_connect.wait(3):
|
||||
NO_MOSQUITTO_TEST = True # skip all mosquitto tests
|
||||
pytest.skip('skipping, since Mosquitto is not reliable at the moment')
|
||||
finally:
|
||||
c.loop_stop()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_connection(config_mqtt_conn):
|
||||
global NO_MOSQUITTO_TEST
|
||||
if NO_MOSQUITTO_TEST:
|
||||
pytest.skip('skipping, since Mosquitto is not reliable at the moment')
|
||||
|
||||
_ = config_mqtt_conn
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
on_connect = asyncio.Event()
|
||||
async def cb():
|
||||
on_connect.set()
|
||||
|
||||
try:
|
||||
m = Mqtt(cb)
|
||||
assert m.task
|
||||
assert await asyncio.wait_for(on_connect.wait(), 5)
|
||||
# await asyncio.sleep(1)
|
||||
assert 0 == m.ha_restarts
|
||||
await m.publish('homeassistant/status', 'online')
|
||||
except TimeoutError:
|
||||
assert False
|
||||
finally:
|
||||
await m.close()
|
||||
await m.publish('homeassistant/status', 'online')
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ha_reconnect(config_mqtt_conn):
|
||||
global NO_MOSQUITTO_TEST
|
||||
if NO_MOSQUITTO_TEST:
|
||||
pytest.skip('skipping, since Mosquitto is not reliable at the moment')
|
||||
|
||||
_ = config_mqtt_conn
|
||||
on_connect = asyncio.Event()
|
||||
async def cb():
|
||||
on_connect.set()
|
||||
|
||||
try:
|
||||
m = Mqtt(cb)
|
||||
msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'offline', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
assert not on_connect.is_set()
|
||||
|
||||
msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
assert on_connect.is_set()
|
||||
|
||||
finally:
|
||||
await m.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_no_config(config_no_conn):
|
||||
_ = config_no_conn
|
||||
assert asyncio.get_running_loop()
|
||||
|
||||
on_connect = asyncio.Event()
|
||||
async def cb():
|
||||
on_connect.set()
|
||||
|
||||
try:
|
||||
m = Mqtt(cb)
|
||||
assert m.task
|
||||
await asyncio.sleep(0)
|
||||
assert not on_connect.is_set()
|
||||
try:
|
||||
await m.publish('homeassistant/status', 'online')
|
||||
assert False
|
||||
except Exception:
|
||||
pass
|
||||
except TimeoutError:
|
||||
assert False
|
||||
finally:
|
||||
await m.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd):
|
||||
_ = config_mqtt_conn
|
||||
spy = spy_modbus_cmd
|
||||
try:
|
||||
m = Mqtt(None)
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x2008, 2, logging.INFO)
|
||||
|
||||
spy.reset_mock()
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'100', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 1024, logging.INFO)
|
||||
|
||||
spy.reset_mock()
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'50', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_awaited_once_with(Modbus.WRITE_SINGLE_REG, 0x202c, 512, logging.INFO)
|
||||
|
||||
spy.reset_mock()
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_awaited_once_with(Modbus.READ_REGS, 0x3000, 10, logging.INFO)
|
||||
|
||||
spy.reset_mock()
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_inputs', payload= b'0x3000, 10', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_awaited_once_with(Modbus.READ_INPUTS, 0x3000, 10, logging.INFO)
|
||||
|
||||
finally:
|
||||
await m.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_msg_dispatch_err(config_mqtt_conn, spy_modbus_cmd):
|
||||
_ = config_mqtt_conn
|
||||
spy = spy_modbus_cmd
|
||||
try:
|
||||
m = Mqtt(None)
|
||||
# test out of range param
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'-1', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_not_called()
|
||||
|
||||
# test unknown node_id
|
||||
spy.reset_mock()
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_2/out_coeff', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_not_called()
|
||||
|
||||
# test invalid fload param
|
||||
spy.reset_mock()
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/out_coeff', payload= b'2, 3', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_not_called()
|
||||
|
||||
spy.reset_mock()
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/modbus_read_regs', payload= b'0x3000, 10, 7', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_not_called()
|
||||
finally:
|
||||
await m.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_msg_ignore_client_conn(config_mqtt_conn, spy_modbus_cmd_client):
|
||||
'''don't call function if connnection is not in server mode'''
|
||||
_ = config_mqtt_conn
|
||||
spy = spy_modbus_cmd_client
|
||||
try:
|
||||
m = Mqtt(None)
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_not_called()
|
||||
finally:
|
||||
await m.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ignore_unknown_func(config_mqtt_conn):
|
||||
'''don't dispatch for unknwon function names'''
|
||||
_ = config_mqtt_conn
|
||||
try:
|
||||
m = Mqtt(None)
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_1/rated_load', payload= b'2', qos= 0, retain = False, mid= 0, properties= None)
|
||||
for _ in m.each_inverter(msg, 'unkown_fnc'):
|
||||
assert False
|
||||
finally:
|
||||
await m.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_at_cmd_dispatch(config_mqtt_conn, spy_at_cmd):
|
||||
_ = config_mqtt_conn
|
||||
spy = spy_at_cmd
|
||||
try:
|
||||
m = Mqtt(None)
|
||||
msg = aiomqtt.Message(topic= 'tsun/inv_2/at_cmd', payload= b'AT+', qos= 0, retain = False, mid= 0, properties= None)
|
||||
await m.dispatch_msg(msg)
|
||||
spy.assert_awaited_once_with('AT+')
|
||||
|
||||
finally:
|
||||
await m.close()
|
||||
91
app/tests/test_proxy.py
Normal file
91
app/tests/test_proxy.py
Normal file
@@ -0,0 +1,91 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import asyncio
|
||||
import aiomqtt
|
||||
import logging
|
||||
|
||||
from mock import patch, Mock
|
||||
from singleton import Singleton
|
||||
from proxy import Proxy
|
||||
from mqtt import Mqtt
|
||||
from gen3plus.solarman_v5 import SolarmanV5
|
||||
from cnf.config import Config
|
||||
|
||||
|
||||
pytest_plugins = ('pytest_asyncio',)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def module_init():
|
||||
def new_init(cls, cb_mqtt_is_up):
|
||||
pass # empty test methos
|
||||
|
||||
Singleton._instances.clear()
|
||||
with patch.object(Mqtt, '__init__', new_init):
|
||||
yield
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def test_port():
|
||||
return 1883
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def test_hostname():
|
||||
# if getenv("GITHUB_ACTIONS") == "true":
|
||||
# return 'mqtt'
|
||||
# else:
|
||||
return 'test.mosquitto.org'
|
||||
|
||||
@pytest.fixture
|
||||
def config_conn(test_hostname, test_port):
|
||||
Config.act_config = {
|
||||
'mqtt':{
|
||||
'host': test_hostname,
|
||||
'port': test_port,
|
||||
'user': '',
|
||||
'passwd': ''
|
||||
},
|
||||
'ha':{
|
||||
'auto_conf_prefix': 'homeassistant',
|
||||
'discovery_prefix': 'homeassistant',
|
||||
'entity_prefix': 'tsun',
|
||||
'proxy_node_id': 'test_1',
|
||||
'proxy_unique_id': ''
|
||||
},
|
||||
'inverters': {
|
||||
'allow_all': True,
|
||||
"R170000000000001":{
|
||||
'node_id': 'inv_1'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_inverter_cb(config_conn):
|
||||
_ = config_conn
|
||||
|
||||
with patch.object(Proxy, '_cb_mqtt_is_up', wraps=Proxy._cb_mqtt_is_up) as spy:
|
||||
print('call Proxy.class_init')
|
||||
Proxy.class_init()
|
||||
assert 'homeassistant/' == Proxy.discovery_prfx
|
||||
assert 'tsun/' == Proxy.entity_prfx
|
||||
assert 'test_1/' == Proxy.proxy_node_id
|
||||
await Proxy._cb_mqtt_is_up()
|
||||
spy.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_is_up(config_conn):
|
||||
_ = config_conn
|
||||
|
||||
with patch.object(Mqtt, 'publish') as spy:
|
||||
Proxy.class_init()
|
||||
await Proxy._cb_mqtt_is_up()
|
||||
spy.assert_called()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mqtt_proxy_statt_invalid(config_conn):
|
||||
_ = config_conn
|
||||
|
||||
with patch.object(Mqtt, 'publish') as spy:
|
||||
Proxy.class_init()
|
||||
await Proxy._async_publ_mqtt_proxy_stat('InValId_kEy')
|
||||
spy.assert_not_called()
|
||||
24
app/tests/test_server.py
Normal file
24
app/tests/test_server.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
import logging
|
||||
import os
|
||||
from mock import patch
|
||||
from server import get_log_level
|
||||
|
||||
def test_get_log_level():
|
||||
|
||||
with patch.dict(os.environ, {'LOG_LVL': ''}):
|
||||
log_lvl = get_log_level()
|
||||
assert log_lvl == logging.INFO
|
||||
|
||||
with patch.dict(os.environ, {'LOG_LVL': 'DEBUG'}):
|
||||
log_lvl = get_log_level()
|
||||
assert log_lvl == logging.DEBUG
|
||||
|
||||
with patch.dict(os.environ, {'LOG_LVL': 'WARN'}):
|
||||
log_lvl = get_log_level()
|
||||
assert log_lvl == logging.WARNING
|
||||
|
||||
with patch.dict(os.environ, {'LOG_LVL': 'UNKNOWN'}):
|
||||
log_lvl = get_log_level()
|
||||
assert log_lvl == logging.INFO
|
||||
19
app/tests/test_singleton.py
Normal file
19
app/tests/test_singleton.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# test_with_pytest.py
|
||||
import pytest
|
||||
from singleton import Singleton
|
||||
|
||||
class Example(metaclass=Singleton):
|
||||
def __init__(self):
|
||||
pass # is a dummy test class
|
||||
|
||||
def test_singleton_metaclass():
|
||||
Singleton._instances.clear()
|
||||
a = Example()
|
||||
assert 1 == len(Singleton._instances)
|
||||
b = Example()
|
||||
assert 1 == len(Singleton._instances)
|
||||
assert a is b
|
||||
del a
|
||||
assert 1 == len(Singleton._instances)
|
||||
del b
|
||||
assert 0 == len(Singleton._instances)
|
||||
1835
app/tests/test_solarman.py
Normal file
1835
app/tests/test_solarman.py
Normal file
File diff suppressed because it is too large
Load Diff
233
app/tests/test_solarman_emu.py
Normal file
233
app/tests/test_solarman_emu.py
Normal file
@@ -0,0 +1,233 @@
|
||||
import pytest
|
||||
import asyncio
|
||||
|
||||
from async_stream import AsyncIfcImpl, StreamPtr
|
||||
from gen3plus.solarman_v5 import SolarmanV5, SolarmanBase
|
||||
from gen3plus.solarman_emu import SolarmanEmu
|
||||
from infos import Infos, Register
|
||||
|
||||
from test_solarman import FakeIfc, MemoryStream, get_sn_int, get_sn, correct_checksum, config_tsun_inv1, msg_modbus_rsp
|
||||
from test_infos_g3p import str_test_ip, bytes_test_ip
|
||||
|
||||
timestamp = 0x3224c8bc
|
||||
|
||||
class InvStream(MemoryStream):
|
||||
def __init__(self, msg=b''):
|
||||
super().__init__(msg)
|
||||
|
||||
def _emu_timestamp(self):
|
||||
return timestamp
|
||||
|
||||
class CldStream(SolarmanEmu):
|
||||
def __init__(self, inv: InvStream):
|
||||
_ifc = FakeIfc()
|
||||
_ifc.remote.stream = inv
|
||||
super().__init__(('test.local', 1234), _ifc, server_side=False, client_mode=False)
|
||||
self.__msg = b''
|
||||
self.__msg_len = 0
|
||||
self.__offs = 0
|
||||
self.msg_count = 0
|
||||
self.msg_recvd = []
|
||||
|
||||
def _emu_timestamp(self):
|
||||
return timestamp
|
||||
|
||||
def append_msg(self, msg):
|
||||
self.__msg += msg
|
||||
self.__msg_len += len(msg)
|
||||
|
||||
def _read(self) -> int:
|
||||
copied_bytes = 0
|
||||
try:
|
||||
if (self.__offs < self.__msg_len):
|
||||
self.ifc.rx_fifo += self.__msg[self.__offs:]
|
||||
copied_bytes = self.__msg_len - self.__offs
|
||||
self.__offs = self.__msg_len
|
||||
except Exception:
|
||||
pass # ignore exceptions here
|
||||
return copied_bytes
|
||||
|
||||
def _SolarmanBase__flush_recv_msg(self) -> None:
|
||||
self.msg_recvd.append(
|
||||
{
|
||||
'control': self.control,
|
||||
'seq': str(self.seq),
|
||||
'data_len': self.data_len
|
||||
}
|
||||
)
|
||||
super()._SolarmanBase__flush_recv_msg()
|
||||
self.msg_count += 1
|
||||
|
||||
@pytest.fixture
|
||||
def device_ind_msg(bytes_test_ip): # 0x4110
|
||||
msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xbc\xc8\x24\x32'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x00\x01\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + bytes_test_ip
|
||||
msg += b'\x0f\x00\x01\xb0'
|
||||
msg += b'\x02\x0f\x00\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += correct_checksum(msg)
|
||||
msg += b'\x15'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def inverter_ind_msg(): # 0x4210
|
||||
msg = b'\xa5\x99\x01\x10\x42\x00\x01' +get_sn() +b'\x01\xb0\x02\xbc\xc8'
|
||||
msg += b'\x24\x32\x3c\x00\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
|
||||
msg += b'\x59\x31\x37\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x31'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00'
|
||||
msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00'
|
||||
msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41'
|
||||
msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c'
|
||||
msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05'
|
||||
msg += b'\x00\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00'
|
||||
msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00'
|
||||
msg += b'\x00\x00\x00\x00'
|
||||
msg += correct_checksum(msg)
|
||||
msg += b'\x15'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def inverter_rsp_msg(): # 0x1210
|
||||
msg = b'\xa5\x0a\x00\x10\x12\x02\02' +get_sn() +b'\x01\x01'
|
||||
msg += b'\x00\x00\x00\x00'
|
||||
msg += b'\x3c\x00\x00\x00'
|
||||
msg += correct_checksum(msg)
|
||||
msg += b'\x15'
|
||||
return msg
|
||||
|
||||
@pytest.fixture
|
||||
def heartbeat_ind():
|
||||
msg = b'\xa5\x01\x00\x10G\x00\x01\x00\x00\x00\x00\x00Y\x15'
|
||||
return msg
|
||||
|
||||
def test_emu_init_close():
|
||||
# received a message with wrong start byte plus an valid message
|
||||
# the complete receive buffer must be cleared to
|
||||
# find the next valid message
|
||||
inv = InvStream()
|
||||
cld = CldStream(inv)
|
||||
cld.close()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_emu_start(config_tsun_inv1, msg_modbus_rsp, str_test_ip, device_ind_msg):
|
||||
_ = config_tsun_inv1
|
||||
assert asyncio.get_running_loop()
|
||||
inv = InvStream(msg_modbus_rsp)
|
||||
|
||||
assert asyncio.get_running_loop() == inv.mb_timer.loop
|
||||
await inv.send_start_cmd(get_sn_int(), str_test_ip, True, inv.mb_first_timeout)
|
||||
inv.read() # read complete msg, and dispatch msg
|
||||
assert not inv.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert inv.msg_count == 1
|
||||
assert inv.control == 0x1510
|
||||
|
||||
cld = CldStream(inv)
|
||||
cld.ifc.update_header_cb(inv.ifc.fwd_fifo.peek())
|
||||
assert inv.ifc.fwd_fifo.peek() == device_ind_msg
|
||||
cld.close()
|
||||
|
||||
def test_snd_hb(config_tsun_inv1, heartbeat_ind):
|
||||
_ = config_tsun_inv1
|
||||
inv = InvStream()
|
||||
cld = CldStream(inv)
|
||||
|
||||
# await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
|
||||
cld.send_heartbeat_cb(0)
|
||||
assert cld.ifc.tx_fifo.peek() == heartbeat_ind
|
||||
cld.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_snd_inv_data(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg):
|
||||
_ = config_tsun_inv1
|
||||
inv = InvStream()
|
||||
inv.db.set_db_def_value(Register.INVERTER_STATUS, 1)
|
||||
inv.db.set_db_def_value(Register.DETECT_STATUS_1, 2)
|
||||
inv.db.set_db_def_value(Register.VERSION, 'V4.0.10')
|
||||
inv.db.set_db_def_value(Register.GRID_VOLTAGE, 224.8)
|
||||
inv.db.set_db_def_value(Register.GRID_CURRENT, 0.73)
|
||||
inv.db.set_db_def_value(Register.GRID_FREQUENCY, 50.05)
|
||||
inv.db.set_db_def_value(Register.PROD_COMPL_TYPE, 6)
|
||||
assert asyncio.get_running_loop() == inv.mb_timer.loop
|
||||
await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
|
||||
inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value
|
||||
|
||||
cld = CldStream(inv)
|
||||
cld.time_ofs = 0x33e447a0
|
||||
cld.last_sync = cld._emu_timestamp() - 60
|
||||
cld.pkt_cnt = 0x802
|
||||
assert cld.data_up_inv == 17 # check test value
|
||||
cld.data_up_inv = 0.1 # speedup test first data msg
|
||||
cld._init_new_client_conn()
|
||||
cld.data_up_inv = 0.5 # timeout for second data msg
|
||||
await asyncio.sleep(0.2)
|
||||
assert cld.ifc.tx_fifo.get() == inverter_ind_msg
|
||||
|
||||
cld.append_msg(inverter_rsp_msg)
|
||||
cld.read() # read complete msg, and dispatch msg
|
||||
|
||||
assert not cld.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert cld.msg_count == 1
|
||||
assert cld.header_len==11
|
||||
assert cld.snr == 2070233889
|
||||
assert cld.unique_id == '2070233889'
|
||||
assert cld.msg_recvd[0]['control']==0x1210
|
||||
assert cld.msg_recvd[0]['seq']=='02:02'
|
||||
assert cld.msg_recvd[0]['data_len']==0x0a
|
||||
assert '02b0' == cld.db.get_db_value(Register.SENSOR_LIST, None)
|
||||
assert cld.db.stat['proxy']['Unknown_Msg'] == 0
|
||||
|
||||
cld.close()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_rcv_invalid(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg):
|
||||
_ = config_tsun_inv1
|
||||
inv = InvStream()
|
||||
assert asyncio.get_running_loop() == inv.mb_timer.loop
|
||||
await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout)
|
||||
inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value
|
||||
|
||||
cld = CldStream(inv)
|
||||
cld._init_new_client_conn()
|
||||
|
||||
cld.append_msg(inverter_ind_msg)
|
||||
cld.read() # read complete msg, and dispatch msg
|
||||
|
||||
assert not cld.header_valid # must be invalid, since msg was handled and buffer flushed
|
||||
assert cld.msg_count == 1
|
||||
assert cld.header_len==11
|
||||
assert cld.snr == 2070233889
|
||||
assert cld.unique_id == '2070233889'
|
||||
assert cld.msg_recvd[0]['control']==0x4210
|
||||
assert cld.msg_recvd[0]['seq']=='00:01'
|
||||
assert cld.msg_recvd[0]['data_len']==0x199
|
||||
assert '02b0' == cld.db.get_db_value(Register.SENSOR_LIST, None)
|
||||
assert cld.db.stat['proxy']['Unknown_Msg'] == 1
|
||||
|
||||
|
||||
cld.close()
|
||||
2268
app/tests/test_talent.py
Normal file
2268
app/tests/test_talent.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,3 @@
|
||||
|
||||
version: '3.0'
|
||||
|
||||
services:
|
||||
####### H O M E - A S S I S T A N T #####
|
||||
home-assistant:
|
||||
@@ -34,7 +31,7 @@ services:
|
||||
ports:
|
||||
- 8123:8123
|
||||
volumes:
|
||||
- ${PROJECT_DIR}./homeassistant/config:/config
|
||||
- ${PROJECT_DIR:-./}homeassistant/config:/config
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
healthcheck:
|
||||
test: curl --fail http://0.0.0.0:8123/auth/providers || exit 1
|
||||
@@ -56,18 +53,18 @@ services:
|
||||
expose:
|
||||
- 1883
|
||||
volumes:
|
||||
- ${PROJECT_DIR}./mosquitto/config:/mosquitto/config
|
||||
- ${PROJECT_DIR}./mosquitto/data:/mosquitto/data
|
||||
- ${PROJECT_DIR:-./}mosquitto/config:/mosquitto/config
|
||||
- ${PROJECT_DIR:-./}mosquitto/data:/mosquitto/data
|
||||
networks:
|
||||
outside:
|
||||
ipv4_address: 172.28.1.5 # static IP required to receive mDNS traffic
|
||||
|
||||
- outside
|
||||
|
||||
|
||||
|
||||
####### T S U N - P R O X Y ######
|
||||
tsun-proxy:
|
||||
container_name: tsun-proxy
|
||||
image: ghcr.io/s-allius/tsun-gen3-proxy:latest
|
||||
# image: ghcr.io/s-allius/tsun-gen3-proxy:rc
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- mqtt
|
||||
@@ -77,12 +74,18 @@ services:
|
||||
- GID=${GID:-1000}
|
||||
dns:
|
||||
- ${DNS1:-8.8.8.8}
|
||||
- $(DNS2:-4.4.4.4}
|
||||
- ${DNS2:-4.4.4.4}
|
||||
ports:
|
||||
- 5005:5005
|
||||
- 8127:8127
|
||||
- 10000:10000
|
||||
volumes:
|
||||
- ${PROJECT_DIR}./tsun-proxy/log:/home/tsun-proxy/log
|
||||
- ${PROJECT_DIR}./tsun-proxy/config:/home/tsun-proxy/config
|
||||
- ${PROJECT_DIR:-./}tsun-proxy/log:/home/tsun-proxy/log
|
||||
- ${PROJECT_DIR:-./}tsun-proxy/config:/home/tsun-proxy/config
|
||||
healthcheck:
|
||||
test: wget --no-verbose --tries=1 --spider http://127.0.0.1:8127/-/healthy || exit 1
|
||||
interval: 10s
|
||||
timeout: 3s
|
||||
networks:
|
||||
- outside
|
||||
|
||||
@@ -92,11 +95,4 @@ services:
|
||||
networks:
|
||||
outside:
|
||||
name: home-assistant
|
||||
external: true
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.28.1.0/26
|
||||
ip_range: 172.28.1.32/27
|
||||
gateway: 172.28.1.62
|
||||
|
||||
2
ha_addons/.gitignore
vendored
Normal file
2
ha_addons/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
.data.json
|
||||
config.yaml
|
||||
138
ha_addons/Makefile
Normal file
138
ha_addons/Makefile
Normal file
@@ -0,0 +1,138 @@
|
||||
#!make
|
||||
include ../.env
|
||||
|
||||
.PHONY: debug dev build clean rootfs repro rc rel
|
||||
|
||||
SHELL = /bin/sh
|
||||
JINJA = jinja2
|
||||
IMAGE = tsun-gen3-addon
|
||||
|
||||
|
||||
# Folders
|
||||
SRC=../app
|
||||
SRC_PROXY=$(SRC)/src
|
||||
CNF_PROXY=$(SRC)/config
|
||||
|
||||
ADDON_PATH = ha_addon
|
||||
DST=$(ADDON_PATH)/rootfs
|
||||
DST_PROXY=$(DST)/home/proxy
|
||||
|
||||
INST_BASE=../../ha-addons/ha-addons
|
||||
|
||||
TEMPL=templates
|
||||
|
||||
# collect source files
|
||||
SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\
|
||||
$(wildcard $(SRC_PROXY)/*.ini)\
|
||||
$(wildcard $(SRC_PROXY)/cnf/*.py)\
|
||||
$(wildcard $(SRC_PROXY)/gen3/*.py)\
|
||||
$(wildcard $(SRC_PROXY)/gen3plus/*.py)
|
||||
CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml)
|
||||
|
||||
# determine destination files
|
||||
TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%)
|
||||
CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%)
|
||||
|
||||
export BUILD_DATE := ${shell date -Iminutes}
|
||||
VERSION := $(shell cat $(SRC)/.version)
|
||||
export MAJOR := $(shell echo $(VERSION) | cut -f1 -d.)
|
||||
|
||||
PUBLIC_URL := $(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f1 -d/)
|
||||
PUBLIC_USER :=$(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f2 -d/)
|
||||
|
||||
|
||||
dev debug: build
|
||||
@echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PRIVAT_CONTAINER_REGISTRY)$(IMAGE)
|
||||
export VERSION=$(VERSION)-$@ && \
|
||||
export IMAGE=$(PRIVAT_CONTAINER_REGISTRY)$(IMAGE) && \
|
||||
docker buildx bake -f docker-bake.hcl $@
|
||||
|
||||
rc rel: build
|
||||
@echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PUBLIC_CONTAINER_REGISTRY)$(IMAGE)
|
||||
@echo login at $(PUBLIC_URL) as $(PUBLIC_USER)
|
||||
@DO_LOGIN="$(shell echo $(PUBLIC_CR_KEY) | docker login $(PUBLIC_URL) -u $(PUBLIC_USER) --password-stdin)"
|
||||
export VERSION=$(VERSION)-$@ && \
|
||||
export IMAGE=$(PUBLIC_CONTAINER_REGISTRY)$(IMAGE) && \
|
||||
docker buildx bake -f docker-bake.hcl $@
|
||||
|
||||
|
||||
build: rootfs $(ADDON_PATH)/config.yaml repro
|
||||
|
||||
clean:
|
||||
rm -r -f $(DST_PROXY)
|
||||
rm -f $(DST)/requirements.txt
|
||||
rm -f $(ADDON_PATH)/config.yaml
|
||||
rm -f $(TEMPL)/.data.json
|
||||
|
||||
#
|
||||
# Build rootfs and config.yaml as local add-on
|
||||
# The rootfs is needed to build the add-on Dockercontainers
|
||||
#
|
||||
|
||||
rootfs: $(TARGET_FILES) $(CONFIG_FILES) $(DST)/requirements.txt
|
||||
|
||||
STAGE=dev
|
||||
debug : STAGE=debug
|
||||
rc : STAGE=rc
|
||||
rel : STAGE=rel
|
||||
|
||||
$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/%
|
||||
@echo Copy $< to $@
|
||||
@mkdir -p $(@D)
|
||||
@cp $< $@
|
||||
|
||||
$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/%
|
||||
@echo Copy $< to $@
|
||||
@mkdir -p $(@D)
|
||||
@cp $< $@
|
||||
|
||||
$(DST)/requirements.txt : $(SRC)/requirements.txt
|
||||
@echo Copy $< to $@
|
||||
@cp $< $@
|
||||
|
||||
$(ADDON_PATH)/%.yaml: $(TEMPL)/%.jinja $(TEMPL)/.data.json
|
||||
$(JINJA) --strict -D AppVersion=$(VERSION) --format=json $^ -o $@
|
||||
|
||||
$(TEMPL)/.data.json: FORCE
|
||||
rsync --checksum $(TEMPL)/$(STAGE)_data.json $@
|
||||
|
||||
FORCE : ;
|
||||
|
||||
|
||||
#
|
||||
# Build repository for Home Assistant Add-On
|
||||
#
|
||||
|
||||
INST=$(INST_BASE)/ha_addon_dev
|
||||
repro_files = DOCS.md icon.png logo.png translations/de.yaml translations/en.yaml rootfs/run.sh
|
||||
repro_root = CHANGELOG.md
|
||||
repro_templates = config.yaml
|
||||
repro_subdirs = translations rootfs
|
||||
repro_vers = debug dev rc rel
|
||||
|
||||
repro_all_files := $(foreach dir,$(repro_vers), $(foreach file,$(repro_files),$(INST_BASE)/ha_addon_$(dir)/$(file)))
|
||||
repro_root_files := $(foreach dir,$(repro_vers), $(foreach file,$(repro_root),$(INST_BASE)/ha_addon_$(dir)/$(file)))
|
||||
repro_all_templates := $(foreach dir,$(repro_vers), $(foreach file,$(repro_templates),$(INST_BASE)/ha_addon_$(dir)/$(file)))
|
||||
repro_all_subdirs := $(foreach dir,$(repro_vers), $(foreach file,$(repro_subdirs),$(INST_BASE)/ha_addon_$(dir)/$(file)))
|
||||
|
||||
repro: $(repro_all_subdirs) $(repro_all_templates) $(repro_all_files) $(repro_root_files)
|
||||
|
||||
$(repro_all_subdirs) :
|
||||
mkdir -p $@
|
||||
|
||||
$(repro_all_templates) : $(INST_BASE)/ha_addon_%/config.yaml: $(TEMPL)/config.jinja $(TEMPL)/%_data.json $(SRC)/.version
|
||||
$(JINJA) --strict -D AppVersion=$(VERSION)-$* $< $(filter %.json,$^) -o $@
|
||||
|
||||
$(repro_root_files) : %/CHANGELOG.md : ../CHANGELOG.md
|
||||
cp $< $@
|
||||
|
||||
$(filter $(INST_BASE)/ha_addon_debug/%,$(repro_all_files)) : $(INST_BASE)/ha_addon_debug/% : ha_addon/%
|
||||
cp $< $@
|
||||
$(filter $(INST_BASE)/ha_addon_dev/%,$(repro_all_files)) : $(INST_BASE)/ha_addon_dev/% : ha_addon/%
|
||||
cp $< $@
|
||||
$(filter $(INST_BASE)/ha_addon_rc/%,$(repro_all_files)) : $(INST_BASE)/ha_addon_rc/% : ha_addon/%
|
||||
cp $< $@
|
||||
$(filter $(INST_BASE)/ha_addon_rel/%,$(repro_all_files)) : $(INST_BASE)/ha_addon_rel/% : ha_addon/%
|
||||
cp $< $@
|
||||
|
||||
|
||||
99
ha_addons/docker-bake.hcl
Normal file
99
ha_addons/docker-bake.hcl
Normal file
@@ -0,0 +1,99 @@
|
||||
variable "IMAGE" {
|
||||
default = "tsun-gen3-addon"
|
||||
}
|
||||
variable "VERSION" {
|
||||
default = "0.0.0"
|
||||
}
|
||||
variable "MAJOR" {
|
||||
default = "0"
|
||||
}
|
||||
variable "BUILD_DATE" {
|
||||
default = "dev"
|
||||
}
|
||||
variable "BRANCH" {
|
||||
default = ""
|
||||
}
|
||||
variable "DESCRIPTION" {
|
||||
default = "This proxy enables a reliable connection between TSUN third generation inverters (eg. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations."
|
||||
}
|
||||
|
||||
target "_common" {
|
||||
context = "ha_addon"
|
||||
dockerfile = "Dockerfile"
|
||||
args = {
|
||||
VERSION = "${VERSION}"
|
||||
environment = "production"
|
||||
}
|
||||
attest = [
|
||||
"type =provenance,mode=max",
|
||||
"type =sbom,generator=docker/scout-sbom-indexer:latest"
|
||||
]
|
||||
annotations = [
|
||||
"index:io.hass.version=${VERSION}",
|
||||
"index:io.hass.type=addon",
|
||||
"index:io.hass.arch=armhf|aarch64|i386|amd64",
|
||||
"index:org.opencontainers.image.title=TSUN-Proxy",
|
||||
"index:org.opencontainers.image.authors=Stefan Allius",
|
||||
"index:org.opencontainers.image.created=${BUILD_DATE}",
|
||||
"index:org.opencontainers.image.version=${VERSION}",
|
||||
"index:org.opencontainers.image.revision=${BRANCH}",
|
||||
"index:org.opencontainers.image.description=${DESCRIPTION}",
|
||||
"index:org.opencontainers.image.licenses=BSD-3-Clause",
|
||||
"index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon"
|
||||
]
|
||||
labels = {
|
||||
"io.hass.version" = "${VERSION}"
|
||||
"io.hass.type" = "addon"
|
||||
"io.hass.arch" = "armhf|aarch64|i386|amd64"
|
||||
"org.opencontainers.image.title" = "TSUN-Proxy"
|
||||
"org.opencontainers.image.authors" = "Stefan Allius"
|
||||
"org.opencontainers.image.created" = "${BUILD_DATE}"
|
||||
"org.opencontainers.image.version" = "${VERSION}"
|
||||
"org.opencontainers.image.revision" = "${BRANCH}"
|
||||
"org.opencontainers.image.description" = "${DESCRIPTION}"
|
||||
"org.opencontainers.image.licenses" = "BSD-3-Clause"
|
||||
"org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy/ha_addonsha_addon"
|
||||
}
|
||||
output = [
|
||||
"type=image,push=true"
|
||||
]
|
||||
|
||||
no-cache = false
|
||||
platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
|
||||
}
|
||||
|
||||
target "_debug" {
|
||||
args = {
|
||||
LOG_LVL = "DEBUG"
|
||||
environment = "dev"
|
||||
}
|
||||
}
|
||||
target "_prod" {
|
||||
args = {
|
||||
}
|
||||
}
|
||||
target "debug" {
|
||||
inherits = ["_common", "_debug"]
|
||||
tags = ["${IMAGE}:debug"]
|
||||
}
|
||||
|
||||
target "dev" {
|
||||
inherits = ["_common"]
|
||||
tags = ["${IMAGE}:dev"]
|
||||
}
|
||||
|
||||
target "preview" {
|
||||
inherits = ["_common", "_prod"]
|
||||
tags = ["${IMAGE}:preview", "${IMAGE}:${VERSION}"]
|
||||
}
|
||||
|
||||
target "rc" {
|
||||
inherits = ["_common", "_prod"]
|
||||
tags = ["${IMAGE}:rc", "${IMAGE}:${VERSION}"]
|
||||
}
|
||||
|
||||
target "rel" {
|
||||
inherits = ["_common", "_prod"]
|
||||
tags = ["${IMAGE}:latest", "${IMAGE}:${MAJOR}", "${IMAGE}:${VERSION}"]
|
||||
no-cache = true
|
||||
}
|
||||
162
ha_addons/ha_addon/DOCS.md
Normal file
162
ha_addons/ha_addon/DOCS.md
Normal file
@@ -0,0 +1,162 @@
|
||||
# Home Assistant Add-on: TSUN Proxy
|
||||
|
||||
[TSUN Proxy][tsunproxy] enables a reliable connection between TSUN third generation
|
||||
inverters and an MQTT broker. With the proxy, you can easily retrieve real-time values
|
||||
such as power, current and daily energy and integrate the inverter into Home Assistant.
|
||||
This works even without an internet connection.
|
||||
The optional connection to the TSUN Cloud can be disabled!
|
||||
|
||||
## Pre-requisites
|
||||
|
||||
1. This Add-on requires an MQTT broker to work.
|
||||
For a typical installation, we recommend the [Mosquitto add-on][Mosquitto] running on your Home Assistant.
|
||||
|
||||
2. You need to loop the proxy into the connection between the inverter and the TSUN Cloud,
|
||||
you must adapt the DNS record within the network that your inverter uses. You need a mapping
|
||||
from logger.talent-monitoring.com and/or iot.talent-monitoring.com to the IP address of your
|
||||
Home Assistant.
|
||||
This can be done, for example, by adding a local DNS record to [AdGuard Home Add-on][AdGuard]
|
||||
(navigate to `filters` on the AdGuard panel and add an entry under `custom filtering rules`).
|
||||
|
||||
## Installation
|
||||
|
||||
The installation of this add-on is pretty straightforward and not different in
|
||||
comparison to installing any other Home Assistant add-on.
|
||||
|
||||
1. Add the repository URL to the Home Assistant add-on store
|
||||
[![Add repository on my Home Assistant][repository-badge]][repository-url]
|
||||
2. Reload the add-on store page
|
||||
3. Click the "Install" button to install the add-on.
|
||||
4. Add your inverter configuration to the add-on configuration
|
||||
5. Start the "TSUN-Proxy" add-on
|
||||
6. Check the logs of the "TSUN-Proxy" add-on to see if everything went well.
|
||||
|
||||
_Please note, the add-on is pre-configured to connect with
|
||||
Home Assistants default MQTT Broker. There is no need to configure any MQTT parameters
|
||||
if you're running an MOSQUITTO add-on. Home Assistant communication and TSUN Cloud URL
|
||||
and Ports are also pre-configured._
|
||||
|
||||
This automatic handling of the TSUN Cloud and MQTT Broker conflicts with the
|
||||
[TSUN Proxy official documentation][tsunproxy]. The official documentation
|
||||
will state `mqtt.host`, `mqtt.port`, `mqtt.user`, `mqtt.passwd` `solarman.host`,
|
||||
`solarman.port` `tsun.host`, `tsun.port` and Home Assistant options are required.
|
||||
For the add-on, however, this isn't needed.
|
||||
|
||||
## Configuration
|
||||
|
||||
**Note**: _Remember to restart the add-on when the configuration is changed._
|
||||
|
||||
Example add-on configuration after installation:
|
||||
|
||||
```yaml
|
||||
inverters:
|
||||
- serial: R17E760702080400
|
||||
node_id: PV-Garage
|
||||
suggested_area: Garage
|
||||
modbus_polling: false
|
||||
pv1.manufacturer: Shinefar
|
||||
pv1.type: SF-M18/144550
|
||||
pv2.manufacturer: Shinefar
|
||||
pv2.type: SF-M18/144550
|
||||
```
|
||||
|
||||
**Note**: _This is just an example, you need to replace the values with your own!_
|
||||
|
||||
Example add-on configuration for GEN3PLUS inverters:
|
||||
|
||||
```yaml
|
||||
inverters:
|
||||
- serial: Y17000000000000
|
||||
monitor_sn: 2000000000
|
||||
node_id: PV-Garage
|
||||
suggested_area: Garage
|
||||
modbus_polling: true
|
||||
client_mode.host: 192.168.x.x
|
||||
client_mode.port: 8899
|
||||
client_mode.forward: true
|
||||
pv1.manufacturer: Shinefar
|
||||
pv1.type: SF-M18/144550
|
||||
pv2.manufacturer: Shinefar
|
||||
pv2.type: SF-M18/144550
|
||||
pv3.manufacturer: Shinefar
|
||||
pv3.type: SF-M18/144550
|
||||
pv4.manufacturer: Shinefar
|
||||
pv4.type: SF-M18/144550
|
||||
```
|
||||
|
||||
**Note**: _This is just an example, you need to replace the values with your own!_
|
||||
|
||||
more information about the configuration can be found in the [configuration details page][configdetails].
|
||||
|
||||
## MQTT settings
|
||||
|
||||
By default, this add-on requires no `mqtt` config from the user. **This is not an error!**
|
||||
|
||||
However, you are free to set them if you want to override, however, in
|
||||
general usage, that should not be needed and is not recommended for this add-on.
|
||||
|
||||
## Changelog & Releases
|
||||
|
||||
This repository keeps a change log using [GitHub's releases][releases]
|
||||
functionality.
|
||||
|
||||
Releases are based on [Semantic Versioning][semver], and use the format
|
||||
of `MAJOR.MINOR.PATCH`. In a nutshell, the version will be incremented
|
||||
based on the following:
|
||||
|
||||
- `MAJOR`: Incompatible or major changes.
|
||||
- `MINOR`: Backwards-compatible new features and enhancements.
|
||||
- `PATCH`: Backwards-compatible bugfixes and package updates.
|
||||
|
||||
## Support
|
||||
|
||||
Got questions?
|
||||
|
||||
You have several options to get them answered:
|
||||
|
||||
- The Discussions section on [GitHub][discussions].
|
||||
- The [Home Assistant Discord chat server][discord-ha] for general Home
|
||||
Assistant discussions and questions.
|
||||
|
||||
You could also [open an issue here][issue] GitHub.
|
||||
|
||||
## Authors & contributors
|
||||
|
||||
The original setup of this repository is by [Stefan Allius][author].
|
||||
|
||||
We're very happy to receive contributions to this project! You can get started by reading [CONTRIBUTING.md][contribute].
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the [BSD 3-clause License][bsd].
|
||||
|
||||
Note the aiomqtt library used is based on the paho-mqtt library, which has a dual license.
|
||||
One of the licenses is the so-called [Eclipse Distribution License v1.0.][eclipse]
|
||||
It is almost word-for-word identical to the BSD 3-clause License. The only differences are:
|
||||
|
||||
- One use of "COPYRIGHT OWNER" (EDL) instead of "COPYRIGHT HOLDER" (BSD)
|
||||
- One use of "Eclipse Foundation, Inc." (EDL) instead of "copyright holder" (BSD)
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
[tsunproxy]: https://github.com/s-allius/tsun-gen3-proxy
|
||||
[discussions]: https://github.com/s-allius/tsun-gen3-proxy/discussions
|
||||
[author]: https://github.com/s-allius
|
||||
[discord-ha]: https://discord.gg/c5DvZ4e
|
||||
[issue]: https://github.com/s-allius/tsun-gen3-proxy/issues
|
||||
[releases]: https://github.com/s-allius/tsun-gen3-proxy/releases
|
||||
[contribute]: https://github.com/s-allius/tsun-gen3-proxy/blob/main/CONTRIBUTING.md
|
||||
[semver]: http://semver.org/spec/v2.0.0.htm
|
||||
[bsd]: https://opensource.org/licenses/BSD-3-Clause
|
||||
[eclipse]: https://www.eclipse.org/org/documents/edl-v10.php
|
||||
[Mosquitto]: https://github.com/home-assistant/addons/blob/master/mosquitto/DOCS.md
|
||||
[AdGuard]: https://github.com/hassio-addons/addon-adguard-home
|
||||
[repository-badge]: https://img.shields.io/badge/Add%20repository%20to%20my-Home%20Assistant-41BDF5?logo=home-assistant&style=for-the-badge
|
||||
[repository-url]: https://my.home-assistant.io/redirect/supervisor_add_addon_repository/?repository_url=https%3A%2F%2Fgithub.com%2Fs-allius%2Fha-addons
|
||||
[configdetails]: https://github.com/s-allius/tsun-gen3-proxy/wiki/Configuration-toml
|
||||
90
ha_addons/ha_addon/Dockerfile
Executable file
90
ha_addons/ha_addon/Dockerfile
Executable file
@@ -0,0 +1,90 @@
|
||||
|
||||
############################################################################
|
||||
#
|
||||
# TSUN Proxy
|
||||
# Homeassistant Add-on
|
||||
#
|
||||
# based on https://github.com/s-allius/tsun-gen3-proxy/tree/main
|
||||
#
|
||||
############################################################################
|
||||
|
||||
|
||||
######################
|
||||
# 1 Build Base Image #
|
||||
######################
|
||||
|
||||
ARG BUILD_FROM="ghcr.io/hassio-addons/base:17.1.0"
|
||||
# hadolint ignore=DL3006
|
||||
FROM $BUILD_FROM AS base
|
||||
|
||||
# Installiere Python, pip und virtuelle Umgebungstools
|
||||
RUN apk add --no-cache python3=3.12.8-r1 py3-pip=24.3.1-r0 && \
|
||||
python -m venv /opt/venv && \
|
||||
. /opt/venv/bin/activate
|
||||
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
|
||||
|
||||
|
||||
#######################
|
||||
# 2 Build wheel #
|
||||
#######################
|
||||
FROM base AS builder
|
||||
|
||||
COPY rootfs/requirements.txt /root/
|
||||
|
||||
RUN apk add --no-cache build-base=0.5-r3 && \
|
||||
python -m pip install --no-cache-dir wheel==0.45.1 && \
|
||||
python -OO -m pip wheel --no-cache-dir --wheel-dir=/root/wheels -r /root/requirements.txt
|
||||
|
||||
|
||||
|
||||
|
||||
#######################
|
||||
# 3 Build runtime #
|
||||
#######################
|
||||
FROM base AS runtime
|
||||
|
||||
ARG SERVICE_NAME
|
||||
ARG VERSION
|
||||
ENV SERVICE_NAME=${SERVICE_NAME}
|
||||
|
||||
|
||||
|
||||
#######################
|
||||
# 4 Install libraries #
|
||||
#######################
|
||||
|
||||
# install the requirements from the wheels packages from the builder stage
|
||||
# and unistall python packages and alpine package manger to reduce attack surface
|
||||
|
||||
COPY --from=builder /root/wheels /root/wheels
|
||||
RUN python -m pip install --no-cache-dir --no-cache --no-index /root/wheels/* && \
|
||||
rm -rf /root/wheels && \
|
||||
python -m pip uninstall --yes wheel pip && \
|
||||
apk --purge del apk-tools
|
||||
|
||||
|
||||
#######################
|
||||
# 5 copy data #
|
||||
#######################
|
||||
|
||||
COPY rootfs/ /
|
||||
|
||||
|
||||
|
||||
#######################
|
||||
# 6 run app #
|
||||
#######################
|
||||
|
||||
# make run.sh executable
|
||||
RUN chmod a+x /run.sh && \
|
||||
echo ${VERSION} > /proxy-version.txt
|
||||
|
||||
# command to run on container start
|
||||
CMD [ "/run.sh" ]
|
||||
|
||||
|
||||
|
||||
#######################
|
||||
|
||||
BIN
ha_addons/ha_addon/icon.png
Normal file
BIN
ha_addons/ha_addon/icon.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 113 KiB |
BIN
ha_addons/ha_addon/logo.png
Normal file
BIN
ha_addons/ha_addon/logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 113 KiB |
33
ha_addons/ha_addon/rootfs/run.sh
Executable file
33
ha_addons/ha_addon/rootfs/run.sh
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/with-contenv bashio
|
||||
|
||||
echo "Add-on environment started"
|
||||
|
||||
echo "check for Home Assistant MQTT"
|
||||
MQTT_HOST=$(bashio::services mqtt "host")
|
||||
MQTT_PORT=$(bashio::services mqtt "port")
|
||||
MQTT_USER=$(bashio::services mqtt "username")
|
||||
MQTT_PASSWORD=$(bashio::services mqtt "password")
|
||||
|
||||
# if a MQTT was/not found, drop a note
|
||||
if [ -z "$MQTT_HOST" ]; then
|
||||
echo "MQTT not found"
|
||||
else
|
||||
echo "MQTT found"
|
||||
export MQTT_HOST
|
||||
export MQTT_PORT
|
||||
export MQTT_USER
|
||||
export MQTT_PASSWORD
|
||||
fi
|
||||
|
||||
|
||||
|
||||
|
||||
# Create folder for log und config files
|
||||
mkdir -p /homeassistant/tsun-proxy/logs
|
||||
|
||||
cd /home/proxy || exit
|
||||
|
||||
export VERSION=$(cat /proxy-version.txt)
|
||||
|
||||
echo "Start Proxyserver..."
|
||||
python3 server.py --json_config=/data/options.json --log_path=/homeassistant/tsun-proxy/logs/ --config_path=/homeassistant/tsun-proxy/ --log_backups=2
|
||||
95
ha_addons/ha_addon/translations/de.yaml
Executable file
95
ha_addons/ha_addon/translations/de.yaml
Executable file
@@ -0,0 +1,95 @@
|
||||
---
|
||||
configuration:
|
||||
inverters:
|
||||
name: Wechselrichter
|
||||
description: >+
|
||||
Für jeden Wechselrichter muss die Seriennummer des Wechselrichters einer MQTT
|
||||
Definition zugeordnet werden. Dazu wird der entsprechende Konfigurationsblock mit der
|
||||
16-stellige Seriennummer gestartet, so dass alle nachfolgenden Parameter diesem
|
||||
Wechselrichter zugeordnet sind.
|
||||
Weitere wechselrichterspezifische Parameter (z.B. Polling Mode) können im
|
||||
Konfigurationsblock gesetzt werden.
|
||||
|
||||
Die Seriennummer der GEN3 Wechselrichter beginnen mit `R17` und die der GEN3PLUS
|
||||
Wechselrichter mir `Y17`oder `47`!
|
||||
|
||||
Siehe Beispielkonfiguration im Dokumentations-Tab
|
||||
|
||||
tsun.enabled:
|
||||
name: Verbindung zur TSUN Cloud - nur für GEN3-Wechselrichter
|
||||
description: >+
|
||||
Schaltet die Verbindung zur TSUN Cloud ein/aus.
|
||||
Diese Verbindung ist erforderlich, wenn Sie Daten an die TSUN Cloud senden möchten,
|
||||
z.B. um die TSUN-Apps zu nutzen oder Firmware-Updates zu erhalten.
|
||||
|
||||
ein => normaler Proxy-Betrieb.
|
||||
aus => Der Wechselrichter wird vom Internet isoliert.
|
||||
solarman.enabled:
|
||||
name: Verbindung zur Solarman Cloud - nur für GEN3PLUS Wechselrichter
|
||||
description: >+
|
||||
Schaltet die Verbindung zur Solarman Cloud ein/aus.
|
||||
Diese Verbindung ist erforderlich, wenn Sie Daten an die Solarman Cloud senden möchten,
|
||||
z.B. um die Solarman Apps zu nutzen oder Firmware-Updates zu erhalten.
|
||||
|
||||
ein => normaler Proxy-Betrieb.
|
||||
aus => Der Wechselrichter wird vom Internet isoliert.
|
||||
inverters.allow_all:
|
||||
name: Erlaube Verbindungen von sämtlichen Wechselrichtern
|
||||
description: >-
|
||||
Der Proxy akzeptiert normalerweise nur Verbindungen von konfigurierten Wechselrichtern.
|
||||
Schalten Sie dies für Testzwecke und unbekannte Seriennummern ein.
|
||||
mqtt.host:
|
||||
name: MQTT Broker Host
|
||||
description: >-
|
||||
Hostname oder IP-Adresse des MQTT-Brokers. Wenn nicht gesetzt, versucht das Addon, eine Verbindung zum Home Assistant MQTT-Broker herzustellen.
|
||||
mqtt.port:
|
||||
name: MQTT Broker Port
|
||||
description: >-
|
||||
Port des MQTT-Brokers. Wenn nicht gesetzt, versucht das Addon, eine Verbindung zum Home Assistant MQTT-Broker herzustellen.
|
||||
mqtt.user:
|
||||
name: MQTT Broker Benutzer
|
||||
description: >-
|
||||
Benutzer für den MQTT-Broker. Wenn nicht gesetzt, versucht das Addon, eine Verbindung zum Home Assistant MQTT-Broker herzustellen.
|
||||
mqtt.passwd:
|
||||
name: MQTT Broker Passwort
|
||||
description: >-
|
||||
Passwort für den MQTT-Broker. Wenn nicht gesetzt, versucht das Addon, eine Verbindung zum Home Assistant MQTT-Broker herzustellen.
|
||||
ha.auto_conf_prefix:
|
||||
name: MQTT-Präfix für das Abonnieren von Home Assistant-Statusaktualisierungen
|
||||
ha.discovery_prefix:
|
||||
name: MQTT-Präfix für das discovery topic
|
||||
ha.entity_prefix:
|
||||
name: MQTT-Themenpräfix für die Veröffentlichung von Wechselrichterwerten
|
||||
ha.proxy_node_id:
|
||||
name: MQTT-Knoten-ID für die proxy_node_id
|
||||
ha.proxy_unique_id:
|
||||
name: MQTT-eindeutige ID zur Identifizierung einer Proxy-Instanz
|
||||
tsun.host:
|
||||
name: TSUN Cloud Host
|
||||
description: >-
|
||||
Hostname oder IP-Adresse der TSUN-Cloud. Wenn nicht gesetzt, versucht das Addon, eine Verbindung zur Cloud logger.talent-monitoring.com herzustellen.
|
||||
solarman.host:
|
||||
name: Solarman Cloud Host
|
||||
description: >-
|
||||
Hostname oder IP-Adresse der Solarman-Cloud. Wenn nicht gesetzt, versucht das Addon, eine Verbindung zur Cloud iot.talent-monitoring.com herzustellen.
|
||||
gen3plus.at_acl.tsun.allow:
|
||||
name: TSUN GEN3PLUS ACL allow
|
||||
description: >-
|
||||
Liste erlaubter AT-Befehle für TSUN GEN3PLUS
|
||||
gen3plus.at_acl.tsun.block:
|
||||
name: TSUN GEN3 ACL block
|
||||
description: >-
|
||||
Liste blockierter AT-Befehle für TSUN GEN3PLUS
|
||||
gen3plus.at_acl.mqtt.allow:
|
||||
name: MQTT GEN3PLUS ACL allow
|
||||
description: >-
|
||||
Liste erlaubter MQTT-Befehle für GEN3PLUS
|
||||
gen3plus.at_acl.mqtt.block:
|
||||
name: MQTT GEN3PLUS ACL block
|
||||
description: >-
|
||||
Liste blockierter MQTT-Befehle für GEN3PLUS
|
||||
|
||||
|
||||
network:
|
||||
5005/tcp: listening Port für TSUN GEN3 Wechselrichter
|
||||
10000/tcp: listening Port für TSUN GEN3PLUS Wechselrichter
|
||||
95
ha_addons/ha_addon/translations/en.yaml
Executable file
95
ha_addons/ha_addon/translations/en.yaml
Executable file
@@ -0,0 +1,95 @@
|
||||
---
|
||||
configuration:
|
||||
inverters:
|
||||
name: Inverters
|
||||
description: >+
|
||||
For each GEN3 inverter, the serial number of the inverter must be mapped to an MQTT
|
||||
definition. To do this, the corresponding configuration block is started with
|
||||
16-digit serial number so that all subsequent parameters are assigned
|
||||
to this inverter. Further inverter-specific parameters (e.g. polling mode) can be set
|
||||
in the configuration block
|
||||
|
||||
The serial numbers of all GEN3 inverters start with `R17` and that of the GEN3PLUS
|
||||
inverters with ‘Y17’ or ‘47’!
|
||||
|
||||
For reference see example configuration in Documentation Tab
|
||||
|
||||
tsun.enabled:
|
||||
name: Connection to TSUN Cloud - for GEN3 inverter only
|
||||
description: >+
|
||||
switch on/off connection to the TSUN cloud.
|
||||
This connection is only required if you want send data to the TSUN cloud
|
||||
eg. to use the TSUN APPs or receive firmware updates.
|
||||
|
||||
on => normal proxy operation.
|
||||
off => The Inverter become isolated from Internet.
|
||||
solarman.enabled:
|
||||
name: Connection to Solarman Cloud - for GEN3PLUS inverter only
|
||||
description: >+
|
||||
switch on/off connection to the Solarman cloud.
|
||||
This connection is only required if you want send data to the Solarman cloud
|
||||
eg. to use the Solarman APPs or receive firmware updates.
|
||||
|
||||
on => normal proxy operation.
|
||||
off => The Inverter become isolated from Internet
|
||||
inverters.allow_all:
|
||||
name: Allow all connections from all inverters
|
||||
description: >-
|
||||
The proxy only usually accepts connections from configured inverters.
|
||||
Switch on for test purposes and unknown serial numbers.
|
||||
mqtt.host:
|
||||
name: MQTT Broker Host
|
||||
description: >-
|
||||
Hostname or IP address of the MQTT broker. if not set, the addon will try to connect to the Home Assistant MQTT broker
|
||||
mqtt.port:
|
||||
name: MQTT Broker Port
|
||||
description: >-
|
||||
Port of the MQTT broker. if not set, the addon will try to connect to the Home Assistant MQTT broker
|
||||
mqtt.user:
|
||||
name: MQTT Broker User
|
||||
description: >-
|
||||
User for the MQTT broker. if not set, the addon will try to connect to the Home Assistant MQTT broker
|
||||
mqtt.passwd:
|
||||
name: MQTT Broker Password
|
||||
description: >-
|
||||
Password for the MQTT broker. if not set, the addon will try to connect to the Home Assistant MQTT broker
|
||||
ha.auto_conf_prefix:
|
||||
name: MQTT prefix for subscribing for homeassistant status updates
|
||||
ha.discovery_prefix:
|
||||
name: MQTT prefix for discovery topic
|
||||
ha.entity_prefix:
|
||||
name: MQTT topic prefix for publishing inverter values
|
||||
ha.proxy_node_id:
|
||||
name: MQTT node id, for the proxy_node_id
|
||||
ha.proxy_unique_id:
|
||||
name: MQTT unique id, to identify a proxy instance
|
||||
tsun.host:
|
||||
name: TSUN Cloud Host
|
||||
description: >-
|
||||
Hostname or IP address of the TSUN cloud. if not set, the addon will try to connect to the cloud
|
||||
on logger.talent-monitoring.com
|
||||
solarman.host:
|
||||
name: Solarman Cloud Host
|
||||
description: >-
|
||||
Hostname or IP address of the Solarman cloud. if not set, the addon will try to connect to the cloud
|
||||
on iot.talent-monitoring.com
|
||||
gen3plus.at_acl.tsun.allow:
|
||||
name: TSUN GEN3PLUS ACL allow
|
||||
description: >-
|
||||
List of allowed TSUN GEN3PLUS AT commands
|
||||
gen3plus.at_acl.tsun.block:
|
||||
name: TSUN GEN3 ACL block
|
||||
description: >-
|
||||
List of blocked TSUN GEN3PLUS AT commands
|
||||
gen3plus.at_acl.mqtt.allow:
|
||||
name: MQTT GEN3PLUS ACL allow
|
||||
description: >-
|
||||
List of allowed MQTT GEN3PLUS commands
|
||||
gen3plus.at_acl.mqtt.block:
|
||||
name: MQTT GEN3PLUS ACL block
|
||||
description: >-
|
||||
List of blocked MQTT GEN3PLUS commands
|
||||
|
||||
network:
|
||||
5005/tcp: listening Port for TSUN GEN3 Devices
|
||||
10000/tcp: listening Port for TSUN GEN3PLUS Devices
|
||||
3
ha_addons/repository.yaml
Normal file
3
ha_addons/repository.yaml
Normal file
@@ -0,0 +1,3 @@
|
||||
name: TSUN-Proxy
|
||||
url: https://github.com/s-allius/tsun-gen3-proxy/ha_addons
|
||||
maintainer: Stefan Allius
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user