Compare commits
21 Commits
v0.10.0-pr ... dev-0.10
| SHA1 |
|---|
| b752144e94 |
| e56aa5d709 |
| b364fb3f8e |
| 416fe16609 |
| d8aee5f789 |
| 04ec8ac8a7 |
| 39577b2415 |
| 8fc1e83b42 |
| 13f9febb47 |
| d891e486e7 |
| 155b6d4e67 |
| 24ece4fece |
| a779c90965 |
| e33153fc1f |
| 2a5e3b6507 |
| 6c6ce3e0a8 |
| 83a33efa18 |
| 25e55e4286 |
| b3f1d97c3d |
| 4d1a5fbd21 |
| 6dbd9bc85f |
@@ -1,2 +1,3 @@
 [run]
 branch = True
+relative_files = True
.github/workflows/python-app.yml (vendored, 22 changed lines)
@@ -18,10 +18,11 @@ on:
      - '**.dockerfile' # Do no build on *.dockerfile changes
      - '**.sh' # Do no build on *.sh changes
  pull_request:
-    branches: [ "main" ]
+    branches: [ "main", "dev-*" ]
 
 permissions:
   contents: read
+  pull-requests: read # allows SonarCloud to decorate PRs with analysis results
 
 jobs:
   build:
@@ -36,6 +37,8 @@ jobs:
        timezoneMacos: "Europe/Berlin"
        timezoneWindows: "Europe/Berlin"
    - uses: actions/checkout@v4
+      with:
+        fetch-depth: 0 # Fetch all history for all tags and branches
    - name: Set up Python 3.12
      uses: actions/setup-python@v5
      with:
@@ -53,4 +56,19 @@ jobs:
        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
    - name: Test with pytest
      run: |
-        python -m pytest app
+        pip install pytest pytest-cov
+        #pytest app --doctest-modules --junitxml=junit/test-results.xml --cov=com --cov-report=xml --cov-report=html
+        python -m pytest app --cov=app/src --cov-report=xml
+    - name: Analyze with SonarCloud
+      uses: SonarSource/sonarcloud-github-action@v2.2.0
+      env:
+        SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+      with:
+        projectBaseDir: .
+        args:
+          -Dsonar.projectKey=s-allius_tsun-gen3-proxy
+          -Dsonar.organization=s-allius
+          -Dsonar.python.version=3.12
+          -Dsonar.python.coverage.reportPaths=coverage.xml
+          -Dsonar.tests=system_tests,app/tests
+          -Dsonar.source=app/src
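The new test step writes a coverage.xml report that the SonarCloud step picks up via sonar.python.coverage.reportPaths. A rough local equivalent, as a sketch only (assuming pytest and pytest-cov are installed in the current environment):

```python
import sys

import pytest

# Run the app test suite and write coverage.xml for app/src,
# mirroring the "Test with pytest" workflow step above.
rc = pytest.main(["app", "--cov=app/src", "--cov-report=xml"])
sys.exit(int(rc))
```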
.sonarlint/connectedMode.json (new file, 4 lines)

@@ -0,0 +1,4 @@
+{
+    "sonarCloudOrganization": "s-allius",
+    "projectKey": "s-allius_tsun-gen3-proxy"
+}
.vscode/settings.json (vendored, 6 changed lines)

@@ -11,5 +11,9 @@
     "python.testing.pytestEnabled": true,
     "flake8.args": [
         "--extend-exclude=app/tests/*.py system_tests/*.py"
-    ]
+    ],
+    "sonarlint.connectedMode.project": {
+        "connectionId": "s-allius",
+        "projectKey": "s-allius_tsun-gen3-proxy"
+    }
 }
CHANGELOG.md (10 changed lines)

@@ -7,6 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [unreleased]
 
+## [0.10.1] - 2024-08-10
+
+- fix displaying the version string at startup and in HA [#153](https://github.com/s-allius/tsun-gen3-proxy/issues/153)
+
+## [0.10.0] - 2024-08-09
+
+- bump aiohttp to version 3.10.2
+- add SonarQube and code coverage support
+- don't send MODBUS request when state is note up; adapt timeouts [#141](https://github.com/s-allius/tsun-gen3-proxy/issues/141)
+- build multi arch images with sboms [#144](https://github.com/s-allius/tsun-gen3-proxy/issues/144)
 - add timestamp to MQTT topics [#138](https://github.com/s-allius/tsun-gen3-proxy/issues/138)
 - improve the message handling, to avoid hangs
 - GEN3: allow long timeouts until we received first inverter data (not only device data)
app/build.sh (81 changed lines)

@@ -17,10 +17,13 @@ VERSION="${VERSION:1}"
 arr=(${VERSION//./ })
 MAJOR=${arr[0]}
 IMAGE=tsun-gen3-proxy
+GREEN='\033[0;32m'
+BLUE='\033[0;34m'
+NC='\033[0m'
 
 if [[ $1 == debug ]] || [[ $1 == dev ]] ;then
     IMAGE=docker.io/sallius/${IMAGE}
-    VERSION=${VERSION}-$1
+    VERSION=${VERSION}+$1
 elif [[ $1 == rc ]] || [[ $1 == rel ]] || [[ $1 == preview ]] ;then
     IMAGE=ghcr.io/s-allius/${IMAGE}
 else
@@ -29,47 +32,41 @@ echo try: $0 '[debug|dev|preview|rc|rel]'
 exit 1
 fi
 
-echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE
-if [[ $1 == debug ]];then
-    docker build --build-arg "VERSION=${VERSION}" --build-arg environment=dev --build-arg "LOG_LVL=DEBUG" --label "org.opencontainers.image.created=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" --label "org.opencontainers.image.revision=${BRANCH}" -t ${IMAGE}:debug app
-    echo " => pushing ${IMAGE}:debug"
-    docker push -q ${IMAGE}:debug
-
-elif [[ $1 == dev ]];then
-    docker build --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.opencontainers.image.created=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" --label "org.opencontainers.image.revision=${BRANCH}" -t ${IMAGE}:dev app
-    echo " => pushing ${IMAGE}:dev"
-    docker push -q ${IMAGE}:dev
-
-elif [[ $1 == preview ]];then
-    docker build --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.opencontainers.image.created=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" --label "org.opencontainers.image.revision=${BRANCH}" -t ${IMAGE}:preview -t ${IMAGE}:${VERSION} app
-    echo 'login to ghcr.io'
-    echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin
-    echo " => pushing ${IMAGE}:preview"
-    docker push -q ${IMAGE}:preview
-    echo " => pushing ${IMAGE}:${VERSION}"
-    docker push -q ${IMAGE}:${VERSION}
-
-elif [[ $1 == rc ]];then
-    docker build --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.opencontainers.image.created=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" --label "org.opencontainers.image.revision=${BRANCH}" -t ${IMAGE}:rc -t ${IMAGE}:${VERSION} app
-    echo 'login to ghcr.io'
-    echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin
-    echo " => pushing ${IMAGE}:rc"
-    docker push -q ${IMAGE}:rc
-    echo " => pushing ${IMAGE}:${VERSION}"
-    docker push -q ${IMAGE}:${VERSION}
-
-elif [[ $1 == rel ]];then
-    docker build --no-cache --build-arg "VERSION=${VERSION}" --build-arg environment=production --label "org.opencontainers.image.created=${BUILD_DATE}" --label "org.opencontainers.image.version=${VERSION}" --label "org.opencontainers.image.revision=${BRANCH}" -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
-    echo 'login to ghcr.io'
-    echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin
-    echo " => pushing ${IMAGE}:latest"
-    docker push -q ${IMAGE}:latest
-    echo " => pushing ${IMAGE}:${MAJOR}"
-    docker push -q ${IMAGE}:${MAJOR}
-    echo " => pushing ${IMAGE}:${VERSION}"
-    docker push -q ${IMAGE}:${VERSION}
+if [[ $1 == debug ]] ;then
+    BUILD_ENV="dev"
+else
+    BUILD_ENV="production"
 fi
 
-echo ' => checking docker-compose.yaml file'
+BUILD_CMD="buildx build --push --build-arg VERSION=${VERSION} --build-arg environment=${BUILD_ENV} --attest type=provenance,mode=max --attest type=sbom,generator=docker/scout-sbom-indexer:latest"
+ARCH="--platform linux/amd64,linux/arm64,linux/arm/v7"
+LABELS="--label org.opencontainers.image.created=${BUILD_DATE} --label org.opencontainers.image.version=${VERSION} --label org.opencontainers.image.revision=${BRANCH}"
+
+echo version: $VERSION build-date: $BUILD_DATE image: $IMAGE
+if [[ $1 == debug ]];then
+    docker ${BUILD_CMD} ${ARCH} ${LABELS} --build-arg "LOG_LVL=DEBUG" -t ${IMAGE}:debug app
+
+elif [[ $1 == dev ]];then
+    docker ${BUILD_CMD} ${ARCH} ${LABELS} -t ${IMAGE}:dev app
+
+elif [[ $1 == preview ]];then
+    echo 'login to ghcr.io'
+    echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin
+    docker ${BUILD_CMD} ${ARCH} ${LABELS} -t ${IMAGE}:preview -t ${IMAGE}:${VERSION} app
+
+elif [[ $1 == rc ]];then
+    echo 'login to ghcr.io'
+    echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin
+    docker ${BUILD_CMD} ${ARCH} ${LABELS} -t ${IMAGE}:rc -t ${IMAGE}:${VERSION} app
+
+elif [[ $1 == rel ]];then
+    echo 'login to ghcr.io'
+    echo $GHCR_TOKEN | docker login ghcr.io -u s-allius --password-stdin
+    docker ${BUILD_CMD} ${ARCH} ${LABELS} --no-cache -t ${IMAGE}:latest -t ${IMAGE}:${MAJOR} -t ${IMAGE}:${VERSION} app
+fi
+
+echo -e "${BLUE} => checking docker-compose.yaml file${NC}"
 docker-compose config -q
-echo 'done'
+echo
+echo -e "${GREEN}${BUILD_DATE} => Version: ${VERSION}${NC} finished"
+echo
@@ -31,12 +31,14 @@ inverters.allow_all = true # allow inverters, even if we have no inverter mapp
 [inverters."R170000000000001"]
 #node_id = '' # Optional, MQTT replacement for inverters serial number
 #suggested_area = '' # Optional, suggested installation area for home-assistant
+modbus_polling = false # Disable optional MODBUS polling
 #pv1 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
 #pv2 = {type = 'RSM40-8-395M', manufacturer = 'Risen'} # Optional, PV module descr
 
 #[inverters."R17xxxxxxxxxxxx2"]
 #node_id = '' # Optional, MQTT replacement for inverters serial number
 #suggested_area = '' # Optional, suggested installation area for home-assistant
+#modbus_polling = false # Disable optional MODBUS polling
 #pv1 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr
 #pv2 = {type = 'RSM40-8-405M', manufacturer = 'Risen'} # Optional, PV module descr
 
@@ -44,6 +46,7 @@ inverters.allow_all = true # allow inverters, even if we have no inverter mapp
 monitor_sn = 2000000000 # The "Monitoring SN:" can be found on a sticker enclosed with the inverter
 #node_id = '' # Optional, MQTT replacement for inverters serial number
 #suggested_area = '' # Optional, suggested installation place for home-assistant
+modbus_polling = true # Enable optional MODBUS polling
 
 # if your inverter supports SSL connections you must use the client_mode. Pls, uncomment
 # the next line and configure the fixed IP of your inverter
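The new modbus_polling switch is a plain boolean in the per-inverter section. A minimal sketch of reading it with the standard library (assuming Python 3.11+ and the default config path used by the project's tests; both are assumptions, not part of this diff):

```python
import tomllib  # stdlib TOML parser, Python 3.11+

# Read the shipped default config and check the new per-inverter switch.
with open("app/config/default_config.toml", "rb") as f:
    cnf = tomllib.load(f)

inv = cnf["inverters"]["R170000000000001"]
print("modbus_polling:", inv.get("modbus_polling", False))
```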
@@ -188,7 +188,7 @@
 <polygon fill="none" stroke="#000000" points="410.5,-330 410.5,-362 560.5,-362 560.5,-330 410.5,-330"/>
 <text text-anchor="start" x="453.5455" y="-343" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ConnectionG3</text>
 <polygon fill="none" stroke="#000000" points="410.5,-298 410.5,-330 560.5,-330 560.5,-298 410.5,-298"/>
-<text text-anchor="start" x="420.487" y="-311" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remoteStream:ConnectionG3</text>
+<text text-anchor="start" x="420.487" y="-311" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remote_stream:ConnectionG3</text>
 <polygon fill="none" stroke="#000000" points="410.5,-254 410.5,-298 560.5,-298 560.5,-254 410.5,-254"/>
 <text text-anchor="start" x="466.054" y="-279" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()</text>
 <text text-anchor="start" x="470.5025" y="-267" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>
@@ -213,7 +213,7 @@
 <polygon fill="none" stroke="#000000" points="125.5,-330 125.5,-362 281.5,-362 281.5,-330 125.5,-330"/>
 <text text-anchor="start" x="168.211" y="-343" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">ConnectionG3P</text>
 <polygon fill="none" stroke="#000000" points="125.5,-298 125.5,-330 281.5,-330 281.5,-298 125.5,-298"/>
-<text text-anchor="start" x="135.1525" y="-311" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remoteStream:ConnectionG3P</text>
+<text text-anchor="start" x="135.1525" y="-311" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">remote_stream:ConnectionG3P</text>
 <polygon fill="none" stroke="#000000" points="125.5,-254 125.5,-298 281.5,-298 281.5,-254 125.5,-254"/>
 <text text-anchor="start" x="184.054" y="-279" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">healthy()</text>
 <text text-anchor="start" x="188.5025" y="-267" font-family="Helvetica,sans-Serif" font-size="10.00" fill="#000000">close()</text>

Image file: Size 34 KiB before, 34 KiB after.
@@ -8,9 +8,9 @@
 [IterRegistry||__iter__]^[Message|server_side:bool;header_valid:bool;header_len:unsigned;data_len:unsigned;unique_id;node_id;sug_area;_recv_buffer:bytearray;_send_buffer:bytearray;_forward_buffer:bytearray;db:Infos;new_data:list;state|_read():void<abstract>;close():void;inc_counter():void;dec_counter():void]
 [Message]^[Talent|await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;close()]
 [Message]^[SolarmanV5|control;serial;snr;db:InfosG3P;mb:Modbus;switch|msg_unknown();;close()]
-[Talent]^[ConnectionG3|remoteStream:ConnectionG3|healthy();close()]
+[Talent]^[ConnectionG3|remote_stream:ConnectionG3|healthy();close()]
 [Talent]has-1>[Modbus]
-[SolarmanV5]^[ConnectionG3P|remoteStream:ConnectionG3P|healthy();close()]
+[SolarmanV5]^[ConnectionG3P|remote_stream:ConnectionG3P|healthy();close()]
 [SolarmanV5]has-1>[Modbus]
 [AsyncStream|reader;writer;addr;r_addr;l_addr|<async>server_loop();<async>client_loop();<async>loop;disc();close();;__async_read();async_write();__async_forward()]^[ConnectionG3]
 [AsyncStream]^[ConnectionG3P]
@@ -1,4 +1,4 @@
 aiomqtt==2.2.0
 schema==0.7.7
 aiocron==1.8
-aiohttp==3.9.5
+aiohttp==3.10.2
@@ -17,10 +17,10 @@ class AsyncStream():
     '''maximum processing time for a received msg in sec'''
     MAX_START_TIME = 400
     '''maximum time without a received msg in sec'''
-    MAX_INV_IDLE_TIME = 90
+    MAX_INV_IDLE_TIME = 120
     '''maximum time without a received msg from the inverter in sec'''
-    MAX_CLOUD_IDLE_TIME = 360
-    '''maximum time without a received msg from cloud side in sec'''
+    MAX_DEF_IDLE_TIME = 360
+    '''maximum default time without a received msg in sec'''
 
     def __init__(self, reader: StreamReader, writer: StreamWriter,
                  addr) -> None:
@@ -37,11 +37,11 @@ class AsyncStream():
     def __timeout(self) -> int:
         if self.state == State.init or self.state == State.received:
             to = self.MAX_START_TIME
+        elif self.state == State.up and \
+                self.server_side and self.modbus_polling:
+            to = self.MAX_INV_IDLE_TIME
         else:
-            if self.server_side and self.modbus_polling:
-                to = self.MAX_INV_IDLE_TIME
-            else:
-                to = self.MAX_CLOUD_IDLE_TIME
+            to = self.MAX_DEF_IDLE_TIME
         return to
 
     async def publish_outstanding_mqtt(self):
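In effect, the idle timeout now depends on both the connection state and the connection's role: the short inverter timeout only applies to an established server-side link with MODBUS polling enabled, everything else falls back to the default. A standalone sketch of that selection (names taken from the hunks above; State is simplified to a plain Enum here):

```python
from enum import Enum, auto


class State(Enum):  # simplified stand-in for the proxy's State enum
    init = auto()
    received = auto()
    up = auto()


MAX_START_TIME = 400      # waiting for the first messages
MAX_INV_IDLE_TIME = 120   # established server side with MODBUS polling
MAX_DEF_IDLE_TIME = 360   # default for all other connections


def timeout(state: State, server_side: bool, modbus_polling: bool) -> int:
    if state in (State.init, State.received):
        return MAX_START_TIME
    elif state == State.up and server_side and modbus_polling:
        return MAX_INV_IDLE_TIME
    return MAX_DEF_IDLE_TIME


assert timeout(State.up, server_side=True, modbus_polling=True) == 120
assert timeout(State.up, server_side=False, modbus_polling=False) == 360
```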
@@ -67,18 +67,18 @@ class AsyncStream():
 
         # if the server connection closes, we also have to disconnect
         # the connection to te TSUN cloud
-        if self.remoteStream:
+        if self.remote_stream:
             logger.info(f'[{self.node_id}:{self.conn_no}] disc client '
-                        f'connection: [{self.remoteStream.node_id}:'
-                        f'{self.remoteStream.conn_no}]')
-            await self.remoteStream.disc()
+                        f'connection: [{self.remote_stream.node_id}:'
+                        f'{self.remote_stream.conn_no}]')
+            await self.remote_stream.disc()
 
-    async def client_loop(self, addr: str) -> None:
+    async def client_loop(self, _: str) -> None:
         '''Loop for receiving messages from the TSUN cloud (client-side)'''
-        clientStream = await self.remoteStream.loop()
-        logger.info(f'[{clientStream.node_id}:{clientStream.conn_no}] '
+        client_stream = await self.remote_stream.loop()
+        logger.info(f'[{client_stream.node_id}:{client_stream.conn_no}] '
                     'Client loop stopped for'
-                    f' l{clientStream.l_addr}')
+                    f' l{client_stream.l_addr}')
 
         # if the client connection closes, we don't touch the server
         # connection. Instead we erase the client connection stream,
@@ -86,13 +86,13 @@ class AsyncStream():
         # establish a new connection to the TSUN cloud
 
         # erase backlink to inverter
-        clientStream.remoteStream = None
+        client_stream.remote_stream = None
 
-        if self.remoteStream == clientStream:
-            # logging.debug(f'Client l{clientStream.l_addr} refs:'
-            #               f' {gc.get_referrers(clientStream)}')
+        if self.remote_stream == client_stream:
+            # logging.debug(f'Client l{client_stream.l_addr} refs:'
+            #               f' {gc.get_referrers(client_stream)}')
             # than erase client connection
-            self.remoteStream = None
+            self.remote_stream = None
 
     async def loop(self) -> Self:
         """Async loop handler for precessing all received messages"""
@@ -203,35 +203,35 @@ class AsyncStream():
         if not self._forward_buffer:
             return
         try:
-            if not self.remoteStream:
+            if not self.remote_stream:
                 await self.async_create_remote()
-                if self.remoteStream:
-                    if self.remoteStream._init_new_client_conn():
-                        await self.remoteStream.async_write()
+                if self.remote_stream:
+                    if self.remote_stream._init_new_client_conn():
+                        await self.remote_stream.async_write()
 
-            if self.remoteStream:
-                self.remoteStream._update_header(self._forward_buffer)
+            if self.remote_stream:
+                self.remote_stream._update_header(self._forward_buffer)
                 hex_dump_memory(logging.INFO,
-                                f'Forward to {self.remoteStream.addr}:',
+                                f'Forward to {self.remote_stream.addr}:',
                                 self._forward_buffer,
                                 len(self._forward_buffer))
-                self.remoteStream.writer.write(self._forward_buffer)
-                await self.remoteStream.writer.drain()
+                self.remote_stream.writer.write(self._forward_buffer)
+                await self.remote_stream.writer.drain()
                 self._forward_buffer = bytearray(0)
 
         except OSError as error:
-            if self.remoteStream:
-                rmt = self.remoteStream
-                self.remoteStream = None
+            if self.remote_stream:
+                rmt = self.remote_stream
+                self.remote_stream = None
                 logger.error(f'[{rmt.node_id}:{rmt.conn_no}] Fwd: {error} for '
                              f'l{rmt.l_addr} | r{rmt.r_addr}')
                 await rmt.disc()
                 rmt.close()
 
         except RuntimeError as error:
-            if self.remoteStream:
-                rmt = self.remoteStream
-                self.remoteStream = None
+            if self.remote_stream:
+                rmt = self.remote_stream
+                self.remote_stream = None
                 logger.info(f'[{rmt.node_id}:{rmt.conn_no}] '
                             f'Fwd: {error} for {rmt.l_addr}')
                 await rmt.disc()
@@ -1,5 +1,4 @@
 import logging
-# import gc
 from asyncio import StreamReader, StreamWriter
 from async_stream import AsyncStream
 from gen3.talent import Talent
@@ -15,7 +14,7 @@ class ConnectionG3(AsyncStream, Talent):
         AsyncStream.__init__(self, reader, writer, addr)
         Talent.__init__(self, server_side, id_str)
 
-        self.remoteStream: 'ConnectionG3' = remote_stream
+        self.remote_stream: 'ConnectionG3' = remote_stream
 
     '''
     Our puplic methods
@@ -26,10 +25,10 @@ class ConnectionG3(AsyncStream, Talent):
         # logger.info(f'AsyncStream refs: {gc.get_referrers(self)}')
 
     async def async_create_remote(self) -> None:
-        pass
+        pass  # virtual interface
 
     async def async_publ_mqtt(self) -> None:
-        pass
+        pass  # virtual interface
 
     def healthy(self) -> bool:
         logger.debug('ConnectionG3 healthy()')
@@ -9,9 +9,7 @@ from gen3.connection_g3 import ConnectionG3
 from aiomqtt import MqttCodeError
 from infos import Infos
 
-# import gc
 
-# logger = logging.getLogger('conn')
 logger_mqtt = logging.getLogger('mqtt')
 
 
@@ -60,10 +58,10 @@ class InverterG3(Inverter, ConnectionG3):
         logging.info(f'[{self.node_id}] Connect to {addr}')
         connect = asyncio.open_connection(host, port)
         reader, writer = await connect
-        self.remoteStream = ConnectionG3(reader, writer, addr, self,
+        self.remote_stream = ConnectionG3(reader, writer, addr, self,
                                          False, self.id_str)
-        logging.info(f'[{self.remoteStream.node_id}:'
-                     f'{self.remoteStream.conn_no}] '
+        logging.info(f'[{self.remote_stream.node_id}:'
+                     f'{self.remote_stream.conn_no}] '
                      f'Connected to {addr}')
         asyncio.create_task(self.client_loop(addr))
 
@@ -1,6 +1,5 @@
 import struct
 import logging
-import time
 import pytz
 from datetime import datetime
 from tzlocal import get_localzone
@@ -45,7 +44,7 @@ class Talent(Message):
     MB_REGULAR_TIMEOUT = 60
 
     def __init__(self, server_side: bool, id_str=b''):
-        super().__init__(server_side, self.send_modbus_cb, mb_timeout=11)
+        super().__init__(server_side, self.send_modbus_cb, mb_timeout=15)
         self.await_conn_resp_cnt = 0
         self.id_str = id_str
         self.contact_name = b''
@@ -169,7 +168,6 @@ class Talent(Message):
         fnc = self.switch.get(self.msg_id, self.msg_unknown)
         logger.info(self.__flow_str(self.server_side, 'forwrd') +
                     f' Ctl: {int(self.ctrl):#02x} Msg: {fnc.__name__!r}')
-        return
 
     def send_modbus_cb(self, modbus_pdu: bytearray, log_lvl: int, state: str):
         if self.state != State.up:
@@ -242,12 +240,8 @@ class Talent(Message):
     def _timestamp(self):  # pragma: no cover
         '''returns timestamp fo the inverter as localtime
         since 1.1.1970 in msec'''
-        if False:
-            # utc as epoche
-            ts = time.time()
-        else:
-            # convert localtime in epoche
-            ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
+        # convert localtime in epoche
+        ts = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
         return round(ts*1000)
 
     def _utcfromts(self, ts: float):
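The dead `if False:` branch (true UTC epoch via `time.time()`) is removed and the local-time path kept, which is also what makes dropping `import time` at the top of the file safe. A quick worked comparison of the two expressions (purely illustrative):

```python
import time
from datetime import datetime

# Local time measured against the naive epoch 1.1.1970, as the kept branch does.
local_ms = round((datetime.now() - datetime(1970, 1, 1)).total_seconds() * 1000)

# True UTC epoch, as the removed branch would have returned.
utc_ms = round(time.time() * 1000)

# The difference is roughly the host timezone's current UTC offset, in ms.
print((local_ms - utc_ms) / 3_600_000, "hours offset")
```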
@@ -297,7 +291,7 @@ class Talent(Message):
         if (buf_len < 5):  # enough bytes to read len and id_len?
             return
         result = struct.unpack_from('!lB', buf, 0)
-        len = result[0]  # len of complete message
+        msg_len = result[0]  # len of complete message
         id_len = result[1]  # len of variable id string
 
         hdr_len = 5+id_len+2
@@ -311,10 +305,9 @@ class Talent(Message):
         self.id_str = result[0]
         self.ctrl = Control(result[1])
         self.msg_id = result[2]
-        self.data_len = len-id_len-3
+        self.data_len = msg_len-id_len-3
         self.header_len = hdr_len
         self.header_valid = True
-        return
 
     def __build_header(self, ctrl, msg_id=None) -> None:
         if not msg_id:
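Renaming the local `len` also stops shadowing the built-in. Read together, these lines imply a header of: 4-byte big-endian total length, 1-byte id length, the id string, a control byte and a message id, followed by `data_len = msg_len - id_len - 3` payload bytes. A standalone parsing sketch (the exact format string for the variable part is an assumption, not taken from the diff):

```python
import struct


def parse_header(buf: bytes):
    """Sketch of the header fields handled in the hunks above."""
    if len(buf) < 5:                  # enough bytes to read len and id_len?
        return None
    msg_len, id_len = struct.unpack_from('!lB', buf, 0)
    hdr_len = 5 + id_len + 2
    if len(buf) < hdr_len:
        return None
    id_str, ctrl, msg_id = struct.unpack_from(f'!{id_len}sBB', buf, 5)
    data_len = msg_len - id_len - 3   # payload bytes following the header
    return id_str, ctrl, msg_id, hdr_len, data_len


# Example frame: msg_len = 1 (id_len byte) + 2 (id) + 2 (ctrl, msg_id) + 3 (payload)
frame = struct.pack('!lB2sBB3s', 8, 2, b'R1', 0x91, 0x04, b'abc')
print(parse_header(frame))  # (b'R1', 145, 4, 9, 3)
```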
@@ -360,7 +353,6 @@ class Talent(Message):
                 self.await_conn_resp_cnt -= 1
             else:
                 self.forward()
-            return
         else:
             logger.warning('Unknown Ctrl')
             self.inc_counter('Unknown_Ctrl')
@@ -388,10 +380,6 @@ class Talent(Message):
         if self.data_len == 0:
             if self.state == State.up:
                 self.state = State.pend  # block MODBUS cmds
-                if (self.modbus_polling):
-                    self.mb_timer.start(self.mb_start_timeout)
-                    self.db.set_db_def_value(Register.POLLING_INTERVAL,
-                                             self.mb_timeout)
 
             ts = self._timestamp()
             logger.debug(f'time: {ts:08x}')
@@ -455,6 +443,10 @@ class Talent(Message):
             self.__finish_send_msg()
             self.__process_data()
             self.state = State.up  # allow MODBUS cmds
+            if (self.modbus_polling):
+                self.mb_timer.start(self.mb_start_timeout)
+                self.db.set_db_def_value(Register.POLLING_INTERVAL,
+                                         self.mb_timeout)
 
         elif self.ctrl.is_resp():
             return  # ignore received response
@@ -477,7 +469,7 @@ class Talent(Message):
         if self.ctrl.is_req():
             self.inc_counter('OTA_Start_Msg')
         elif self.ctrl.is_ind():
-            pass
+            pass  # Ok, nothing to do
         else:
             logger.warning('Unknown Ctrl')
             self.inc_counter('Unknown_Ctrl')
@@ -490,27 +482,24 @@ class Talent(Message):
         result = struct.unpack_from('!lBB', self._recv_buffer,
                                     self.header_len)
         modbus_len = result[1]
-        # logger.debug(f'Ref: {result[0]}')
-        # logger.debug(f'Modbus MsgLen: {modbus_len} Func:{result[2]}')
         return msg_hdr_len, modbus_len
 
     def get_modbus_log_lvl(self) -> int:
         if self.ctrl.is_req():
             return logging.INFO
-        elif self.ctrl.is_ind():
-            if self.server_side:
-                return self.mb.last_log_lvl
+        elif self.ctrl.is_ind() and self.server_side:
+            return self.mb.last_log_lvl
         return logging.WARNING
 
     def msg_modbus(self):
-        hdr_len, modbus_len = self.parse_modbus_header()
+        hdr_len, _ = self.parse_modbus_header()
         data = self._recv_buffer[self.header_len:
                                  self.header_len+self.data_len]
 
         if self.ctrl.is_req():
-            if self.remoteStream.mb.recv_req(data[hdr_len:],
-                                             self.remoteStream.
-                                             msg_forward):
+            if self.remote_stream.mb.recv_req(data[hdr_len:],
+                                              self.remote_stream.
+                                              msg_forward):
                 self.inc_counter('Modbus_Command')
             else:
                 self.inc_counter('Invalid_Msg_Format')
@@ -1,5 +1,4 @@
 import logging
-# import gc
 from asyncio import StreamReader, StreamWriter
 from async_stream import AsyncStream
 from gen3plus.solarman_v5 import SolarmanV5
@@ -16,7 +15,7 @@ class ConnectionG3P(AsyncStream, SolarmanV5):
         AsyncStream.__init__(self, reader, writer, addr)
         SolarmanV5.__init__(self, server_side, client_mode)
 
-        self.remoteStream: 'ConnectionG3P' = remote_stream
+        self.remote_stream: 'ConnectionG3P' = remote_stream
 
     '''
     Our puplic methods
@@ -27,10 +26,10 @@ class ConnectionG3P(AsyncStream, SolarmanV5):
         # logger.info(f'AsyncStream refs: {gc.get_referrers(self)}')
 
     async def async_create_remote(self) -> None:
-        pass
+        pass  # virtual interface
 
     async def async_publ_mqtt(self) -> None:
-        pass
+        pass  # virtual interface
 
     def healthy(self) -> bool:
         logger.debug('ConnectionG3P healthy()')
@@ -9,9 +9,7 @@ from gen3plus.connection_g3p import ConnectionG3P
 from aiomqtt import MqttCodeError
 from infos import Infos
 
-# import gc
 
-# logger = logging.getLogger('conn')
 logger_mqtt = logging.getLogger('mqtt')
 
 
@@ -62,11 +60,11 @@ class InverterG3P(Inverter, ConnectionG3P):
         logging.info(f'[{self.node_id}] Connect to {addr}')
         connect = asyncio.open_connection(host, port)
         reader, writer = await connect
-        self.remoteStream = ConnectionG3P(reader, writer, addr, self,
+        self.remote_stream = ConnectionG3P(reader, writer, addr, self,
                                           server_side=False,
                                           client_mode=False)
-        logging.info(f'[{self.remoteStream.node_id}:'
-                     f'{self.remoteStream.conn_no}] '
+        logging.info(f'[{self.remote_stream.node_id}:'
+                     f'{self.remote_stream.conn_no}] '
                      f'Connected to {addr}')
         asyncio.create_task(self.client_loop(addr))
 
@@ -1,5 +1,4 @@
 import struct
-# import json
 import logging
 import time
 import asyncio
@@ -19,7 +18,6 @@ else:  # pragma: no cover
     from my_timer import Timer
     from gen3plus.infos_g3p import InfosG3P
     from infos import Register
-# import traceback
 
 logger = logging.getLogger('msg')
 
@@ -258,7 +256,6 @@ class SolarmanV5(Message):
         logger.info(self.__flow_str(self.server_side, 'forwrd') +
                     f' Ctl: {int(self.control):#04x}'
                     f' Msg: {fnc.__name__!r}')
-        return
 
     def _init_new_client_conn(self) -> bool:
         return False
@@ -312,7 +309,6 @@ class SolarmanV5(Message):
             self._recv_buffer = bytearray()
             return
         self.header_valid = True
-        return
 
     def __trailer_is_ok(self, buf: bytes, buf_len: int) -> bool:
         crc = buf[self.data_len+11]
@@ -436,15 +432,15 @@ class SolarmanV5(Message):
         return not cmd.startswith(tuple(self.at_acl[connection]['allow'])) or \
             cmd.startswith(tuple(self.at_acl[connection]['block']))
 
-    async def send_at_cmd(self, AT_cmd: str) -> None:
+    async def send_at_cmd(self, at_cmd: str) -> None:
         if self.state != State.up:
             logger.warning(f'[{self.node_id}] ignore AT+ cmd,'
                            ' as the state is not UP')
             return
-        AT_cmd = AT_cmd.strip()
+        at_cmd = at_cmd.strip()
 
-        if self.at_cmd_forbidden(cmd=AT_cmd, connection='mqtt'):
-            data_json = f'\'{AT_cmd}\' is forbidden'
+        if self.at_cmd_forbidden(cmd=at_cmd, connection='mqtt'):
+            data_json = f'\'{at_cmd}\' is forbidden'
             node_id = self.node_id
             key = 'at_resp'
             logger.info(f'{key}: {data_json}')
@@ -453,8 +449,8 @@ class SolarmanV5(Message):
 
         self.forward_at_cmd_resp = False
         self.__build_header(0x4510)
-        self._send_buffer += struct.pack(f'<BHLLL{len(AT_cmd)}sc', self.AT_CMD,
-                                         2, 0, 0, 0, AT_cmd.encode('utf-8'),
+        self._send_buffer += struct.pack(f'<BHLLL{len(at_cmd)}sc', self.AT_CMD,
+                                         2, 0, 0, 0, at_cmd.encode('utf-8'),
                                          b'\r')
         self.__finish_send_msg()
         try:
@@ -467,19 +463,19 @@ class SolarmanV5(Message):
 
     def __build_model_name(self):
         db = self.db
-        MaxPow = db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
-        Rated = db.get_db_value(Register.RATED_POWER, 0)
-        Model = None
-        if MaxPow == 2000:
-            if Rated == 800 or Rated == 600:
-                Model = f'TSOL-MS{MaxPow}({Rated})'
+        max_pow = db.get_db_value(Register.MAX_DESIGNED_POWER, 0)
+        rated = db.get_db_value(Register.RATED_POWER, 0)
+        model = None
+        if max_pow == 2000:
+            if rated == 800 or rated == 600:
+                model = f'TSOL-MS{max_pow}({rated})'
             else:
-                Model = f'TSOL-MS{MaxPow}'
-        elif MaxPow == 1800 or MaxPow == 1600:
-            Model = f'TSOL-MS{MaxPow}'
-        if Model:
-            logger.info(f'Model: {Model}')
-            self.db.set_db_def_value(Register.EQUIPMENT_MODEL, Model)
+                model = f'TSOL-MS{max_pow}'
+        elif max_pow == 1800 or max_pow == 1600:
+            model = f'TSOL-MS{max_pow}'
+        if model:
+            logger.info(f'Model: {model}')
+            self.db.set_db_def_value(Register.EQUIPMENT_MODEL, model)
 
     def __process_data(self, ftype, ts):
         inv_update = False
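The renamed locals make the naming rule easier to follow: the model string is derived from the designed maximum power and, for the 2000 W case, from the rated power as well. A standalone sketch of that mapping (illustrative only; the register lookups are replaced by plain arguments):

```python
def build_model_name(max_pow: int, rated: int) -> str | None:
    """Derive a TSOL model string from designed max power and rated power."""
    if max_pow == 2000:
        if rated in (800, 600):
            return f'TSOL-MS{max_pow}({rated})'
        return f'TSOL-MS{max_pow}'
    elif max_pow in (1800, 1600):
        return f'TSOL-MS{max_pow}'
    return None


print(build_model_name(2000, 800))   # TSOL-MS2000(800)
print(build_model_name(1600, 600))   # TSOL-MS1600
print(build_model_name(1200, 600))   # None
```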
@@ -564,17 +560,17 @@ class SolarmanV5(Message):
         result = struct.unpack_from('<B', data, 0)
         ftype = result[0]
         if ftype == self.AT_CMD:
-            AT_cmd = data[15:].decode()
-            if self.at_cmd_forbidden(cmd=AT_cmd, connection='tsun'):
+            at_cmd = data[15:].decode()
+            if self.at_cmd_forbidden(cmd=at_cmd, connection='tsun'):
                 self.inc_counter('AT_Command_Blocked')
                 return
             self.inc_counter('AT_Command')
             self.forward_at_cmd_resp = True
 
         elif ftype == self.MB_RTU_CMD:
-            if self.remoteStream.mb.recv_req(data[15:],
-                                             self.remoteStream.
+            if self.remote_stream.mb.recv_req(data[15:],
+                                              self.remote_stream.
                                              __forward_msg):
                 self.inc_counter('Modbus_Command')
             else:
                 logger.error('Invalid Modbus Msg')
@@ -593,9 +589,9 @@ class SolarmanV5(Message):
             if self.forward_at_cmd_resp:
                 return logging.INFO
             return logging.DEBUG
-        elif ftype == self.MB_RTU_CMD:
-            if self.server_side:
-                return self.mb.last_log_lvl
+        elif ftype == self.MB_RTU_CMD \
+                and self.server_side:
+            return self.mb.last_log_lvl
 
         return logging.WARNING
 
@@ -130,16 +130,16 @@ class ClrAtMidnight:
             return
 
         prfx += f'{keys[0]}'
-        dict = cls.db
-        if prfx not in dict:
-            dict[prfx] = {}
-        dict = dict[prfx]
+        db_dict = cls.db
+        if prfx not in db_dict:
+            db_dict[prfx] = {}
+        db_dict = db_dict[prfx]
 
         for key in keys[1:-1]:
-            if key not in dict:
-                dict[key] = {}
-            dict = dict[key]
-        dict[keys[-1]] = 0
+            if key not in db_dict:
+                db_dict[key] = {}
+            db_dict = db_dict[key]
+        db_dict[keys[-1]] = 0
 
     @classmethod
     def elm(cls) -> Generator[tuple[str, dict], None, None]:
@@ -333,7 +333,7 @@ class Infos:
     def info_defs(self) -> dict:
         return self.__info_defs
 
-    def dev_value(self, idx: str | int) -> str | int | float | None:
+    def dev_value(self, idx: str | int) -> str | int | float | dict | None:
         '''returns the stored device value from our database
 
         idx:int ==> lookup the value in the database and return it as str,
@@ -346,29 +346,29 @@ class Infos:
         elif idx in self.info_defs:
             row = self.info_defs[idx]
             if 'singleton' in row and row['singleton']:
-                dict = self.stat
+                db_dict = self.stat
             else:
-                dict = self.db
+                db_dict = self.db
 
             keys = row['name']
 
             for key in keys:
-                if key not in dict:
+                if key not in db_dict:
                     return None  # value not found in the database
-                dict = dict[key]
-            return dict  # value of the reqeusted entry
+                db_dict = db_dict[key]
+            return db_dict  # value of the reqeusted entry
 
         return None  # unknwon idx, not in info_defs
 
     def inc_counter(self, counter: str) -> None:
         '''inc proxy statistic counter'''
-        dict = self.stat['proxy']
-        dict[counter] += 1
+        db_dict = self.stat['proxy']
+        db_dict[counter] += 1
 
     def dec_counter(self, counter: str) -> None:
         '''dec proxy statistic counter'''
-        dict = self.stat['proxy']
-        dict[counter] -= 1
+        db_dict = self.stat['proxy']
+        db_dict[counter] -= 1
 
     def ha_proxy_confs(self, ha_prfx: str, node_id: str, snr: str) \
             -> Generator[tuple[str, str, str, str], None, None]:
@@ -525,9 +525,8 @@ class Infos:
             return None
         row = self.info_defs[key]
 
-        if 'singleton' in row:
-            if row['singleton']:
-                return None
+        if 'singleton' in row and row['singleton']:
+            return None
 
         # check if we have details for home assistant
         if 'ha' in row:
@@ -542,7 +541,7 @@ class Infos:
             return json.dumps(attr), component, node_id, uniq_id
         return None
 
-    def _key_obj(self, id: Register) -> list:
+    def _key_obj(self, id: Register) -> tuple:
         d = self.info_defs.get(id, {'name': None, 'level': logging.DEBUG,
                                     'unit': ''})
         if 'ha' in d and 'must_incr' in d['ha']:
@@ -554,21 +553,21 @@ class Infos:
 
     def update_db(self, keys: list, must_incr: bool, result):
         name = ''
-        dict = self.db
+        db_dict = self.db
        for key in keys[:-1]:
-            if key not in dict:
-                dict[key] = {}
-            dict = dict[key]
+            if key not in db_dict:
+                db_dict[key] = {}
+            db_dict = db_dict[key]
             name += key + '.'
-        if keys[-1] not in dict:
+        if keys[-1] not in db_dict:
             update = (not must_incr or result > 0)
         else:
             if must_incr:
-                update = dict[keys[-1]] < result
+                update = db_dict[keys[-1]] < result
             else:
-                update = dict[keys[-1]] != result
+                update = db_dict[keys[-1]] != result
         if update:
-            dict[keys[-1]] = result
+            db_dict[keys[-1]] = result
         name += keys[-1]
         return name, update
 
@@ -622,13 +621,13 @@ class Infos:
             return True
 
         if 'gte' in dep:
-            return not value >= dep['gte']
+            return value < dep['gte']
         elif 'less_eq' in dep:
-            return not value <= dep['less_eq']
+            return value > dep['less_eq']
         return True
 
     def set_pv_module_details(self, inv: dict) -> None:
-        map = {'pv1': {'manufacturer': Register.PV1_MANUFACTURER, 'model': Register.PV1_MODEL}, # noqa: E501
+        pvs = {'pv1': {'manufacturer': Register.PV1_MANUFACTURER, 'model': Register.PV1_MODEL}, # noqa: E501
               'pv2': {'manufacturer': Register.PV2_MANUFACTURER, 'model': Register.PV2_MODEL}, # noqa: E501
               'pv3': {'manufacturer': Register.PV3_MANUFACTURER, 'model': Register.PV3_MODEL}, # noqa: E501
               'pv4': {'manufacturer': Register.PV4_MANUFACTURER, 'model': Register.PV4_MODEL}, # noqa: E501
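The dependency check now states its conditions positively: `not value >= x` is exactly `value < x`, and `not value <= x` is exactly `value > x`, so the behaviour is unchanged. A tiny check of the equivalence:

```python
# The rewritten comparisons are simplifications of the negated forms they
# replace; both variants agree for ordinary comparable values.
gte, less_eq = 3, 7
for value in (-5, 0, 3, 7, 10):
    assert (not value >= gte) == (value < gte)
    assert (not value <= less_eq) == (value > less_eq)
print("equivalence holds for the sampled values")
```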
@@ -636,7 +635,7 @@ class Infos:
               'pv6': {'manufacturer': Register.PV6_MANUFACTURER, 'model': Register.PV6_MODEL} # noqa: E501
               }
 
-        for key, reg in map.items():
+        for key, reg in pvs.items():
            if key in inv:
                if 'manufacturer' in inv[key]:
                    self.set_db_def_value(reg['manufacturer'],
@@ -103,11 +103,10 @@ class Message(metaclass=IterRegistry):
 
     def _update_header(self, _forward_buffer):
         '''callback for updating the header of the forward buffer'''
-        return  # pragma: no cover
+        pass  # pragma: no cover
 
     def _set_mqtt_timestamp(self, key, ts: float | None):
-        if type(ts) is not None and \
-           key not in self.new_data or \
+        if key not in self.new_data or \
            not self.new_data[key]:
             if key == 'grid':
                 info_id = Register.TS_GRID
@@ -128,7 +127,7 @@ class Message(metaclass=IterRegistry):
         if self.mb:
             self.mb.close()
             self.mb = None
-        pass  # pragma: no cover
+        # pragma: no cover
 
     def inc_counter(self, counter: str) -> None:
         self.db.inc_counter(counter)
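The dropped clause in `_set_mqtt_timestamp` never did anything: `type(ts)` is always a class object, so `type(ts) is not None` is `True` for every argument, including `ts=None`. A quick demonstration:

```python
# type(x) returns a class object, never None, so the removed
# "type(ts) is not None" clause was always True and could be dropped.
for ts in (None, 0.0, 1723200000.0):
    assert (type(ts) is not None) is True
print("the removed guard was a no-op for all sampled inputs")
```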
@@ -106,6 +106,7 @@ class Modbus():
         self.loop = asyncio.get_event_loop()
         self.req_pend = False
         self.tim = None
+        self.node_id = ''
 
     def close(self):
         """free the queue and erase the callback handlers"""
@@ -180,6 +181,8 @@ class Modbus():
         5: No MODBUS request pending
         """
         # logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}')
+        self.node_id = node_id
+
         if not self.req_pend:
             self.err = 5
             return
@@ -267,7 +270,10 @@ class Modbus():
             self.__start_timer()
             self.snd_handler(self.last_req, self.last_log_lvl, state='Retrans')
         else:
-            logger.info(f'Modbus timeout {self}')
+            logger.info(f'[{self.node_id}] Modbus timeout '
+                        f'(FCode: {self.last_fcode} '
+                        f'Reg: 0x{self.last_reg:04x}, '
+                        f'{self.last_len})')
             self.counter['timeouts'] += 1
             self.__send_next_from_que()
 
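The timeout log line now names the node and the outstanding request instead of printing the object itself. A small sketch of the resulting message format (the values are hypothetical stand-ins for self.node_id, last_fcode, last_reg and last_len):

```python
import logging

logging.basicConfig(level=logging.INFO, format='%(message)s')
logger = logging.getLogger('msg')

node_id, last_fcode, last_reg, last_len = 'inv_1', 3, 0x3008, 48
logger.info(f'[{node_id}] Modbus timeout '
            f'(FCode: {last_fcode} '
            f'Reg: 0x{last_reg:04x}, '
            f'{last_len})')
# -> [inv_1] Modbus timeout (FCode: 3 Reg: 0x3008, 48)
```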
@@ -20,7 +20,7 @@ def test_empty_config():
         Config.conf_schema.validate(cnf)
         assert False
     except SchemaMissingKeyError:
-        assert True
+        pass
 
 def test_default_config():
     with open("app/config/default_config.toml", "rb") as f:
@@ -28,10 +28,9 @@ def test_default_config():
 
     try:
         validated = Config.conf_schema.validate(cnf)
-        assert True
-    except:
+    except Exception:
         assert False
-    assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}}
+    assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': False, 'monitor_sn': 0, 'suggested_area': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}}
 
 def test_full_config():
     cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005},
@@ -45,8 +44,7 @@ def test_full_config():
|
|||||||
'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}}
|
'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}}
|
||||||
try:
|
try:
|
||||||
validated = Config.conf_schema.validate(cnf)
|
validated = Config.conf_schema.validate(cnf)
|
||||||
assert True
|
except Exception:
|
||||||
except:
|
|
||||||
assert False
|
assert False
|
||||||
assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'pv1': {'manufacturer': 'man1','type': 'type1'},'pv2': {'manufacturer': 'man2','type': 'type2'},'pv3': {'manufacturer': 'man3','type': 'type3'}, 'suggested_area': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}}
|
assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'pv1': {'manufacturer': 'man1','type': 'type1'},'pv2': {'manufacturer': 'man2','type': 'type2'},'pv3': {'manufacturer': 'man3','type': 'type3'}, 'suggested_area': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': ''}}}
|
||||||
|
|
||||||
@@ -63,8 +61,7 @@ def test_mininum_config():
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
validated = Config.conf_schema.validate(cnf)
|
validated = Config.conf_schema.validate(cnf)
|
||||||
assert True
|
except Exception:
|
||||||
except:
|
|
||||||
assert False
|
assert False
|
||||||
assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': ''}}}
|
assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': ''}}}
|
||||||
|
|
||||||
@@ -74,7 +71,7 @@ def test_read_empty():
|
|||||||
err = TstConfig.read('app/config/')
|
err = TstConfig.read('app/config/')
|
||||||
assert err == None
|
assert err == None
|
||||||
cnf = TstConfig.get()
|
cnf = TstConfig.get()
|
||||||
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': True, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
||||||
|
|
||||||
defcnf = TstConfig.def_config.get('solarman')
|
defcnf = TstConfig.def_config.get('solarman')
|
||||||
assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}
|
assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}
|
||||||
@@ -96,7 +93,7 @@ def test_read_cnf1():
|
|||||||
err = TstConfig.read('app/config/')
|
err = TstConfig.read('app/config/')
|
||||||
assert err == None
|
assert err == None
|
||||||
cnf = TstConfig.get()
|
cnf = TstConfig.get()
|
||||||
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': True, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
||||||
cnf = TstConfig.get('solarman')
|
cnf = TstConfig.get('solarman')
|
||||||
assert cnf == {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}
|
assert cnf == {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}
|
||||||
defcnf = TstConfig.def_config.get('solarman')
|
defcnf = TstConfig.def_config.get('solarman')
|
||||||
@@ -109,7 +106,7 @@ def test_read_cnf2():
|
|||||||
err = TstConfig.read('app/config/')
|
err = TstConfig.read('app/config/')
|
||||||
assert err == None
|
assert err == None
|
||||||
cnf = TstConfig.get()
|
cnf = TstConfig.get()
|
||||||
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': True, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
||||||
assert True == TstConfig.is_default('solarman')
|
assert True == TstConfig.is_default('solarman')
|
||||||
|
|
||||||
def test_read_cnf3():
|
def test_read_cnf3():
|
||||||
@@ -126,7 +123,7 @@ def test_read_cnf4():
|
|||||||
err = TstConfig.read('app/config/')
|
err = TstConfig.read('app/config/')
|
||||||
assert err == None
|
assert err == None
|
||||||
cnf = TstConfig.get()
|
cnf = TstConfig.get()
|
||||||
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': True, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'suggested_area': '', 'modbus_polling': False, 'monitor_sn': 0, 'node_id': ''}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'suggested_area': '', 'node_id': ''}}}
|
||||||
assert False == TstConfig.is_default('solarman')
|
assert False == TstConfig.is_default('solarman')
|
||||||
|
|
||||||
def test_read_cnf5():
|
def test_read_cnf5():
|
||||||
|
|||||||
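The config tests above drop the redundant `assert True` statements and narrow the bare `except:` to `except Exception:`. For the missing-key case, `pytest.raises` expresses the same intent more directly; a minimal sketch that assumes the `schema` package already used by these tests, with an illustrative schema instead of the proxy's real `Config.conf_schema`:

```python
import pytest
from schema import Schema, SchemaMissingKeyError

# illustrative schema only; the proxy validates against Config.conf_schema
conf_schema = Schema({'mqtt': {'host': str, 'port': int}})

def test_empty_config_raises():
    # replaces the try/except plus assert False/True bookkeeping
    with pytest.raises(SchemaMissingKeyError):
        conf_schema.validate({})
```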
@@ -1,6 +1,6 @@
 # test_with_pytest.py
 import pytest
-import json
+import json, math
 import logging
 from app.src.infos import Register, ClrAtMidnight
 from app.src.infos import Infos
@@ -77,7 +77,7 @@ def test_table_definition():


 for d_json, comp, node_id, id in i.ha_proxy_confs(ha_prfx="tsun/", node_id = 'proxy/', snr = '456'):
-pass
+pass # sideeffect is calling generator i.ha_proxy_confs()

 val = i.dev_value(Register.INTERNAL_ERROR) # check internal error counter
 assert val == 0
@@ -222,24 +222,24 @@ def test_get_value():

 i.set_db_def_value(Register.PV2_VOLTAGE, 30.3)
 assert 30 == i.get_db_value(Register.PV1_VOLTAGE, None)
-assert 30.3 == i.get_db_value(Register.PV2_VOLTAGE, None)
+assert math.isclose(30.3,i.get_db_value(Register.PV2_VOLTAGE, None), rel_tol=1e-09, abs_tol=1e-09)

 def test_update_value():
 i = Infos()
 assert None == i.get_db_value(Register.PV1_VOLTAGE, None)

 keys = i.info_defs[Register.PV1_VOLTAGE]['name']
-name, update = i.update_db(keys, True, 30)
+_, update = i.update_db(keys, True, 30)
 assert update == True
 assert 30 == i.get_db_value(Register.PV1_VOLTAGE, None)

 keys = i.info_defs[Register.PV1_VOLTAGE]['name']
-name, update = i.update_db(keys, True, 30)
+_, update = i.update_db(keys, True, 30)
 assert update == False
 assert 30 == i.get_db_value(Register.PV1_VOLTAGE, None)

 keys = i.info_defs[Register.PV1_VOLTAGE]['name']
-name, update = i.update_db(keys, False, 29)
+_, update = i.update_db(keys, False, 29)
 assert update == True
 assert 29 == i.get_db_value(Register.PV1_VOLTAGE, None)

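The switch from `assert 30.3 == ...` to `math.isclose(...)` avoids exact equality checks on floats, which static analysers commonly flag and which can fail for values that have no exact binary representation. A short illustration; `pytest.approx` would serve the same purpose:

```python
import math
import pytest

value = 0.1 + 0.2                   # stored as 0.30000000000000004
assert value != 0.3                 # exact comparison fails
assert math.isclose(value, 0.3, rel_tol=1e-09, abs_tol=1e-09)
assert value == pytest.approx(0.3)  # pytest's built-in helper for the same check
```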
@@ -1,12 +1,12 @@

 # test_with_pytest.py
-import pytest, json
+import pytest, json, math
 from app.src.infos import Register
 from app.src.gen3plus.infos_g3p import InfosG3P
 from app.src.gen3plus.infos_g3p import RegisterMap

 @pytest.fixture
-def DeviceData(): # 0x4110 ftype: 0x02
+def device_data(): # 0x4110 ftype: 0x02
 msg = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xba\xd2\x00\x00'
 msg += b'\x19\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x64\x01\x4c\x53'
 msg += b'\x57\x35\x42\x4c\x45\x5f\x31\x37\x5f\x30\x32\x42\x30\x5f\x31\x2e'
@@ -24,7 +24,7 @@ def DeviceData(): # 0x4110 ftype: 0x02
 return msg

 @pytest.fixture
-def InverterData(): # 0x4210 ftype: 0x01
+def inverter_data(): # 0x4210 ftype: 0x01
 msg = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xb0\x02\xbc\xc8'
 msg += b'\x24\x32\x6c\x1f\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00'
 msg += b'\x59\x31\x37\x45\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x45'
@@ -63,23 +63,23 @@ def test_default_db():
 "collector": {"Chip_Type": "IGEN TECH"},
 })

-def test_parse_4110(DeviceData: bytes):
+def test_parse_4110(device_data: bytes):
 i = InfosG3P(client_mode=False)
 i.db.clear()
-for key, update in i.parse (DeviceData, 0x41, 2):
+for key, update in i.parse (device_data, 0x41, 2):
-pass
+pass # side effect is calling generator i.parse()

 assert json.dumps(i.db) == json.dumps({
 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": "192.168.80.49"},
 'collector': {"Chip_Model": "LSW5BLE_17_02B0_1.05", "Collector_Fw_Version": "V1.1.00.0B"},
 })

-def test_parse_4210(InverterData: bytes):
+def test_parse_4210(inverter_data: bytes):
 i = InfosG3P(client_mode=False)
 i.db.clear()

-for key, update in i.parse (InverterData, 0x42, 1):
+for key, update in i.parse (inverter_data, 0x42, 1):
-pass
+pass # side effect is calling generator i.parse()

 assert json.dumps(i.db) == json.dumps({
 "controller": {"Power_On_Time": 2051},
@@ -233,28 +233,27 @@ def test_build_ha_conf2():

 assert tests==8

-def test_exception_and_eval(InverterData: bytes):
+def test_exception_and_eval(inverter_data: bytes):

 # add eval to convert temperature from °F to °C
 RegisterMap.map[0x420100d8]['eval'] = '(result-32)/1.8'
 # map PV1_VOLTAGE to invalid register
 RegisterMap.map[0x420100e0]['reg'] = Register.TEST_REG2
 # set invalid maping entry for OUTPUT_POWER (string instead of dict type)
-Backup = RegisterMap.map[0x420100de]
+backup = RegisterMap.map[0x420100de]
 RegisterMap.map[0x420100de] = 'invalid_entry'

 i = InfosG3P(client_mode=False)
 # i.db.clear()

-for key, update in i.parse (InverterData, 0x42, 1):
+for key, update in i.parse (inverter_data, 0x42, 1):
-pass
+pass # side effect is calling generator i.parse()
-assert 12.2222 == round (i.get_db_value(Register.INVERTER_TEMP, 0),4)
+assert math.isclose(12.2222, round (i.get_db_value(Register.INVERTER_TEMP, 0),4), rel_tol=1e-09, abs_tol=1e-09)

 del RegisterMap.map[0x420100d8]['eval'] # remove eval
 RegisterMap.map[0x420100e0]['reg'] = Register.PV1_VOLTAGE # reset mapping
-RegisterMap.map[0x420100de] = Backup # reset mapping
+RegisterMap.map[0x420100de] = backup # reset mapping

-for key, update in i.parse (InverterData, 0x42, 1):
+for key, update in i.parse (inverter_data, 0x42, 1):
-pass
+pass # side effect is calling generator i.parse()
 assert 54 == i.get_db_value(Register.INVERTER_TEMP, 0)

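Two things change in the tests above: the fixtures are renamed to the snake_case form (`device_data`, `inverter_data`) that PEP 8 and most linters expect, and the empty loop bodies gain a comment explaining why the loop exists: `i.parse()` is a generator, so its body only runs while it is being iterated. A stripped-down illustration with a made-up generator:

```python
def parse(data):
    # a generator body executes only while the generator is iterated
    db = {}
    for idx, byte in enumerate(data):
        db[idx] = byte            # the side effect the tests care about
        yield idx, True           # one key/update pair per step

gen = parse(b'\x01\x02')          # nothing has been parsed yet
for _ in gen:                     # draining the generator triggers the parsing
    pass
assert gen.gi_frame is None       # the generator is exhausted
```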
@@ -5,7 +5,6 @@ from app.src.modbus import Modbus
 from app.src.infos import Infos, Register

 pytest_plugins = ('pytest_asyncio',)
-# pytestmark = pytest.mark.asyncio(scope="module")

 class ModbusTestHelper(Modbus):
 def __init__(self):
@@ -76,8 +75,8 @@ def test_recv_resp_crc_err():
 mb.req_pend = True
 mb.last_addr = 1
 mb.last_fcode = 3
-mb.last_reg == 0x300e
+mb.last_reg = 0x300e
-mb.last_len == 2
+mb.last_len = 2
 # check matching response, but with CRC error
 call = 0
 for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf3', 'test'):
@@ -96,8 +95,8 @@ def test_recv_resp_invalid_addr():
 # simulate a transmitted request
 mb.last_addr = 1
 mb.last_fcode = 3
-mb.last_reg == 0x300e
+mb.last_reg = 0x300e
-mb.last_len == 2
+mb.last_len = 2

 # check not matching response, with wrong server addr
 call = 0
@@ -247,7 +246,7 @@ def test_queue2():
 assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b'

 for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b', 'test'):
-pass
+pass # call generator mb.recv_resp()

 assert mb.que.qsize() == 0
 assert mb.send_calls == 3
@@ -298,7 +297,7 @@ def test_queue3():
 assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b'

 for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b', 'test'):
-pass
+pass # no code in loop is OK; calling the generator is the purpose
 assert 0 == mb.err
 assert mb.recv_responses == 2

@@ -364,8 +363,6 @@ async def test_timeout():
 assert mb.retry_cnt == 0
 assert mb.send_calls == 4

-# assert mb.counter == {}

 def test_recv_unknown_data():
 '''Receive a response with an unknwon register'''
 mb = ModbusTestHelper()
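The `mb.last_reg == 0x300e` and `mb.last_len == 2` lines fixed above were bare comparisons: the expression is evaluated and its result discarded, so the attributes were never set and the simulated request was incomplete. A tiny illustration of the difference:

```python
class Request:
    last_reg = 0
    last_len = 0

req = Request()
req.last_reg == 0x300e        # no-op: the comparison result is thrown away
assert req.last_reg == 0      # the attribute is unchanged
req.last_reg = 0x300e         # assignment actually updates the attribute
assert req.last_reg == 0x300e
```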
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,8 +1,6 @@
 # test_with_pytest.py and scapy
 #
 import pytest, socket, time
-#from scapy.all import *
-#from scapy.layers.inet import IP, TCP, TCP_client

 def get_sn() -> bytes:
 return b'R170000000000001'
@@ -120,9 +118,7 @@ def MsgOtaUpdateReq(): # Over the air update request from talent cloud

 @pytest.fixture(scope="session")
 def ClientConnection():
-#host = '172.16.30.7'
 host = 'logger.talent-monitoring.com'
-#host = '127.0.0.1'
 port = 5005
 with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
 s.connect((host, port))
@@ -132,9 +128,7 @@ def ClientConnection():
 s.close()

 def tempClientConnection():
-#host = '172.16.30.7'
 host = 'logger.talent-monitoring.com'
-#host = '127.0.0.1'
 port = 5005
 with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
 s.connect((host, port))
@@ -148,7 +142,6 @@ def test_open_close():
 pass
 except:
 assert False
-assert True

 def test_send_contact_info1(ClientConnection, MsgContactInfo, MsgContactResp):
 s = ClientConnection
@@ -166,7 +159,7 @@ def test_send_contact_info2(ClientConnection, MsgContactInfo2, MsgContactInfo, M
 s.sendall(MsgContactInfo2)
 data = s.recv(1024)
 except TimeoutError:
-assert True
+pass
 else:
 assert False

@@ -198,7 +191,7 @@ def test_send_contact_resp(ClientConnection, MsgContactResp):
 s.sendall(MsgContactResp)
 data = s.recv(1024)
 except TimeoutError:
-assert True
+pass
 else:
 assert data == b''

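The system tests above treat a `TimeoutError` from `recv()` as the expected outcome when the peer deliberately does not answer, which only works if the socket has a timeout configured. A minimal sketch of that pattern (host, port, and payload are illustrative, not the real test fixtures, and something must be listening on the endpoint for the connect to succeed):

```python
import socket

HOST, PORT = '127.0.0.1', 5005       # illustrative endpoint

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
    s.settimeout(1.0)                # recv() raises TimeoutError after 1 s of silence
    s.connect((HOST, PORT))
    s.sendall(b'\x00\x00\x00\x01')   # dummy payload
    try:
        data = s.recv(1024)
    except TimeoutError:             # socket.timeout is an alias of TimeoutError since Python 3.10
        data = b''
```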
@@ -3,9 +3,6 @@
 import pytest, socket, time, os
 from dotenv import load_dotenv

-#from scapy.all import *
-#from scapy.layers.inet import IP, TCP, TCP_client

 load_dotenv()

 SOLARMAN_SNR = os.getenv('SOLARMAN_SNR', '00000080')
@@ -111,10 +108,7 @@ def MsgInvalidInfo(): # Contact Info message wrong start byte

 @pytest.fixture(scope="session")
 def ClientConnection():
-#host = '172.16.30.7'
 host = 'logger.talent-monitoring.com'
-#host = 'iot.talent-monitoring.com'
-#host = '127.0.0.1'
 port = 10000
 with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
 s.connect((host, port))
@@ -131,10 +125,7 @@ def checkResponse(data, Msg):


 def tempClientConnection():
-#host = '172.16.30.7'
 host = 'logger.talent-monitoring.com'
-#host = 'iot.talent-monitoring.com'
-#host = '127.0.0.1'
 port = 10000
 with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
 s.connect((host, port))
@@ -145,11 +136,10 @@ def tempClientConnection():

 def test_open_close():
 try:
-for s in tempClientConnection():
+for _ in tempClientConnection():
-pass
+pass # test generator tempClientConnection()
 except:
 assert False
-assert True

 def test_conn_msg(ClientConnection,MsgContactInfo, MsgContactResp):
 s = ClientConnection
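The GEN3PLUS system test reads its monitor serial number from the environment via python-dotenv, so the value can live in a local `.env` file instead of being hard-coded. A minimal sketch of that pattern (the variable name matches the test above, the example value is made up):

```python
# .env (kept out of version control):
# SOLARMAN_SNR=12345678

import os
from dotenv import load_dotenv

load_dotenv()                                         # copies variables from .env into os.environ
SOLARMAN_SNR = os.getenv('SOLARMAN_SNR', '00000080')  # falls back to a default serial number
```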