diff --git a/.cover_ghaction_rc b/.cover_ghaction_rc new file mode 100644 index 0000000..d5b2526 --- /dev/null +++ b/.cover_ghaction_rc @@ -0,0 +1,3 @@ +[run] +branch = True +relative_files = True diff --git a/.coveragerc b/.coveragerc index 890dd1b..398ff08 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,3 +1,2 @@ [run] branch = True -relative_files = True \ No newline at end of file diff --git a/.env_example b/.env_example new file mode 100644 index 0000000..4d28078 --- /dev/null +++ b/.env_example @@ -0,0 +1,9 @@ +# example file for the .env file. The .env file sets private values +# which are needed for building containers + +# registry for debug and dev container +PRIVAT_CONTAINER_REGISTRY=docker.io// + +# registry for official container (preview, rc, rel) +PUBLIC_CONTAINER_REGISTRY=ghcr.io// +PUBLIC_CR_KEY= \ No newline at end of file diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 97f390f..4184019 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -31,7 +31,7 @@ env: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 @@ -54,11 +54,11 @@ jobs: flake8 --exit-zero --ignore=C901,E121,E123,E126,E133,E226,E241,E242,E704,W503,W504,W505 --format=pylint --output-file=output_flake.txt --exclude=*.pyc app/src/ - name: Test with pytest run: | - python -m pytest app ha_addon --cov=app/src --cov=ha_addon/rootfs/home --cov-report=xml + python -m pytest app --cov=app/src --cov-config=.cover_ghaction_rc --cov-report=xml coverage report - name: Analyze with SonarCloud if: ${{ env.SONAR_TOKEN != 0 }} - uses: SonarSource/sonarcloud-github-action@v3.1.0 + uses: SonarSource/sonarqube-scan-action@v4 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: diff --git a/.gitignore b/.gitignore index 785c737..3e39de3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,11 @@ __pycache__ .pytest_cache +.venv/** bin/** mosquitto/** homeassistant/** -ha_addon/rootfs/home/proxy/* -ha_addon/rootfs/requirements.txt +ha_addons/ha_addon/rootfs/home/proxy/* +ha_addons/ha_addon/rootfs/requirements.txt tsun_proxy/** Doku/** .DS_Store diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..56bb660 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.12.7 diff --git a/.vscode/settings.json b/.vscode/settings.json index c8dd696..626f77f 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,18 +1,20 @@ { + "python.analysis.extraPaths": [ + "app/src", + "app/tests", + ".venv/lib", + ], "python.testing.pytestArgs": [ - "-vv", - "app", + "-vvv", "--cov=app/src", - "--cov=ha_addon/rootfs/home", "--cov-report=xml", - "--cov-report=html", - "system_tests", - "ha_addon" + "app", + "system_tests" ], "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "flake8.args": [ - "--extend-exclude=app/tests/*.py system_tests/*.py" + "--extend-exclude=app/tests/*.py,system_tests/*.py" ], "sonarlint.connectedMode.project": { "connectionId": "s-allius", @@ -20,5 +22,11 @@ }, "files.exclude": { "**/*.pyi": true - } + }, + "python.analysis.typeEvaluation.deprecateTypingAliases": true, + "python.autoComplete.extraPaths": [ + ".venv/lib" + ], + "coverage-gutters.coverageBaseDir": "tsun", + "makefile.configureOnOpen": false } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index d84e301..e299e75 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,8 +7,26 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [unreleased]
+- make the configuration more flexible, add command line args to control this +- fix the python path so we don't need special import paths for unit tests anymore +- support test coverage in vscode +- upgrade SonarQube action to version 4 +- update github action to Ubuntu 24.04 +- add initial support for home assistant add-ons from @mime24 +- github action: use ubuntu 24.04 and sonar-scanner-action 4 [#222](https://github.com/s-allius/tsun-gen3-proxy/issues/222) +- migrate paho.mqtt CallbackAPIVersion to VERSION2 [#224](https://github.com/s-allius/tsun-gen3-proxy/issues/224) +- add PROD_COMPL_TYPE to trace +- add SolarmanV5 messages builder +- report inverter alarms and faults per MQTT [#7](https://github.com/s-allius/tsun-gen3-proxy/issues/7) + ## [0.11.1] - 2024-11-20 +- fix pytest setup so it can be started from the rootdir + - support python venv environment + - add pytest.ini + - move common settings from .vscode/settings.json into pytest.ini + - add missing requirements + - fix import paths for pytests - Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.10.5 to 3.10.11. ## [0.11.0] - 2024-10-13 diff --git a/Makefile b/Makefile index a09ae26..493875d 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,10 @@ -.PHONY: build clean +.PHONY: build clean addon-dev addon-debug addon-rc # debug dev: # $(MAKE) -C app $@ clean build: - $(MAKE) -C ha_addon $@ \ No newline at end of file + $(MAKE) -C ha_addons/ha_addon $@ + +addon-dev addon-debug addon-rc: + $(MAKE) -C ha_addons/ha_addon $(patsubst addon-%,%,$@) \ No newline at end of file diff --git a/app/.version b/app/.version new file mode 100644 index 0000000..d33c3a2 --- /dev/null +++ b/app/.version @@ -0,0 +1 @@ +0.12.0 \ No newline at end of file diff --git a/app/proxy_2.svg b/app/proxy_2.svg index 6a6fb51..232983f 100644 --- a/app/proxy_2.svg +++ b/app/proxy_2.svg @@ -4,429 +4,368 @@ - - + + G - + A0 - - - -You can stick notes -on diagrams too! + + + +Example of +instantiation for a +GEN3 inverter!
A1 - -<<AbstractIterMeta>> - - -__iter__() + +<<AbstractIterMeta>> + + +__iter__() A14 - -<<ProtocolIfc>> - -_registry - -close() + +<<ProtocolIfc>> + +_registry + +close() - + A1->A14 - - + + A2 - -InverterG3 - -addr -remote:StreamPtr -local:StreamPtr - -create_remote() -close() - - - -A7 - -AsyncStreamServer - -create_remote - -<async>server_loop() -<async>_async_forward() -<async>publish_outstanding_mqtt() -close() - - - -A2->A7 - - - -local - - - -A8 - -AsyncStreamClient - - -<async>client_loop() -<async>_async_forward()) - - - -A2->A8 - - -remote + +InverterG3 + +addr +remote:StreamPtr +local:StreamPtr + +create_remote() +close() A3 - -InverterG3P - -addr -remote:StreamPtr -local:StreamPtr - -create_remote() -close() + +local:StreamPtr - - -A3->A7 - - - -local - - - -A3->A8 - - -remote + + +A2->A3 + + + A4 - -<<AsyncIfc>> - - -set_node_id() -get_conn_no() -tx_add() -tx_flush() -tx_get() -tx_peek() -tx_log() -tx_clear() -tx_len() -fwd_add() -fwd_log() -rx_get() -rx_peek() -rx_log() -rx_clear() -rx_len() -rx_set_cb() -prot_set_timeout_cb() + +remote:StreamPtr - - -A5 - -AsyncIfcImpl - -fwd_fifo:ByteFifo -tx_fifo:ByteFifo -rx_fifo:ByteFifo -conn_no:Count -node_id -timeout_cb - - - -A4->A5 - - - - - -A6 - -AsyncStream - -reader -writer -addr -r_addr -l_addr - -<async>loop -disc() -close() -healthy() -__async_read() -__async_write() -__async_forward() - - + -A5->A6 - - +A2->A4 + + + - - -A6->A7 - - + + +A8 + +AsyncStreamServer + +create_remote + +<async>server_loop() +<async>_async_forward() +<async>publish_outstanding_mqtt() +close() - - -A6->A8 - - + + +A3->A8 + + + A9 - -Talent - -ifc:AsyncIfc -conn_no -addr -await_conn_resp_cnt -id_str -contact_name -contact_mail -db:InfosG3 -mb:Modbus -switch - -msg_contact_info() -msg_ota_update() -msg_get_time() -msg_collector_data() -msg_inverter_data() -msg_unknown() -healthy() -close() + +AsyncStreamClient + + +<async>client_loop() +<async>_async_forward()) - + + +A4->A9 + + +0..1 + + + +A5 + +<<AsyncIfc>> + + +set_node_id() +get_conn_no() +tx_add() +tx_flush() +tx_get() +tx_peek() +tx_log() +tx_clear() +tx_len() +fwd_add() +fwd_log() +rx_get() +rx_peek() +rx_log() +rx_clear() +rx_len() +rx_set_cb() +prot_set_timeout_cb() + + + +A6 + +AsyncIfcImpl + +fwd_fifo:ByteFifo +tx_fifo:ByteFifo +rx_fifo:ByteFifo +conn_no:Count +node_id +timeout_cb + + + +A5->A6 + + + + + +A7 + +AsyncStream + +reader +writer +addr +r_addr +l_addr + +<async>loop +disc() +close() +healthy() +__async_read() +__async_write() +__async_forward() + + + +A6->A7 + + + + -A9->A2 - - -remote +A7->A8 + + - - -A9->A2 - - - -local - - - -A9->A4 - - -use - - - -A12 - -InfosG3 - - -ha_confs() -parse() - - - -A9->A12 - - + + +A7->A9 + + A10 - -SolarmanV5 - -ifc:AsyncIfc -conn_no -addr -control -serial -snr -db:InfosG3P -mb:Modbus -switch - -msg_unknown() -healthy() -close() + +Talent + +conn_no +addr +await_conn_resp_cnt +id_str +contact_name +contact_mail +db:InfosG3 +mb:Modbus +switch + +msg_contact_info() +msg_ota_update() +msg_get_time() +msg_collector_data() +msg_inverter_data() +msg_unknown() +healthy() +close() - + A10->A3 - - -remote - - - -A10->A3 - - - -local + + + - + A10->A4 - - -use + + +0..1 - - -A13 - -InfosG3P - - -ha_confs() -parse() + + +A12 + +InfosG3 + + +ha_confs() +parse() - - -A10->A13 - - + + +A10->A12 + + A11 - -Infos - -stat -new_stat_data -info_dev - -static_init() -dev_value() -inc_counter() -dec_counter() -ha_proxy_conf -ha_conf -ha_remove -update_db -set_db_def_value -get_db_value -ignore_this_device + +Infos + +stat +new_stat_data +info_dev + +static_init() 
+dev_value() +inc_counter() +dec_counter() +ha_proxy_conf +ha_conf +ha_remove +update_db +set_db_def_value +get_db_value +ignore_this_device - + A11->A12 - - + + - - -A11->A13 - - + + +A13 + +Message + +server_side:bool +mb:Modbus +ifc:AsyncIfc +node_id +header_valid:bool +header_len +data_len +unique_id +sug_area:str +new_data:dict +state:State +shutdown_started:bool +modbus_elms +mb_timer:Timer +mb_timeout +mb_first_timeout +modbus_polling:bool + +_set_mqtt_timestamp() +_timeout() +_send_modbus_cmd() +<async> end_modbus_cmd() +close() +inc_counter() +dec_counter() + + + +A13->A5 + + +use + + + +A13->A10 + + + + + +A14->A13 + + A15 - -Message - -node_id - -inc_counter() -dec_counter() + +Modbus + +que +snd_handler +rsp_handler +timeout +max_retires +last_xxx +err +retry_cnt +req_pend +tim + +build_msg() +recv_req() +recv_resp() +close() - - -A14->A15 - - - - - -A15->A9 - - - - - -A15->A10 - - - - - -A16 - -Modbus - -que -snd_handler -rsp_handler -timeout -max_retires -last_xxx -err -retry_cnt -req_pend -tim - -build_msg() -recv_req() -recv_resp() -close() - - - -A16->A9 - - -has -1 - - - -A16->A10 - - -has -1 + + +A15->A13 + + +has +0..1 diff --git a/app/proxy_2.yuml b/app/proxy_2.yuml index 39a399e..5138428 100644 --- a/app/proxy_2.yuml +++ b/app/proxy_2.yuml @@ -2,11 +2,12 @@ // {direction:topDown} // {generate:true} -[note: You can stick notes on diagrams too!{bg:cornsilk}] +[note: Example of instantiation for a GEN3 inverter!{bg:cornsilk}] [<>||__iter__()] [InverterG3|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] -[InverterG3P|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] +[InverterG3]++->[local:StreamPtr] +[InverterG3]++->[remote:StreamPtr] [<>||set_node_id();get_conn_no();;tx_add();tx_flush();tx_get();tx_peek();tx_log();tx_clear();tx_len();;fwd_add();fwd_log();rx_get();rx_peek();rx_log();rx_clear();rx_len();rx_set_cb();;prot_set_timeout_cb()] [AsyncIfcImpl|fwd_fifo:ByteFifo;tx_fifo:ByteFifo;rx_fifo:ByteFifo;conn_no:Count;node_id;timeout_cb] @@ -19,33 +20,24 @@ [AsyncStream]^[AsyncStreamClient] -[Talent|ifc:AsyncIfc;conn_no;addr;;await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;healthy();close()] -[Talent][AsyncStreamClient] -[Talent]<-local++[InverterG3] -[InverterG3]++local->[AsyncStreamServer] - -[SolarmanV5|ifc:AsyncIfc;conn_no;addr;;control;serial;snr;db:InfosG3P;mb:Modbus;switch|msg_unknown();;healthy();close()] -[SolarmanV5][AsyncStreamClient] -[SolarmanV5]<-local++[InverterG3P] -[InverterG3P]++local->[AsyncStreamServer] +[Talent|conn_no;addr;;await_conn_resp_cnt;id_str;contact_name;contact_mail;db:InfosG3;mb:Modbus;switch|msg_contact_info();msg_ota_update();msg_get_time();msg_collector_data();msg_inverter_data();msg_unknown();;healthy();close()] +[Talent]<-++[local:StreamPtr] +[local:StreamPtr]++->[AsyncStreamServer] +[Talent]<-0..1[remote:StreamPtr] +[remote:StreamPtr]0..1->[AsyncStreamClient] [Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device] [Infos]^[InfosG3||ha_confs();parse()] -[Infos]^[InfosG3P||ha_confs();parse()] -[Talent]use->[<>] [Talent]->[InfosG3] -[SolarmanV5]use->[<>] -[SolarmanV5]->[InfosG3P] + 
+[Message|server_side:bool;mb:Modbus;ifc:AsyncIfc;node_id;header_valid:bool;header_len;data_len;unique_id;sug_area:str;new_data:dict;state:State;shutdown_started:bool;modbus_elms;mb_timer:Timer;mb_timeout;mb_first_timeout;modbus_polling:bool|_set_mqtt_timestamp();_timeout();_send_modbus_cmd(); end_modbus_cmd();close();inc_counter();dec_counter()] +[Message]use->[<>] [<>|_registry|close()] [<>]^-.-[<>] -[<>]^-.-[Message|node_id|inc_counter();dec_counter()] +[<>]^-.-[Message] [Message]^[Talent] -[Message]^[SolarmanV5] [Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()] -[Modbus]<1-has[SolarmanV5] -[Modbus]<1-has[Talent] +[Modbus]<0..1-has[Message] diff --git a/app/proxy_3.svg b/app/proxy_3.svg new file mode 100644 index 0000000..37fc587 --- /dev/null +++ b/app/proxy_3.svg @@ -0,0 +1,364 @@ + + + + + + +G + + + +A0 + + + +Example of +instantiation for a +GEN3PLUS inverter! + + + +A1 + +<<AbstractIterMeta>> + + +__iter__() + + + +A14 + +<<ProtocolIfc>> + +_registry + +close() + + + +A1->A14 + + + + + +A2 + +InverterG3P + +addr +remote:StreamPtr +local:StreamPtr + +create_remote() +close() + + + +A3 + +local:StreamPtr + + + +A2->A3 + + + + + + +A4 + +remote:StreamPtr + + + +A2->A4 + + + + + + +A8 + +AsyncStreamServer + +create_remote + +<async>server_loop() +<async>_async_forward() +<async>publish_outstanding_mqtt() +close() + + + +A3->A8 + + + + + + +A9 + +AsyncStreamClient + + +<async>client_loop() +<async>_async_forward()) + + + +A4->A9 + + +0..1 + + + +A5 + +<<AsyncIfc>> + + +set_node_id() +get_conn_no() +tx_add() +tx_flush() +tx_get() +tx_peek() +tx_log() +tx_clear() +tx_len() +fwd_add() +fwd_log() +rx_get() +rx_peek() +rx_log() +rx_clear() +rx_len() +rx_set_cb() +prot_set_timeout_cb() + + + +A6 + +AsyncIfcImpl + +fwd_fifo:ByteFifo +tx_fifo:ByteFifo +rx_fifo:ByteFifo +conn_no:Count +node_id +timeout_cb + + + +A5->A6 + + + + + +A7 + +AsyncStream + +reader +writer +addr +r_addr +l_addr + +<async>loop +disc() +close() +healthy() +__async_read() +__async_write() +__async_forward() + + + +A6->A7 + + + + + +A7->A8 + + + + + +A7->A9 + + + + + +A10 + +SolarmanV5 + +conn_no +addr +control +serial +snr +db:InfosG3P +switch + +msg_unknown() +healthy() +close() + + + +A10->A3 + + + + + + +A10->A4 + + +0..1 + + + +A12 + +InfosG3P + + +ha_confs() +parse() + + + +A10->A12 + + + + + +A11 + +Infos + +stat +new_stat_data +info_dev + +static_init() +dev_value() +inc_counter() +dec_counter() +ha_proxy_conf +ha_conf +ha_remove +update_db +set_db_def_value +get_db_value +ignore_this_device + + + +A11->A12 + + + + + +A13 + +Message + +server_side:bool +mb:Modbus +ifc:AsyncIfc +node_id +header_valid:bool +header_len +data_len +unique_id +sug_area:str +new_data:dict +state:State +shutdown_started:bool +modbus_elms +mb_timer:Timer +mb_timeout +mb_first_timeout +modbus_polling:bool + +_set_mqtt_timestamp() +_timeout() +_send_modbus_cmd() +<async> end_modbus_cmd() +close() +inc_counter() +dec_counter() + + + +A13->A5 + + +use + + + +A13->A10 + + + + + +A14->A13 + + + + + +A15 + +Modbus + +que +snd_handler +rsp_handler +timeout +max_retires +last_xxx +err +retry_cnt +req_pend +tim + +build_msg() +recv_req() +recv_resp() +close() + + + +A15->A13 + + +has +0..1 + + + diff --git a/app/proxy_3.yuml b/app/proxy_3.yuml new file mode 100644 index 0000000..499c93f --- /dev/null +++ b/app/proxy_3.yuml @@ -0,0 +1,42 @@ +// {type:class} +// {direction:topDown} +// {generate:true} + +[note: Example of instantiation for a GEN3PLUS 
inverter!{bg:cornsilk}] +[<>||__iter__()] + +[InverterG3P|addr;remote:StreamPtr;local:StreamPtr|create_remote();;close()] +[InverterG3P]++->[local:StreamPtr] +[InverterG3P]++->[remote:StreamPtr] + +[<>||set_node_id();get_conn_no();;tx_add();tx_flush();tx_get();tx_peek();tx_log();tx_clear();tx_len();;fwd_add();fwd_log();rx_get();rx_peek();rx_log();rx_clear();rx_len();rx_set_cb();;prot_set_timeout_cb()] +[AsyncIfcImpl|fwd_fifo:ByteFifo;tx_fifo:ByteFifo;rx_fifo:ByteFifo;conn_no:Count;node_id;timeout_cb] +[AsyncStream|reader;writer;addr;r_addr;l_addr|;loop;disc();close();healthy();;__async_read();__async_write();__async_forward()] +[AsyncStreamServer|create_remote|server_loop();_async_forward();publish_outstanding_mqtt();close()] +[AsyncStreamClient||client_loop();_async_forward())] +[<>]^-.-[AsyncIfcImpl] +[AsyncIfcImpl]^[AsyncStream] +[AsyncStream]^[AsyncStreamServer] +[AsyncStream]^[AsyncStreamClient] + +[SolarmanV5|conn_no;addr;;control;serial;snr;db:InfosG3P;switch|msg_unknown();;healthy();close()] +[SolarmanV5]<-++[local:StreamPtr] +[local:StreamPtr]++->[AsyncStreamServer] +[SolarmanV5]<-0..1[remote:StreamPtr] +[remote:StreamPtr]0..1->[AsyncStreamClient] + +[Infos|stat;new_stat_data;info_dev|static_init();dev_value();inc_counter();dec_counter();ha_proxy_conf;ha_conf;ha_remove;update_db;set_db_def_value;get_db_value;ignore_this_device] +[Infos]^[InfosG3P||ha_confs();parse()] + +[SolarmanV5]->[InfosG3P] + +[Message|server_side:bool;mb:Modbus;ifc:AsyncIfc;node_id;header_valid:bool;header_len;data_len;unique_id;sug_area:str;new_data:dict;state:State;shutdown_started:bool;modbus_elms;mb_timer:Timer;mb_timeout;mb_first_timeout;modbus_polling:bool|_set_mqtt_timestamp();_timeout();_send_modbus_cmd(); end_modbus_cmd();close();inc_counter();dec_counter()] +[Message]use->[<>] + +[<>|_registry|close()] +[<>]^-.-[<>] +[<>]^-.-[Message] +[Message]^[SolarmanV5] + +[Modbus|que;;snd_handler;rsp_handler;timeout;max_retires;last_xxx;err;retry_cnt;req_pend;tim|build_msg();recv_req();recv_resp();close()] +[Modbus]<0..1-has[Message] diff --git a/app/requirements-test.txt b/app/requirements-test.txt index d07ed29..bbf4e68 100644 --- a/app/requirements-test.txt +++ b/app/requirements-test.txt @@ -2,5 +2,6 @@ pytest pytest-asyncio pytest-cov + python-dotenv mock coverage \ No newline at end of file diff --git a/app/src/async_stream.py b/app/src/async_stream.py index 2650235..ec060b2 100644 --- a/app/src/async_stream.py +++ b/app/src/async_stream.py @@ -6,16 +6,10 @@ from asyncio import StreamReader, StreamWriter from typing import Self from itertools import count -if __name__ == "app.src.async_stream": - from app.src.proxy import Proxy - from app.src.byte_fifo import ByteFifo - from app.src.async_ifc import AsyncIfc - from app.src.infos import Infos -else: # pragma: no cover - from proxy import Proxy - from byte_fifo import ByteFifo - from async_ifc import AsyncIfc - from infos import Infos +from proxy import Proxy +from byte_fifo import ByteFifo +from async_ifc import AsyncIfc +from infos import Infos import gc @@ -221,7 +215,6 @@ class AsyncStream(AsyncIfcImpl): async def disc(self) -> None: """Async disc handler for graceful disconnect""" - self.remote = None if self._writer.is_closing(): return logger.debug(f'AsyncStream.disc() l{self.l_addr} | r{self.r_addr}') @@ -306,6 +299,14 @@ class AsyncStream(AsyncIfcImpl): f"Fwd Exception for {self.r_addr}:\n" f"{traceback.format_exc()}") + async def publish_outstanding_mqtt(self): + '''Publish all outstanding MQTT topics''' + try: + await self.async_publ_mqtt() + 
await Proxy._async_publ_mqtt_proxy_stat('proxy') + except Exception: + pass + class AsyncStreamServer(AsyncStream): def __init__(self, reader: StreamReader, writer: StreamWriter, @@ -355,14 +356,6 @@ class AsyncStreamServer(AsyncStream): self.remote.ifc._writer.write(self.fwd_fifo.get()) await self.remote.ifc._writer.drain() - async def publish_outstanding_mqtt(self): - '''Publish all outstanding MQTT topics''' - try: - await self.async_publ_mqtt() - await Proxy._async_publ_mqtt_proxy_stat('proxy') - except Exception: - pass - class AsyncStreamClient(AsyncStream): def __init__(self, reader: StreamReader, writer: StreamWriter, @@ -370,6 +363,11 @@ class AsyncStreamClient(AsyncStream): AsyncStream.__init__(self, reader, writer, rstream) self.close_cb = close_cb + async def disc(self) -> None: + logging.debug('AsyncStreamClient.disc()') + self.remote = None + await super().disc() + def close(self) -> None: logging.debug('AsyncStreamClient.close()') self.close_cb = None @@ -377,7 +375,11 @@ class AsyncStreamClient(AsyncStream): async def client_loop(self, _: str) -> None: '''Loop for receiving messages from the TSUN cloud (client-side)''' + Infos.inc_counter('Cloud_Conn_Cnt') + await self.publish_outstanding_mqtt() await self.loop() + Infos.dec_counter('Cloud_Conn_Cnt') + await self.publish_outstanding_mqtt() logger.info(f'[{self.node_id}:{self.conn_no}] ' 'Client loop stopped for' f' l{self.l_addr}') diff --git a/app/src/byte_fifo.py b/app/src/byte_fifo.py index 27d2512..959eab2 100644 --- a/app/src/byte_fifo.py +++ b/app/src/byte_fifo.py @@ -1,8 +1,4 @@ - -if __name__ == "app.src.byte_fifo": - from app.src.messages import hex_dump_str, hex_dump_memory -else: # pragma: no cover - from messages import hex_dump_str, hex_dump_memory +from messages import hex_dump_str, hex_dump_memory class ByteFifo: diff --git a/app/src/config.py b/app/src/cnf/config.py similarity index 53% rename from app/src/config.py rename to app/src/cnf/config.py index 02138e7..b3ed188 100644 --- a/app/src/config.py +++ b/app/src/cnf/config.py @@ -1,19 +1,48 @@ -'''Config module handles the proxy configuration in the config.toml file''' +'''Config module handles the proxy configuration''' import shutil -import tomllib import logging +from abc import ABC, abstractmethod from schema import Schema, And, Or, Use, Optional +class ConfigIfc(ABC): + '''Abstract base class for config readers''' + def __init__(self): + Config.add(self) + + @abstractmethod + def get_config(self) -> dict: # pragma: no cover + '''get the unverified config from the reader''' + pass + + @abstractmethod + def descr(self) -> str: # pragma: no cover + '''return a description of the source, e.g. the file name''' + pass + + def _extend_key(self, conf, key, val): + '''split a dotted dict key into a hierarchical dict tree ''' + lst = key.split('.') + d = conf + for i, idx in enumerate(lst, 1): # pragma: no branch + if i == len(lst): + d[idx] = val + break + if idx not in d: + d[idx] = {} + d = d[idx] + + class Config(): - '''Static class Config is reads and sanitize the config. + '''Static class Config builds and sanitizes the internal config dictionary. - Read config.toml file and sanitize it with read(). - Get named parts of the config with get()''' + Using config readers, a partial configuration is added to the config. + Config readers are derived from the abstract ConfigIfc class. + When a config reader is instantiated, its `get_config` method is + called automatically and afterwards the config will be merged.
+ ''' - act_config = {} - def_config = {} conf_schema = Schema({ 'tsun': { 'enabled': Use(bool), @@ -28,8 +57,10 @@ class Config(): 'mqtt': { 'host': Use(str), 'port': And(Use(int), lambda n: 1024 <= n <= 65535), - 'user': And(Use(str), Use(lambda s: s if len(s) > 0 else None)), - 'passwd': And(Use(str), Use(lambda s: s if len(s) > 0 else None)) + 'user': Or(None, And(Use(str), + Use(lambda s: s if len(s) > 0 else None))), + 'passwd': Or(None, And(Use(str), + Use(lambda s: s if len(s) > 0 else None))) }, 'ha': { 'auto_conf_prefix': Use(str), @@ -57,7 +88,8 @@ class Config(): Optional('client_mode'): { 'host': Use(str), Optional('port', default=8899): - And(Use(int), lambda n: 1024 <= n <= 65535) + And(Use(int), lambda n: 1024 <= n <= 65535), + Optional('forward', default=False): Use(bool), }, Optional('modbus_polling', default=True): Use(bool), Optional('suggested_area', default=""): Use(str), @@ -92,7 +124,13 @@ class Config(): ) @classmethod - def class_init(cls) -> None | str: # pragma: no cover + def init(cls, def_reader: ConfigIfc) -> None | str: + '''Initialise the Proxy-Config + +Copy the internal default config file into the config directory +and initialise the Config with the default configuration ''' + cls.err = None + cls.def_config = {} try: # make the default config transparaent by copying it # in the config.example file @@ -102,66 +140,58 @@ class Config(): "config/config.example.toml") except Exception: pass - err_str = cls.read() - del cls.conf_schema - return err_str - - @classmethod - def _read_config_file(cls) -> dict: # pragma: no cover - usr_config = {} + # read example config file as default configuration try: - with open("config/config.toml", "rb") as f: - usr_config = tomllib.load(f) + def_config = def_reader.get_config() + cls.def_config = cls.conf_schema.validate(def_config) + logging.info(f'Read from {def_reader.descr()} => ok') except Exception as error: - err = f'Config.read: {error}' - logging.error(err) - logging.info( - '\n To create the missing config.toml file, ' - 'you can rename the template config.example.toml\n' - ' and customize it for your scenario.\n') - return usr_config + cls.err = f'Config.read: {error}' + logging.error( + f"Can't read from {def_reader.descr()} => error\n {error}") + + cls.act_config = cls.def_config.copy() @classmethod - def read(cls, path='') -> None | str: - '''Read config file, merge it with the default config + def add(cls, reader: ConfigIfc): + '''Merge the config from the Config Reader into the config + +Checks if a default config exists. If no default configuration exists, +the Config.init method has not yet been called.This is normal for the very +first Config Reader which creates the default config and must be ignored +here. 
The default config reader is handled in the Config.init method''' + if hasattr(cls, 'def_config'): + cls.__parse(reader) + + @classmethod + def get_error(cls) -> None | str: + '''return the last error as a string or None if there is no error''' + return cls.err + + @classmethod + def __parse(cls, reader) -> None | str: + '''Read config from the reader, merge it with the default config and sanitize the result''' - err = None - config = {} - logger = logging.getLogger('data') - + res = 'ok' try: - # read example config file as default configuration - cls.def_config = {} - with open(f"{path}default_config.toml", "rb") as f: - def_config = tomllib.load(f) - cls.def_config = cls.conf_schema.validate(def_config) - - # overwrite the default values, with values from - # the config.toml file - usr_config = cls._read_config_file() - - # merge the default and the user config - config = def_config.copy() + rd_config = reader.get_config() + config = cls.act_config.copy() for key in ['tsun', 'solarman', 'mqtt', 'ha', 'inverters', 'gen3plus']: - if key in usr_config: - config[key] |= usr_config[key] - - try: - cls.act_config = cls.conf_schema.validate(config) - except Exception as error: - err = f'Config.read: {error}' - logging.error(err) - - # logging.debug(f'Readed config: "{cls.act_config}" ') + if key in rd_config: + config[key] = config[key] | rd_config[key] + cls.act_config = cls.conf_schema.validate(config) + except FileNotFoundError: + res = 'n/a' except Exception as error: - err = f'Config.read: {error}' - logger.error(err) - cls.act_config = {} + cls.err = f'error: {error}' + logging.error( + f"Can't read from {reader.descr()} => error\n {error}") - return err + logging.info(f'Read from {reader.descr()} => {res}') + return cls.err @classmethod def get(cls, member: str = None): diff --git a/app/src/cnf/config_read_env.py b/app/src/cnf/config_read_env.py new file mode 100644 index 0000000..693d7cc --- /dev/null +++ b/app/src/cnf/config_read_env.py @@ -0,0 +1,25 @@ +'''Config Reader module which handles config values from the environment''' + +import os +from cnf.config import ConfigIfc + + +class ConfigReadEnv(ConfigIfc): + '''Reader for environment values of the configuration''' + + def get_config(self) -> dict: + conf = {} + data = [ + ('mqtt.host', 'MQTT_HOST'), + ('mqtt.port', 'MQTT_PORT'), + ('mqtt.user', 'MQTT_USER'), + ('mqtt.passwd', 'MQTT_PASSWORD'), + ] + for key, env_var in data: + val = os.getenv(env_var) + if val: + self._extend_key(conf, key, val) + return conf + + def descr(self): + return "Read environment" diff --git a/app/src/cnf/config_read_json.py b/app/src/cnf/config_read_json.py new file mode 100644 index 0000000..785dae7 --- /dev/null +++ b/app/src/cnf/config_read_json.py @@ -0,0 +1,46 @@ +'''Config Reader module which handles *.json config files''' + +import json +from cnf.config import ConfigIfc + + +class ConfigReadJson(ConfigIfc): + '''Reader for json config files''' + def __init__(self, cnf_file='/data/options.json'): + '''Read a json file and add the settings to the config''' + if not isinstance(cnf_file, str): + return + self.cnf_file = cnf_file + super().__init__() + + def convert_inv(self, conf, inv): + if 'serial' in inv: + snr = inv['serial'] + del inv['serial'] + conf[snr] = {} + + for key, val in inv.items(): + self._extend_key(conf[snr], key, val) + + def convert_inv_arr(self, conf, key, val: list): + if key not in conf: + conf[key] = {} + for elm in val: + self.convert_inv(conf[key], elm) + + def convert_to_obj(self, data): + conf = {} + for key, val in 
data.items(): + if key == 'inverters' and isinstance(val, list): + self.convert_inv_arr(conf, key, val) + else: + self._extend_key(conf, key, val) + return conf + + def get_config(self) -> dict: + with open(self.cnf_file) as f: + data = json.load(f) + return self.convert_to_obj(data) + + def descr(self): + return self.cnf_file diff --git a/app/src/cnf/config_read_toml.py b/app/src/cnf/config_read_toml.py new file mode 100644 index 0000000..e64fd28 --- /dev/null +++ b/app/src/cnf/config_read_toml.py @@ -0,0 +1,21 @@ +'''Config Reader module which handles *.toml config files''' + +import tomllib +from cnf.config import ConfigIfc + + +class ConfigReadToml(ConfigIfc): + '''Reader for toml config files''' + def __init__(self, cnf_file): + '''Read a toml file and add the settings to the config''' + if not isinstance(cnf_file, str): + return + self.cnf_file = cnf_file + super().__init__() + + def get_config(self) -> dict: + with open(self.cnf_file, "rb") as f: + return tomllib.load(f) + + def descr(self): + return self.cnf_file diff --git a/app/src/gen3/infos_g3.py b/app/src/gen3/infos_g3.py index 7c62eac..efa220c 100644 --- a/app/src/gen3/infos_g3.py +++ b/app/src/gen3/infos_g3.py @@ -3,10 +3,7 @@ import struct import logging from typing import Generator -if __name__ == "app.src.gen3.infos_g3": - from app.src.infos import Infos, Register -else: # pragma: no cover - from infos import Infos, Register +from infos import Infos, Register class RegisterMap: @@ -70,24 +67,21 @@ class RegisterMap: 0x000d0020: {'reg': Register.COLLECT_INTERVAL}, 0x000cf850: {'reg': Register.DATA_UP_INTERVAL}, 0x000c7f38: {'reg': Register.COMMUNICATION_TYPE}, - 0x00000191: {'reg': Register.EVENT_401}, - 0x00000192: {'reg': Register.EVENT_402}, - 0x00000193: {'reg': Register.EVENT_403}, - 0x00000194: {'reg': Register.EVENT_404}, - 0x00000195: {'reg': Register.EVENT_405}, - 0x00000196: {'reg': Register.EVENT_406}, - 0x00000197: {'reg': Register.EVENT_407}, - 0x00000198: {'reg': Register.EVENT_408}, - 0x00000199: {'reg': Register.EVENT_409}, - 0x0000019a: {'reg': Register.EVENT_410}, - 0x0000019b: {'reg': Register.EVENT_411}, - 0x0000019c: {'reg': Register.EVENT_412}, - 0x0000019d: {'reg': Register.EVENT_413}, - 0x0000019e: {'reg': Register.EVENT_414}, - 0x0000019f: {'reg': Register.EVENT_415}, - 0x000001a0: {'reg': Register.EVENT_416}, + 0x00000190: {'reg': Register.EVENT_ALARM}, + 0x000001f4: {'reg': Register.EVENT_FAULT}, + 0x00000258: {'reg': Register.EVENT_BF1}, + 0x000002bc: {'reg': Register.EVENT_BF2}, 0x00000064: {'reg': Register.INVERTER_STATUS}, + + 0x00000fa0: {'reg': Register.BOOT_STATUS}, + 0x00001004: {'reg': Register.DSP_STATUS}, + 0x000010cc: {'reg': Register.WORK_MODE}, + 0x000011f8: {'reg': Register.OUTPUT_SHUTDOWN}, 0x0000125c: {'reg': Register.MAX_DESIGNED_POWER}, + 0x000012c0: {'reg': Register.RATED_LEVEL}, + 0x00001324: {'reg': Register.INPUT_COEFFICIENT, 'ratio': 100/1024}, + 0x00001388: {'reg': Register.GRID_VOLT_CAL_COEF}, + 0x00002710: {'reg': Register.PROD_COMPL_TYPE}, 0x00003200: {'reg': Register.OUTPUT_COEFFICIENT, 'ratio': 100/1024}, } @@ -183,11 +177,8 @@ class InfosG3(Infos): i += 1 def __modify_val(self, row, result): - if row: - if 'eval' in row: - result = eval(row['eval']) - if 'ratio' in row: - result = round(result * row['ratio'], 2) + if row and 'ratio' in row: + result = round(result * row['ratio'], 2) return result def __store_result(self, addr, result, info_id, node_id): diff --git a/app/src/gen3/inverter_g3.py b/app/src/gen3/inverter_g3.py index fd09a8f..efaeca0 100644 --- 
a/app/src/gen3/inverter_g3.py +++ b/app/src/gen3/inverter_g3.py @@ -1,10 +1,7 @@ from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.gen3.inverter_g3": - from app.src.inverter_base import InverterBase - from app.src.gen3.talent import Talent -else: # pragma: no cover - from inverter_base import InverterBase - from gen3.talent import Talent + +from inverter_base import InverterBase +from gen3.talent import Talent class InverterG3(InverterBase): diff --git a/app/src/gen3/talent.py b/app/src/gen3/talent.py index 522f4d0..efb080a 100644 --- a/app/src/gen3/talent.py +++ b/app/src/gen3/talent.py @@ -4,22 +4,12 @@ from zoneinfo import ZoneInfo from datetime import datetime from tzlocal import get_localzone -if __name__ == "app.src.gen3.talent": - from app.src.async_ifc import AsyncIfc - from app.src.messages import Message, State - from app.src.modbus import Modbus - from app.src.my_timer import Timer - from app.src.config import Config - from app.src.gen3.infos_g3 import InfosG3 - from app.src.infos import Register -else: # pragma: no cover - from async_ifc import AsyncIfc - from messages import Message, State - from modbus import Modbus - from my_timer import Timer - from config import Config - from gen3.infos_g3 import InfosG3 - from infos import Register +from async_ifc import AsyncIfc +from messages import Message, State +from modbus import Modbus +from cnf.config import Config +from gen3.infos_g3 import InfosG3 +from infos import Register logger = logging.getLogger('msg') @@ -42,19 +32,18 @@ class Control: class Talent(Message): - MB_START_TIMEOUT = 40 - MB_REGULAR_TIMEOUT = 60 TXT_UNKNOWN_CTRL = 'Unknown Ctrl' def __init__(self, addr, ifc: "AsyncIfc", server_side: bool, client_mode: bool = False, id_str=b''): - super().__init__(server_side, self.send_modbus_cb, mb_timeout=15) + super().__init__('G3', ifc, server_side, self.send_modbus_cb, + mb_timeout=15) ifc.rx_set_cb(self.read) ifc.prot_set_timeout_cb(self._timeout) ifc.prot_set_init_new_client_conn_cb(self._init_new_client_conn) ifc.prot_set_update_header_cb(self._update_header) + self.addr = addr - self.ifc = ifc self.conn_no = ifc.get_conn_no() self.await_conn_resp_cnt = 0 self.id_str = id_str @@ -86,38 +75,17 @@ class Talent(Message): 0x87: self.get_modbus_log_lvl, 0x04: logging.INFO, } - self.modbus_elms = 0 # for unit tests - self.node_id = 'G3' # will be overwritten in __set_serial_no - self.mb_timer = Timer(self.mb_timout_cb, self.node_id) - self.mb_timeout = self.MB_REGULAR_TIMEOUT - self.mb_first_timeout = self.MB_START_TIMEOUT - self.modbus_polling = False ''' Our puplic methods ''' def close(self) -> None: logging.debug('Talent.close()') - if self.server_side: - # set inverter state to offline, if output power is very low - logging.debug('close power: ' - f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') - if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: - self.db.set_db_def_value(Register.INVERTER_STATUS, 0) - self.new_data['env'] = True - # we have references to methods of this class in self.switch # so we have to erase self.switch, otherwise this instance can't be # deallocated by the garbage collector ==> we get a memory leak self.switch.clear() self.log_lvl.clear() - self.state = State.closed - self.mb_timer.close() - self.ifc.rx_set_cb(None) - self.ifc.prot_set_timeout_cb(None) - self.ifc.prot_set_init_new_client_conn_cb(None) - self.ifc.prot_set_update_header_cb(None) - self.ifc = None super().close() def __set_serial_no(self, serial_no: str): @@ -135,6 +103,8 @@ class Talent(Message): 
self.modbus_polling = inv['modbus_polling'] logger.debug(f'SerialNo {serial_no} allowed! area:{self.sug_area}') # noqa: E501 self.db.set_pv_module_details(inv) + if self.mb: + self.mb.set_node_id(self.node_id) else: self.node_id = '' self.sug_area = '' @@ -203,16 +173,6 @@ class Talent(Message): self.ifc.tx_log(log_lvl, f'Send Modbus {state}:{self.addr}:') self.ifc.tx_flush() - def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - if self.state != State.up: - logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' - ' as the state is not UP') - return - self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) - - async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - self._send_modbus_cmd(func, addr, val, log_lvl) - def mb_timout_cb(self, exp_cnt): self.mb_timer.start(self.mb_timeout) @@ -590,8 +550,7 @@ class Talent(Message): return for key, update, _ in self.mb.recv_resp(self.db, data[ - hdr_len:], - self.node_id): + hdr_len:]): if update: self._set_mqtt_timestamp(key, self._utc()) self.new_data[key] = True diff --git a/app/src/gen3plus/infos_g3p.py b/app/src/gen3plus/infos_g3p.py index f31f17b..417487a 100644 --- a/app/src/gen3plus/infos_g3p.py +++ b/app/src/gen3plus/infos_g3p.py @@ -1,39 +1,57 @@ -import struct from typing import Generator -if __name__ == "app.src.gen3plus.infos_g3p": - from app.src.infos import Infos, Register, ProxyMode -else: # pragma: no cover - from infos import Infos, Register, ProxyMode +from infos import Infos, Register, ProxyMode, Fmt class RegisterMap: # make the class read/only by using __slots__ __slots__ = () + FMT_2_16BIT_VAL = '!HH' + FMT_3_16BIT_VAL = '!HHH' + FMT_4_16BIT_VAL = '!HHHH' + map = { # 0x41020007: {'reg': Register.DEVICE_SNR, 'fmt': '>12)}.{(result>>8)&0xf}.{(result>>4)&0xf}{result&0xf}'"}, # noqa: E501 + 0x420100c2: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501 + 0x420100c4: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501 + 0x420100c6: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501 + 0x420100c8: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501 + 0x420100ca: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501 + 0x420100cc: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501 + # 0x420100ce + 0x420100d0: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501 0x420100d2: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 0x420100d4: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 0x420100d6: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x420100d8: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'eval': 'result-40'}, # noqa: E501 - # 0x420100d8: {'reg': Register.INVERTER_TEMP, 'fmt': '!H'}, # noqa: E501 + 0x420100d8: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501 + # 0x420100da 0x420100dc: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 0x420100de: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 0x420100e0: {'reg': Register.PV1_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 @@ -58,12 +76,39 @@ class RegisterMap: 0x4201010c: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 0x42010110: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 0x42010112: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 - 0x42010126: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 + 0x42010116: {'reg': 
Register.INV_UNKNOWN_1, 'fmt': '!H'}, # noqa: E501 + # Start MODBUS Block: 0x2000 (R/W Config Paramaneters) + 0x42010118: {'reg': Register.BOOT_STATUS, 'fmt': '!H'}, + 0x4201011a: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, + 0x4201011c: {'reg': None, 'fmt': '!H', 'const': 1}, # noqa: E501 + 0x4201011e: {'reg': Register.WORK_MODE, 'fmt': '!H'}, + 0x42010124: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'}, + 0x42010126: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H'}, + 0x42010128: {'reg': Register.RATED_LEVEL, 'fmt': '!H'}, + 0x4201012a: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 + 0x4201012c: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'}, + 0x4201012e: {'reg': None, 'fmt': '!H', 'const': 1024}, # noqa: E501 + 0x42010130: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1024, 1, 0xffff, 1)}, # noqa: E501 + 0x42010138: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'}, + 0x4201013a: {'reg': None, 'fmt': FMT_3_16BIT_VAL, 'const': (0x68, 0x68, 0x500)}, # noqa: E501 + 0x42010140: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x9cd, 0x7b6, 0x139c, 0x1324)}, # noqa: E501 + 0x42010148: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (1, 0x7ae, 0x40f, 0x41)}, # noqa: E501 + 0x42010150: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0xf, 0xa64, 0xa64, 0x6)}, # noqa: E501 + 0x42010158: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x6, 0x9f6, 0x128c, 0x128c)}, # noqa: E501 + 0x42010160: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x10, 0x10, 0x1452, 0x1452)}, # noqa: E501 + 0x42010168: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x10, 0x10, 0x151, 0x5)}, # noqa: E501 0x42010170: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 + 0x42010172: {'reg': None, 'fmt': FMT_3_16BIT_VAL, 'const': (0x1, 0x139c, 0xfa0)}, # noqa: E501 + 0x42010178: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x4e, 0x66, 0x3e8, 0x400)}, # noqa: E501 + 0x42010180: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x9ce, 0x7a8, 0x139c, 0x1326)}, # noqa: E501 + 0x42010188: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x0, 0x0, 0x0, 0)}, # noqa: E501 + 0x42010190: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0x0, 0x0, 1024, 1024)}, # noqa: E501 + 0x42010198: {'reg': None, 'fmt': FMT_4_16BIT_VAL, 'const': (0, 0, 0xffff, 0)}, # noqa: E501 + 0x420101a0: {'reg': None, 'fmt': FMT_2_16BIT_VAL, 'const': (0x0, 0x0)}, # noqa: E501 + 0xffffff02: {'reg': Register.POLLING_INTERVAL}, # 0x4281001c: {'reg': Register.POWER_ON_TIME, 'fmt': '> 16) & 0xff + mtype = (idx >> 24) & 0xff + if ftype != rcv_ftype or mtype != msg_type: + continue + if not isinstance(row, dict): + continue + if 'const' in row: + val = row['const'] + else: + info_id = row['reg'] + val = self.get_db_value(info_id) + if not val: + continue + Fmt.set_value(buf, addr, row, val) + return buf diff --git a/app/src/gen3plus/inverter_g3p.py b/app/src/gen3plus/inverter_g3p.py index cc27bb4..f3680c9 100644 --- a/app/src/gen3plus/inverter_g3p.py +++ b/app/src/gen3plus/inverter_g3p.py @@ -1,15 +1,15 @@ from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.gen3plus.inverter_g3p": - from app.src.inverter_base import InverterBase - from app.src.gen3plus.solarman_v5 import SolarmanV5 -else: # pragma: no cover - from inverter_base import InverterBase - from gen3plus.solarman_v5 import SolarmanV5 +from inverter_base import InverterBase +from gen3plus.solarman_v5 import SolarmanV5 +from gen3plus.solarman_emu import SolarmanEmu class InverterG3P(InverterBase): def __init__(self, reader: 
StreamReader, writer: StreamWriter, client_mode: bool = False): + remote_prot = None + if client_mode: + remote_prot = SolarmanEmu super().__init__(reader, writer, 'solarman', - SolarmanV5, client_mode) + SolarmanV5, client_mode, remote_prot) diff --git a/app/src/gen3plus/solarman_emu.py b/app/src/gen3plus/solarman_emu.py new file mode 100644 index 0000000..66035bb --- /dev/null +++ b/app/src/gen3plus/solarman_emu.py @@ -0,0 +1,138 @@ +import logging +import struct + +from async_ifc import AsyncIfc +from gen3plus.solarman_v5 import SolarmanBase +from my_timer import Timer +from infos import Register + +logger = logging.getLogger('msg') + + +class SolarmanEmu(SolarmanBase): + def __init__(self, addr, ifc: "AsyncIfc", + server_side: bool, client_mode: bool): + super().__init__(addr, ifc, server_side=False, + _send_modbus_cb=None, + mb_timeout=8) + logging.debug('SolarmanEmu.init()') + self.db = ifc.remote.stream.db + self.snr = ifc.remote.stream.snr + self.hb_timeout = 60 + '''actual heartbeat timeout from the last response message''' + self.data_up_inv = self.db.get_db_value(Register.DATA_UP_INTERVAL) + '''time interval for getting new MQTT data messages''' + self.hb_timer = Timer(self.send_heartbeat_cb, self.node_id) + self.data_timer = Timer(self.send_data_cb, self.node_id) + self.last_sync = self._emu_timestamp() + '''timestamp when we sent the last sync message (4110)''' + self.pkt_cnt = 0 + '''last sent packet number''' + + self.switch = { + + 0x4210: 'msg_data_ind', # real time data + 0x1210: self.msg_response, # at least every 5 minutes + + 0x4710: 'msg_hbeat_ind', # heartbeat + 0x1710: self.msg_response, # every 2 minutes + + 0x4110: 'msg_dev_ind', # device data, sync start + 0x1110: self.msg_response, # every 3 hours + + } + + self.log_lvl = { + + 0x4110: logging.INFO, # device data, sync start + 0x1110: logging.INFO, # every 3 hours + + 0x4210: logging.INFO, # real time data + 0x1210: logging.INFO, # at least every 5 minutes + + 0x4710: logging.DEBUG, # heartbeat + 0x1710: logging.DEBUG, # every 2 minutes + + } + + ''' + Our public methods + ''' + def close(self) -> None: + logging.info('SolarmanEmu.close()') + # we have references to methods of this class in self.switch + # so we have to erase self.switch, otherwise this instance can't be + # deallocated by the garbage collector ==> we get a memory leak + self.switch.clear() + self.log_lvl.clear() + self.hb_timer.close() + self.data_timer.close() + self.db = None + super().close() + + def _set_serial_no(self, snr: int): + logging.debug(f'SolarmanEmu._set_serial_no, snr: {snr}') + self.unique_id = str(snr) + + def _init_new_client_conn(self) -> bool: + logging.debug('SolarmanEmu.init_new()') + self.data_timer.start(self.data_up_inv) + return False + + def next_pkt_cnt(self): + '''get the next packet number''' + self.pkt_cnt = (self.pkt_cnt + 1) & 0xffffffff + return self.pkt_cnt + + def seconds_since_last_sync(self): + '''get seconds since last 0x4110 message was sent''' + return self._emu_timestamp() - self.last_sync + + def send_heartbeat_cb(self, exp_cnt): + '''send a heartbeat to the TSUN cloud''' + self._build_header(0x4710) + self.ifc.tx_add(struct.pack(' float: + '''process all received messages in the _recv_buffer''' + self._read() + while True: + if not self.header_valid: + self.__parse_header(self.ifc.rx_peek(), + self.ifc.rx_len()) + + if self.header_valid and self.ifc.rx_len() >= \ + (self.header_len + self.data_len+2): + self.__process_complete_received_msg() + self.__flush_recv_msg() + else: + return 0 # wait 0s before
sending a response + ''' + Our public methods + ''' + def _flow_str(self, server_side: bool, type: str): # noqa: F821 + switch = { + 'rx': ' <', + 'tx': ' >', + 'forwrd': '<< ', + 'drop': ' xx', + 'rxS': '> ', + 'txS': '< ', + 'forwrdS': ' >>', + 'dropS': 'xx ', + } + if server_side: + type += 'S' + return switch.get(type, '???') + + def get_fnc_handler(self, ctrl): + fnc = self.switch.get(ctrl, self.msg_unknown) + if callable(fnc): + return fnc, repr(fnc.__name__) + else: + return self.msg_unknown, repr(fnc) + + def _build_header(self, ctrl) -> None: + '''build header for new transmit message''' + self.send_msg_ofs = self.ifc.tx_len() + + self.ifc.tx_add(struct.pack( + ' None: + '''finish the transmit message, set lenght and checksum''' + _len = self.ifc.tx_len() - self.send_msg_ofs + struct.pack_into(' None: + + if (buf_len < self.header_len): # enough bytes for complete header? + return + + result = struct.unpack_from(' bool: + crc = buf[self.data_len+11] + stop = buf[self.data_len+12] + if stop != 0x15: + hex_dump_memory(logging.ERROR, + 'Drop packet w invalid stop byte from ' + f'{self.addr}:', buf, buf_len) + self.inc_counter('Invalid_Msg_Format') + if self.ifc.rx_len() > (self.data_len+13): + next_start = buf[self.data_len+13] + if next_start != 0xa5: + # erase broken recv buffer + self.ifc.rx_clear() + + return False + + check = sum(buf[1:buf_len-2]) & 0xff + if check != crc: + self.inc_counter('Invalid_Msg_Format') + logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}' + f' Stop:{int(stop):#02x}') + # start & stop byte are valid, discard only this message + return False + + return True + + def __flush_recv_msg(self) -> None: + self.ifc.rx_get(self.header_len + self.data_len+2) + self.header_valid = False + + def __dispatch_msg(self) -> None: + _fnc, _str = self.get_fnc_handler(self.control) + if self.unique_id: + logger.info(self._flow_str(self.server_side, 'rx') + + f' Ctl: {int(self.control):#04x}' + + f' Msg: {_str}') + _fnc() + else: + logger.info(self._flow_str(self.server_side, 'drop') + + f' Ctl: {int(self.control):#04x}' + + f' Msg: {_str}') + + ''' + Message handler methods + ''' + def msg_response(self): + data = self.ifc.rx_peek()[self.header_len:] + result = struct.unpack_from(' None: logging.debug('Solarman.close()') - if self.server_side: - # set inverter state to offline, if output power is very low - logging.debug('close power: ' - f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') - if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: - self.db.set_db_def_value(Register.INVERTER_STATUS, 0) - self.new_data['env'] = True - # we have references to methods of this class in self.switch # so we have to erase self.switch, otherwise this instance can't be # deallocated by the garbage collector ==> we get a memory leak self.switch.clear() self.log_lvl.clear() - self.state = State.closed - self.mb_timer.close() - self.ifc.rx_set_cb(None) - self.ifc.prot_set_timeout_cb(None) - self.ifc.prot_set_init_new_client_conn_cb(None) - self.ifc.prot_set_update_header_cb(None) - self.ifc = None super().close() async def send_start_cmd(self, snr: int, host: str, + forward: bool, start_timeout=MB_CLIENT_DATA_UP): self.no_forwarding = True + self.establish_inv_emu = forward self.snr = snr - self.__set_serial_no(snr) + self._set_serial_no(snr) self.mb_timeout = start_timeout self.db.set_db_def_value(Register.IP_ADDRESS, host) self.db.set_db_def_value(Register.POLLING_INTERVAL, self.mb_timeout) + self.db.set_db_def_value(Register.DATA_UP_INTERVAL, + 300) + 
self.db.set_db_def_value(Register.COLLECT_INTERVAL, + 1) self.db.set_db_def_value(Register.HEARTBEAT_INTERVAL, 120) + self.db.set_db_def_value(Register.SENSOR_LIST, + Fmt.hex4((self.sensor_list, ))) self.new_data['controller'] = True self.state = State.up @@ -202,14 +367,25 @@ class SolarmanV5(Message): self.db.set_db_def_value(Register.POLLING_INTERVAL, self.mb_timeout) + def establish_emu(self): + _len = 223 + build_msg = self.db.build(_len, 0x41, 2) + struct.pack_into( + ' float: - '''process all received messages in the _recv_buffer''' - self._read() - while True: - if not self.header_valid: - self.__parse_header(self.ifc.rx_peek(), - self.ifc.rx_len()) - - if self.header_valid and self.ifc.rx_len() >= \ - (self.header_len + self.data_len+2): - self.__process_complete_received_msg() - self.__flush_recv_msg() - else: - return 0 # wait 0s before sending a response - - def __process_complete_received_msg(self): - log_lvl = self.log_lvl.get(self.control, logging.WARNING) - if callable(log_lvl): - log_lvl = log_lvl() - self.ifc.rx_log(log_lvl, f'Received from {self.addr}:') - # self._recv_buffer, self.header_len + - # self.data_len+2) - if self.__trailer_is_ok(self.ifc.rx_peek(), self.header_len - + self.data_len + 2): - if self.state == State.init: - self.state = State.received - self.__set_serial_no(self.snr) - self.__dispatch_msg() - def forward(self, buffer, buflen) -> None: '''add the actual receive msg to the forwarding queue''' if self.no_forwarding: @@ -278,171 +426,37 @@ class SolarmanV5(Message): self.ifc.fwd_add(buffer[:buflen]) self.ifc.fwd_log(logging.DEBUG, 'Store for forwarding:') - fnc = self.switch.get(self.control, self.msg_unknown) - logger.info(self.__flow_str(self.server_side, 'forwrd') + + _, _str = self.get_fnc_handler(self.control) + logger.info(self._flow_str(self.server_side, 'forwrd') + f' Ctl: {int(self.control):#04x}' - f' Msg: {fnc.__name__!r}') + f' Msg: {_str}') def _init_new_client_conn(self) -> bool: return False - ''' - Our private methods - ''' - def __flow_str(self, server_side: bool, type: str): # noqa: F821 - switch = { - 'rx': ' <', - 'tx': ' >', - 'forwrd': '<< ', - 'drop': ' xx', - 'rxS': '> ', - 'txS': '< ', - 'forwrdS': ' >>', - 'dropS': 'xx ', - } - if server_side: - type += 'S' - return switch.get(type, '???') - - def _timestamp(self): - # utc as epoche - return int(time.time()) # pragma: no cover - def _heartbeat(self) -> int: return 60 # pragma: no cover - def __parse_header(self, buf: bytes, buf_len: int) -> None: - - if (buf_len < self.header_len): # enough bytes for complete header? 
- return - - result = struct.unpack_from(' bool: - crc = buf[self.data_len+11] - stop = buf[self.data_len+12] - if stop != 0x15: - hex_dump_memory(logging.ERROR, - 'Drop packet w invalid stop byte from ' - f'{self.addr}:', buf, buf_len) - self.inc_counter('Invalid_Msg_Format') - if self.ifc.rx_len() > (self.data_len+13): - next_start = buf[self.data_len+13] - if next_start != 0xa5: - # erase broken recv buffer - self.ifc.rx_clear() - - return False - - check = sum(buf[1:buf_len-2]) & 0xff - if check != crc: - self.inc_counter('Invalid_Msg_Format') - logger.debug(f'CRC {int(crc):#02x} {int(check):#08x}' - f' Stop:{int(stop):#02x}') - # start & stop byte are valid, discard only this message - return False - - return True - - def __build_header(self, ctrl) -> None: - '''build header for new transmit message''' - self.send_msg_ofs = self.ifc.tx_len() - - self.ifc.tx_add(struct.pack( - ' None: - '''finish the transmit message, set lenght and checksum''' - _len = self.ifc.tx_len() - self.send_msg_ofs - struct.pack_into(' None: - fnc = self.switch.get(self.control, self.msg_unknown) - if self.unique_id: - logger.info(self.__flow_str(self.server_side, 'rx') + - f' Ctl: {int(self.control):#04x}' + - f' Msg: {fnc.__name__!r}') - fnc() - else: - logger.info(self.__flow_str(self.server_side, 'drop') + - f' Ctl: {int(self.control):#04x}' + - f' Msg: {fnc.__name__!r}') - - def __flush_recv_msg(self) -> None: - self.ifc.rx_get(self.header_len + self.data_len+2) - self.header_valid = False - def __send_ack_rsp(self, msgtype, ftype, ack=1): - self.__build_header(msgtype) + self._build_header(msgtype) self.ifc.tx_add(struct.pack(' None: - if self.state != State.up: - logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' - ' as the state is not UP') - return - self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) - - async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: - self._send_modbus_cmd(func, addr, val, log_lvl) - def mb_timout_cb(self, exp_cnt): self.mb_timer.start(self.mb_timeout) @@ -472,11 +486,11 @@ class SolarmanV5(Message): return self.forward_at_cmd_resp = False - self.__build_header(0x4510) + self._build_header(0x4510) self.ifc.tx_add(struct.pack(f' 4: # logger.info(f'first byte modbus:{data[14]}') - inv_update = False - self.modbus_elms = 0 - for key, update, _ in self.mb.recv_resp(self.db, data[14:], - self.node_id): - self.modbus_elms += 1 - if update: - if key == 'inverter': - inv_update = True - self._set_mqtt_timestamp(key, self._timestamp()) - self.new_data[key] = True + inv_update = self.__parse_modbus_rsp(data) if inv_update: self.__build_model_name() + if self.establish_inv_emu and not self.ifc.remote.stream: + self.establish_emu() + def msg_hbeat_ind(self): data = self.ifc.rx_peek()[self.header_len:] result = struct.unpack_from(' str | int: + if not reverse: + return f'{val[0]:04x}' + else: + return int(val, 16) + + @staticmethod + def mac(val: tuple | str, reverse=False) -> str | tuple: + if not reverse: + return "%02x:%02x:%02x:%02x:%02x:%02x" % val + else: + return ( + int(val[0:2], 16), int(val[3:5], 16), + int(val[6:8], 16), int(val[9:11], 16), + int(val[12:14], 16), int(val[15:], 16)) + + @staticmethod + def version(val: tuple | str, reverse=False) -> str | int: + if not reverse: + x = val[0] + return f'V{(x >> 12)}.{(x >> 8) & 0xf}' \ + f'.{(x >> 4) & 0xf}{x & 0xf:1X}' + else: + arr = val[1:].split('.') + return int(arr[0], 10) << 12 | \ + int(arr[1], 10) << 8 | \ + int(arr[2][:-1], 10) << 4 | \ + int(arr[2][-1:], 16) + + @staticmethod + def 
set_value(buf: bytearray, idx: int, row: dict, val): + '''Get a value from buf and interpret as in row defined''' + fmt = row['fmt'] + if 'offset' in row: + val = val - row['offset'] + if 'quotient' in row: + val = round(val * row['quotient']) + if 'ratio' in row: + val = round(val / row['ratio']) + if 'func' in row: + val = row['func'](val, reverse=True) + if isinstance(val, str): + val = bytes(val, 'UTF8') + + if isinstance(val, tuple): + struct.pack_into(fmt, buf, idx, *val) + else: + struct.pack_into(fmt, buf, idx, val) + + class ClrAtMidnight: __clr_at_midnight = [Register.PV1_DAILY_GENERATION, Register.PV2_DAILY_GENERATION, Register.PV3_DAILY_GENERATION, Register.PV4_DAILY_GENERATION, Register.PV5_DAILY_GENERATION, Register.PV6_DAILY_GENERATION, Register.DAILY_GENERATION] # noqa: E501 db = {} @@ -203,6 +276,7 @@ class Infos: } __comm_type_val_tpl = "{%set com_types = ['n/a','Wi-Fi', 'G4', 'G5', 'GPRS'] %}{{com_types[value_json['Communication_Type']|int(0)]|default(value_json['Communication_Type'])}}" # noqa: E501 + __work_mode_val_tpl = "{%set mode = ['Normal-Mode', 'Aging-Mode', 'ATE-Mode', 'Shielding GFDI', 'DTU-Mode'] %}{{mode[value_json['Work_Mode']|int(0)]|default(value_json['Work_Mode'])}}" # noqa: E501 __status_type_val_tpl = "{%set inv_status = ['Off-line', 'On-grid', 'Off-grid'] %}{{inv_status[value_json['Inverter_Status']|int(0)]|default(value_json['Inverter_Status'])}}" # noqa: E501 __rated_power_val_tpl = "{% if 'Rated_Power' in value_json and value_json['Rated_Power'] != None %}{{value_json['Rated_Power']|string() +' W'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 __designed_power_val_tpl = ''' @@ -217,6 +291,100 @@ class Infos: {{ this.state }} {% endif %} ''' + __inv_alarm_val_tpl = ''' +{% if 'Inverter_Alarm' in value_json and + value_json['Inverter_Alarm'] != None %} + {% set val_int = value_json['Inverter_Alarm'] | int %} + {% if val_int == 0 %} + {% set result = 'noAlarm'%} + {%else%} + {% set result = '' %} + {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} + {% endif %} + {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} + {% endif %} + {% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%} + {% endif %} + {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} + {% endif %} + {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} + {% endif %} + {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} + {% endif %} + {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} + {% endif %} + {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} + {% endif %} + {% if val_int | bitwise_and(9)%}{% set result = result + 'noUtility, '%} + {% endif %} + {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} + {% endif %} + {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} + {% endif %} + {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} + {% endif %} + {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} + {% endif %} + {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} + {% endif %} + {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} + {% endif %} + {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} + {% endif %} + {% endif %} + {{ result }} +{% else %} + {{ this.state }} +{% endif %} +''' + __inv_fault_val_tpl = ''' +{% if 'Inverter_Fault' in value_json and + value_json['Inverter_Fault'] != None %} + {% set val_int = 
value_json['Inverter_Fault'] | int %} + {% if val_int == 0 %} + {% set result = 'noFault'%} + {%else%} + {% set result = '' %} + {% if val_int | bitwise_and(1)%}{% set result = result + 'Bit1, '%} + {% endif %} + {% if val_int | bitwise_and(2)%}{% set result = result + 'Bit2, '%} + {% endif %} + {% if val_int | bitwise_and(3)%}{% set result = result + 'Bit3, '%} + {% endif %} + {% if val_int | bitwise_and(4)%}{% set result = result + 'Bit4, '%} + {% endif %} + {% if val_int | bitwise_and(5)%}{% set result = result + 'Bit5, '%} + {% endif %} + {% if val_int | bitwise_and(6)%}{% set result = result + 'Bit6, '%} + {% endif %} + {% if val_int | bitwise_and(7)%}{% set result = result + 'Bit7, '%} + {% endif %} + {% if val_int | bitwise_and(8)%}{% set result = result + 'Bit8, '%} + {% endif %} + {% if val_int | bitwise_and(9)%}{% set result = result + 'Bit9, '%} + {% endif %} + {% if val_int | bitwise_and(10)%}{% set result = result + 'Bit10, '%} + {% endif %} + {% if val_int | bitwise_and(11)%}{% set result = result + 'Bit11, '%} + {% endif %} + {% if val_int | bitwise_and(12)%}{% set result = result + 'Bit12, '%} + {% endif %} + {% if val_int | bitwise_and(13)%}{% set result = result + 'Bit13, '%} + {% endif %} + {% if val_int | bitwise_and(14)%}{% set result = result + 'Bit14, '%} + {% endif %} + {% if val_int | bitwise_and(15)%}{% set result = result + 'Bit15, '%} + {% endif %} + {% if val_int | bitwise_and(16)%}{% set result = result + 'Bit16, '%} + {% endif %} + {% endif %} + {{ result }} +{% else %} + {{ this.state }} +{% endif %} +''' + + __input_coef_val_tpl = "{% if 'Input_Coefficient' in value_json and value_json['Input_Coefficient'] != None %}{{value_json['Input_Coefficient']|string() +' %'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 __output_coef_val_tpl = "{% if 'Output_Coefficient' in value_json and value_json['Output_Coefficient'] != None %}{{value_json['Output_Coefficient']|string() +' %'}}{% else %}{{ this.state }}{% endif %}" # noqa: E501 __info_defs = { @@ -239,6 +407,8 @@ class Infos: Register.NO_INPUTS: {'name': ['inverter', 'No_Inputs'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.MAX_DESIGNED_POWER: {'name': ['inverter', 'Max_Designed_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'designed_power_', 'val_tpl': __designed_power_val_tpl, 'name': 'Max Designed Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.RATED_POWER: {'name': ['inverter', 'Rated_Power'], 'level': logging.DEBUG, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'rated_power_', 'val_tpl': __rated_power_val_tpl, 'name': 'Rated Power', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.WORK_MODE: {'name': ['inverter', 'Work_Mode'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'work_mode_', 'name': 'Work Mode', 'val_tpl': __work_mode_val_tpl, 'icon': 'mdi:power', 'ent_cat': 'diagnostic'}}, # noqa: E501 + Register.INPUT_COEFFICIENT: {'name': ['inverter', 'Input_Coefficient'], 'level': logging.DEBUG, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': None, 'id': 'input_coef_', 'val_tpl': __input_coef_val_tpl, 'name': 'Input Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.OUTPUT_COEFFICIENT: {'name': ['inverter', 'Output_Coefficient'], 'level': logging.INFO, 'unit': '%', 'ha': {'dev': 'inverter', 'dev_cla': None, 'stat_cla': 
None, 'id': 'output_coef_', 'val_tpl': __output_coef_val_tpl, 'name': 'Output Coefficient', 'icon': LIGHTNING, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.PV1_MANUFACTURER: {'name': ['inverter', 'PV1_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.PV1_MODEL: {'name': ['inverter', 'PV1_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 @@ -252,9 +422,11 @@ class Infos: Register.PV5_MODEL: {'name': ['inverter', 'PV5_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.PV6_MANUFACTURER: {'name': ['inverter', 'PV6_Manufacturer'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 Register.PV6_MODEL: {'name': ['inverter', 'PV6_Model'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - + Register.BOOT_STATUS: {'name': ['inverter', 'BOOT_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.DSP_STATUS: {'name': ['inverter', 'DSP_STATUS'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 # proxy: Register.INVERTER_CNT: {'name': ['proxy', 'Inverter_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_count_', 'fmt': FMT_INT, 'name': 'Active Inverter Connections', 'icon': COUNTER}}, # noqa: E501 + Register.CLOUD_CONN_CNT: {'name': ['proxy', 'Cloud_Conn_Cnt'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'cloud_conn_count_', 'fmt': FMT_INT, 'name': 'Active Cloud Connections', 'icon': COUNTER}}, # noqa: E501 Register.UNKNOWN_SNR: {'name': ['proxy', 'Unknown_SNR'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_snr_', 'fmt': FMT_INT, 'name': 'Unknown Serial No', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.UNKNOWN_MSG: {'name': ['proxy', 'Unknown_Msg'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'unknown_msg_', 'fmt': FMT_INT, 'name': 'Unknown Msg Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.INVALID_DATA_TYPE: {'name': ['proxy', 'Invalid_Data_Type'], 'singleton': True, 'ha': {'dev': 'proxy', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_data_type_', 'fmt': FMT_INT, 'name': 'Invalid Data Type', 'icon': COUNTER, 'ent_cat': 'diagnostic'}}, # noqa: E501 @@ -269,22 +441,12 @@ class Infos: # 0xffffff03: {'name':['proxy', 'Voltage'], 'level': logging.DEBUG, 'unit': 'V', 'ha':{'dev':'proxy', 'dev_cla': 'voltage', 'stat_cla': 'measurement', 'id':'proxy_volt_', 'fmt':FMT_FLOAT,'name': 'Grid Voltage'}}, # noqa: E501 # events - Register.EVENT_401: {'name': ['events', '401_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_402: {'name': ['events', '402_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_403: {'name': ['events', '403_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_404: {'name': ['events', '404_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_405: {'name': ['events', '405_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_406: {'name': ['events', '406_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_407: {'name': ['events', '407_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_408: {'name': ['events', '408_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_409: {'name': ['events', '409_No_Utility'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - 
Register.EVENT_410: {'name': ['events', '410_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_411: {'name': ['events', '411_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_412: {'name': ['events', '412_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_413: {'name': ['events', '413_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_414: {'name': ['events', '414_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 - Register.EVENT_416: {'name': ['events', '416_'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.EVENT_ALARM: {'name': ['events', 'Inverter_Alarm'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_alarm_', 'name': 'Inverter Alarm', 'val_tpl': __inv_alarm_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 + Register.EVENT_FAULT: {'name': ['events', 'Inverter_Fault'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_fault_', 'name': 'Inverter Fault', 'val_tpl': __inv_fault_val_tpl, 'icon': 'mdi:alarm-light'}}, # noqa: E501 + Register.EVENT_BF1: {'name': ['events', 'Inverter_Bitfield_1'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.EVENT_BF2: {'name': ['events', 'Inverter_bitfield_2'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + # Register.EVENT_409: {'name': ['events', '409_No_Utility'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + # Register.EVENT_415: {'name': ['events', '415_GridFreqOverRating'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 # grid measures: Register.TS_GRID: {'name': ['grid', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 @@ -294,6 +456,8 @@ class Infos: Register.OUTPUT_POWER: {'name': ['grid', 'Output_Power'], 'level': logging.INFO, 'unit': 'W', 'ha': {'dev': 'inverter', 'dev_cla': 'power', 'stat_cla': 'measurement', 'id': 'out_power_', 'fmt': FMT_FLOAT, 'name': 'Power'}}, # noqa: E501 Register.INVERTER_TEMP: {'name': ['env', 'Inverter_Temp'], 'level': logging.DEBUG, 'unit': '°C', 'ha': {'dev': 'inverter', 'dev_cla': 'temperature', 'stat_cla': 'measurement', 'id': 'temp_', 'fmt': FMT_INT, 'name': 'Temperature'}}, # noqa: E501 Register.INVERTER_STATUS: {'name': ['env', 'Inverter_Status'], 'level': logging.INFO, 'unit': '', 'ha': {'dev': 'inverter', 'comp': 'sensor', 'dev_cla': None, 'stat_cla': None, 'id': 'inv_status_', 'name': 'Inverter Status', 'val_tpl': __status_type_val_tpl, 'icon': 'mdi:power'}}, # noqa: E501 + Register.DETECT_STATUS_1: {'name': ['env', 'Detect_Status_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.DETECT_STATUS_2: {'name': ['env', 'Detect_Status_2'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 # input measures: Register.TS_INPUT: {'name': ['input', 'Timestamp'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 @@ -343,6 +507,14 @@ class Infos: Register.IP_ADDRESS: {'name': ['controller', 'IP_Address'], 'level': logging.DEBUG, 'unit': '', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 'ip_address_', 'fmt': '| string', 'name': 'IP Address', 'icon': WIFI, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.POLLING_INTERVAL: {'name': ['controller', 'Polling_Interval'], 'level': logging.DEBUG, 'unit': 's', 'ha': {'dev': 'controller', 'dev_cla': None, 'stat_cla': None, 'id': 
'polling_intval_', 'fmt': FMT_STRING_SEC, 'name': 'Polling Interval', 'icon': UPDATE, 'ent_cat': 'diagnostic'}}, # noqa: E501 Register.SENSOR_LIST: {'name': ['controller', 'Sensor_List'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.SSID: {'name': ['controller', 'WiFi_SSID'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + + Register.OUTPUT_SHUTDOWN: {'name': ['other', 'Output_Shutdown'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.RATED_LEVEL: {'name': ['other', 'Rated_Level'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.GRID_VOLT_CAL_COEF: {'name': ['other', 'Grid_Volt_Cal_Coef'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + Register.PROD_COMPL_TYPE: {'name': ['other', 'Prod_Compliance_Type'], 'level': logging.INFO, 'unit': ''}, # noqa: E501 + Register.INV_UNKNOWN_1: {'name': ['inv_unknown', 'Unknown_1'], 'level': logging.DEBUG, 'unit': ''}, # noqa: E501 + } @property @@ -652,6 +824,8 @@ class Infos: def get_db_value(self, id: Register, not_found_result: any = None): '''get database value''' + if id not in self.info_defs: + return not_found_result row = self.info_defs[id] if isinstance(row, dict): keys = row['name'] diff --git a/app/src/inverter_base.py b/app/src/inverter_base.py index a493d8b..580490c 100644 --- a/app/src/inverter_base.py +++ b/app/src/inverter_base.py @@ -7,22 +7,13 @@ import gc from aiomqtt import MqttCodeError from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.inverter_base": - from app.src.inverter_ifc import InverterIfc - from app.src.proxy import Proxy - from app.src.async_stream import StreamPtr - from app.src.async_stream import AsyncStreamClient - from app.src.async_stream import AsyncStreamServer - from app.src.config import Config - from app.src.infos import Infos -else: # pragma: no cover - from inverter_ifc import InverterIfc - from proxy import Proxy - from async_stream import StreamPtr - from async_stream import AsyncStreamClient - from async_stream import AsyncStreamServer - from config import Config - from infos import Infos +from inverter_ifc import InverterIfc +from proxy import Proxy +from async_stream import StreamPtr +from async_stream import AsyncStreamClient +from async_stream import AsyncStreamServer +from cnf.config import Config +from infos import Infos logger_mqtt = logging.getLogger('mqtt') @@ -31,12 +22,16 @@ class InverterBase(InverterIfc, Proxy): def __init__(self, reader: StreamReader, writer: StreamWriter, config_id: str, prot_class, - client_mode: bool = False): + client_mode: bool = False, + remote_prot_class=None): Proxy.__init__(self) self._registry.append(weakref.ref(self)) self.addr = writer.get_extra_info('peername') self.config_id = config_id - self.prot_class = prot_class + if remote_prot_class: + self.prot_class = remote_prot_class + else: + self.prot_class = prot_class self.__ha_restarts = -1 self.remote = StreamPtr(None) ifc = AsyncStreamServer(reader, writer, @@ -45,7 +40,7 @@ class InverterBase(InverterIfc, Proxy): self.remote) self.local = StreamPtr( - self.prot_class(self.addr, ifc, True, client_mode), ifc + prot_class(self.addr, ifc, True, client_mode), ifc ) def __enter__(self): diff --git a/app/src/inverter_ifc.py b/app/src/inverter_ifc.py index 55fc1b9..11bd5e8 100644 --- a/app/src/inverter_ifc.py +++ b/app/src/inverter_ifc.py @@ -2,10 +2,7 @@ from abc import abstractmethod import logging from asyncio import StreamReader, StreamWriter -if __name__ == "app.src.inverter_ifc": - from app.src.iter_registry import AbstractIterMeta -else: # 
pragma: no cover - from iter_registry import AbstractIterMeta +from iter_registry import AbstractIterMeta logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/messages.py b/app/src/messages.py index bbff315..eecfc80 100644 --- a/app/src/messages.py +++ b/app/src/messages.py @@ -3,15 +3,11 @@ import weakref from typing import Callable from enum import Enum - -if __name__ == "app.src.messages": - from app.src.protocol_ifc import ProtocolIfc - from app.src.infos import Infos, Register - from app.src.modbus import Modbus -else: # pragma: no cover - from protocol_ifc import ProtocolIfc - from infos import Infos, Register - from modbus import Modbus +from async_ifc import AsyncIfc +from protocol_ifc import ProtocolIfc +from infos import Infos, Register +from modbus import Modbus +from my_timer import Timer logger = logging.getLogger('msg') @@ -89,26 +85,38 @@ class Message(ProtocolIfc): '''maximum time without a received msg from the inverter in sec''' MAX_DEF_IDLE_TIME = 360 '''maximum default time without a received msg in sec''' + MB_START_TIMEOUT = 40 + '''start delay for Modbus polling in server mode''' + MB_REGULAR_TIMEOUT = 60 + '''regular Modbus polling time in server mode''' - def __init__(self, server_side: bool, send_modbus_cb: - Callable[[bytes, int, str], None], mb_timeout: int): + def __init__(self, node_id, ifc: "AsyncIfc", server_side: bool, + send_modbus_cb: Callable[[bytes, int, str], None], + mb_timeout: int): self._registry.append(weakref.ref(self)) self.server_side = server_side + self.ifc = ifc + self.node_id = node_id if server_side: self.mb = Modbus(send_modbus_cb, mb_timeout) + self.mb_timer = Timer(self.mb_timout_cb, self.node_id) else: self.mb = None - + self.mb_timer = None self.header_valid = False self.header_len = 0 self.data_len = 0 self.unique_id = 0 - self._node_id = '' self.sug_area = '' self.new_data = {} self.state = State.init self.shutdown_started = False + self.modbus_elms = 0 # for unit tests + self.mb_timeout = self.MB_REGULAR_TIMEOUT + self.mb_first_timeout = self.MB_START_TIMEOUT + '''timer value for next Modbus polling request''' + self.modbus_polling = False @property def node_id(self): @@ -152,10 +160,35 @@ class Message(ProtocolIfc): to = self.MAX_DEF_IDLE_TIME return to + def _send_modbus_cmd(self, func, addr, val, log_lvl) -> None: + if self.state != State.up: + logger.log(log_lvl, f'[{self.node_id}] ignore MODBUS cmd,' + ' as the state is not UP') + return + self.mb.build_msg(Modbus.INV_ADDR, func, addr, val, log_lvl) + + async def send_modbus_cmd(self, func, addr, val, log_lvl) -> None: + self._send_modbus_cmd(func, addr, val, log_lvl) + ''' Our puplic methods ''' def close(self) -> None: + if self.server_side: + # set inverter state to offline, if output power is very low + logging.debug('close power: ' + f'{self.db.get_db_value(Register.OUTPUT_POWER, -1)}') + if self.db.get_db_value(Register.OUTPUT_POWER, 999) < 2: + self.db.set_db_def_value(Register.INVERTER_STATUS, 0) + self.new_data['env'] = True + self.mb_timer.close() + self.state = State.closed + self.ifc.rx_set_cb(None) + self.ifc.prot_set_timeout_cb(None) + self.ifc.prot_set_init_new_client_conn_cb(None) + self.ifc.prot_set_update_header_cb(None) + self.ifc = None + if self.mb: self.mb.close() self.mb = None diff --git a/app/src/modbus.py b/app/src/modbus.py index 028699d..5c64086 100644 --- a/app/src/modbus.py +++ b/app/src/modbus.py @@ -16,10 +16,7 @@ import logging import asyncio from typing import Generator, Callable -if __name__ == "app.src.modbus": - from app.src.infos 
import Register -else: # pragma: no cover - from infos import Register +from infos import Register, Fmt logger = logging.getLogger('data') @@ -40,15 +37,30 @@ class Modbus(): __crc_tab = [] mb_reg_mapping = { - 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 + 0x2000: {'reg': Register.BOOT_STATUS, 'fmt': '!H'}, # noqa: E501 + 0x2001: {'reg': Register.DSP_STATUS, 'fmt': '!H'}, # noqa: E501 + 0x2003: {'reg': Register.WORK_MODE, 'fmt': '!H'}, + 0x2006: {'reg': Register.OUTPUT_SHUTDOWN, 'fmt': '!H'}, + 0x2007: {'reg': Register.MAX_DESIGNED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 + 0x2008: {'reg': Register.RATED_LEVEL, 'fmt': '!H'}, + 0x2009: {'reg': Register.INPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 + 0x200a: {'reg': Register.GRID_VOLT_CAL_COEF, 'fmt': '!H'}, + 0x2010: {'reg': Register.PROD_COMPL_TYPE, 'fmt': '!H'}, 0x202c: {'reg': Register.OUTPUT_COEFFICIENT, 'fmt': '!H', 'ratio': 100/1024}, # noqa: E501 0x3000: {'reg': Register.INVERTER_STATUS, 'fmt': '!H'}, # noqa: E501 - 0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'eval': "f'V{(result>>12)}.{(result>>8)&0xf}.{(result>>4)&0xf}{result&0xf:1X}'"}, # noqa: E501 + 0x3001: {'reg': Register.DETECT_STATUS_1, 'fmt': '!H'}, # noqa: E501 + 0x3002: {'reg': Register.DETECT_STATUS_2, 'fmt': '!H'}, # noqa: E501 + 0x3003: {'reg': Register.EVENT_ALARM, 'fmt': '!H'}, # noqa: E501 + 0x3004: {'reg': Register.EVENT_FAULT, 'fmt': '!H'}, # noqa: E501 + 0x3005: {'reg': Register.EVENT_BF1, 'fmt': '!H'}, # noqa: E501 + 0x3006: {'reg': Register.EVENT_BF2, 'fmt': '!H'}, # noqa: E501 + + 0x3008: {'reg': Register.VERSION, 'fmt': '!H', 'func': Fmt.version}, # noqa: E501 0x3009: {'reg': Register.GRID_VOLTAGE, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 0x300a: {'reg': Register.GRID_CURRENT, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 0x300b: {'reg': Register.GRID_FREQUENCY, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 - 0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'eval': 'result-40'}, # noqa: E501 + 0x300c: {'reg': Register.INVERTER_TEMP, 'fmt': '!H', 'offset': -40}, # noqa: E501 # 0x300d 0x300e: {'reg': Register.RATED_POWER, 'fmt': '!H', 'ratio': 1}, # noqa: E501 0x300f: {'reg': Register.OUTPUT_POWER, 'fmt': '!H', 'ratio': 0.1}, # noqa: E501 @@ -74,6 +86,7 @@ class Modbus(): 0x3026: {'reg': Register.PV3_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 0x3028: {'reg': Register.PV4_DAILY_GENERATION, 'fmt': '!H', 'ratio': 0.01}, # noqa: E501 0x3029: {'reg': Register.PV4_TOTAL_GENERATION, 'fmt': '!L', 'ratio': 0.01}, # noqa: E501 + # 0x302a } def __init__(self, snd_handler: Callable[[bytes, int, str], None], @@ -117,6 +130,9 @@ class Modbus(): while not self.que.empty(): self.que.get_nowait() + def set_node_id(self, node_id: str): + self.node_id = node_id + def build_msg(self, addr: int, func: int, reg: int, val: int, log_lvl=logging.DEBUG) -> None: """Build MODBUS RTU request frame and add it to the tx queue @@ -160,14 +176,13 @@ class Modbus(): return True - def recv_resp(self, info_db, buf: bytes, node_id: str) -> \ + def recv_resp(self, info_db, buf: bytes) -> \ Generator[tuple[str, bool, int | float | str], None, None]: """Generator which check and parse a received MODBUS response. 
Keyword arguments: info_db: database for info lockups buf: received Modbus RTU response frame - node_id: string for logging which identifies the slave Returns on error and set Self.err to: 1: CRC error @@ -177,7 +192,6 @@ class Modbus(): 5: No MODBUS request pending """ # logging.info(f'recv_resp: first byte modbus:{buf[0]} len:{len(buf)}') - self.node_id = node_id fcode = buf[1] data_available = self.last_addr == self.INV_ADDR and \ @@ -228,17 +242,6 @@ class Modbus(): return False - def __get_value(self, buf: bytes, idx: int, row: dict): - '''get a value from the received buffer''' - val = struct.unpack_from(row['fmt'], buf, idx) - result = val[0] - - if 'eval' in row: - result = eval(row['eval']) - if 'ratio' in row: - result = round(result * row['ratio'], 2) - return result - def __process_data(self, info_db, buf: bytes, first_reg, elmlen): '''Generator over received registers, updates the db''' for i in range(0, elmlen): @@ -248,7 +251,7 @@ class Modbus(): info_id = row['reg'] keys, level, unit, must_incr = info_db._key_obj(info_id) if keys: - result = self.__get_value(buf, 3+2*i, row) + result = Fmt.get_value(buf, 3+2*i, row) name, update = info_db.update_db(keys, must_incr, result) yield keys[0], update, result diff --git a/app/src/modbus_tcp.py b/app/src/modbus_tcp.py index a0082ee..7d371c9 100644 --- a/app/src/modbus_tcp.py +++ b/app/src/modbus_tcp.py @@ -2,14 +2,9 @@ import logging import traceback import asyncio -if __name__ == "app.src.modbus_tcp": - from app.src.config import Config - from app.src.gen3plus.inverter_g3p import InverterG3P - from app.src.infos import Infos -else: # pragma: no cover - from config import Config - from gen3plus.inverter_g3p import InverterG3P - from infos import Infos +from cnf.config import Config +from gen3plus.inverter_g3p import InverterG3P +from infos import Infos logger = logging.getLogger('conn') @@ -57,15 +52,17 @@ class ModbusTcp(): # logging.info(f"SerialNo:{inv['monitor_sn']} host:{client['host']} port:{client['port']}") # noqa: E501 loop.create_task(self.modbus_loop(client['host'], client['port'], - inv['monitor_sn'])) + inv['monitor_sn'], + client['forward'])) - async def modbus_loop(self, host, port, snr: int) -> None: + async def modbus_loop(self, host, port, + snr: int, forward: bool) -> None: '''Loop for receiving messages from the TSUN cloud (client-side)''' while True: try: async with ModbusConn(host, port) as inverter: stream = inverter.local.stream - await stream.send_start_cmd(snr, host) + await stream.send_start_cmd(snr, host, forward) await stream.ifc.loop() logger.info(f'[{stream.node_id}:{stream.conn_no}] ' f'Connection closed - Shutdown: ' diff --git a/app/src/mqtt.py b/app/src/mqtt.py index 83e0fd4..0d33cac 100644 --- a/app/src/mqtt.py +++ b/app/src/mqtt.py @@ -2,16 +2,11 @@ import asyncio import logging import aiomqtt import traceback -if __name__ == "app.src.mqtt": - from app.src.modbus import Modbus - from app.src.messages import Message - from app.src.config import Config - from app.src.singleton import Singleton -else: # pragma: no cover - from modbus import Modbus - from messages import Message - from config import Config - from singleton import Singleton + +from modbus import Modbus +from messages import Message +from cnf.config import Config +from singleton import Singleton logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/protocol_ifc.py b/app/src/protocol_ifc.py index 46795e4..3b6c886 100644 --- a/app/src/protocol_ifc.py +++ b/app/src/protocol_ifc.py @@ -1,11 +1,7 @@ from abc import abstractmethod 
-if __name__ == "app.src.protocol_ifc": - from app.src.iter_registry import AbstractIterMeta - from app.src.async_ifc import AsyncIfc -else: # pragma: no cover - from iter_registry import AbstractIterMeta - from async_ifc import AsyncIfc +from async_ifc import AsyncIfc +from iter_registry import AbstractIterMeta class ProtocolIfc(metaclass=AbstractIterMeta): diff --git a/app/src/proxy.py b/app/src/proxy.py index 9b75c37..3f4f263 100644 --- a/app/src/proxy.py +++ b/app/src/proxy.py @@ -2,14 +2,9 @@ import asyncio import logging import json -if __name__ == "app.src.proxy": - from app.src.config import Config - from app.src.mqtt import Mqtt - from app.src.infos import Infos -else: # pragma: no cover - from config import Config - from mqtt import Mqtt - from infos import Infos +from cnf.config import Config +from mqtt import Mqtt +from infos import Infos logger_mqtt = logging.getLogger('mqtt') diff --git a/app/src/server.py b/app/src/server.py index cda8501..ce5077f 100644 --- a/app/src/server.py +++ b/app/src/server.py @@ -2,6 +2,7 @@ import logging import asyncio import signal import os +import argparse from asyncio import StreamReader, StreamWriter from aiohttp import web from logging import config # noqa F401 @@ -10,7 +11,10 @@ from inverter_ifc import InverterIfc from gen3.inverter_g3 import InverterG3 from gen3plus.inverter_g3p import InverterG3P from scheduler import Schedule -from config import Config +from cnf.config import Config +from cnf.config_read_env import ConfigReadEnv +from cnf.config_read_toml import ConfigReadToml +from cnf.config_read_json import ConfigReadJson from modbus_tcp import ModbusTcp routes = web.RouteTableDef() @@ -116,6 +120,8 @@ def get_log_level() -> int: '''checks if LOG_LVL is set in the environment and returns the corresponding logging.LOG_LEVEL''' log_level = os.getenv('LOG_LVL', 'INFO') + logging.info(f"LOG_LVL : {log_level}") + if log_level == 'DEBUG': log_level = logging.DEBUG elif log_level == 'WARN': @@ -125,7 +131,17 @@ def get_log_level() -> int: return log_level -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover + parser = argparse.ArgumentParser() + parser.add_argument('-p', '--config_path', type=str, + default='./config/', + help='set path for the configuration files') + parser.add_argument('-j', '--json_config', type=str, + help='read user config from json-file') + parser.add_argument('-t', '--toml_config', type=str, + help='read user config from toml-file') + parser.add_argument('--add_on', action='store_true') + args = parser.parse_args() # # Setup our daily, rotating logger # @@ -134,9 +150,14 @@ if __name__ == "__main__": logging.config.fileConfig('logging.ini') logging.info(f'Server "{serv_name} - {version}" will be started') + logging.info(f"AddOn: {args.add_on}") + logging.info(f"config_path: {args.config_path}") + logging.info(f"json_config: {args.json_config}") + logging.info(f"toml_config: {args.toml_config}") + log_level = get_log_level() + logging.info('******') # set lowest-severity for 'root', 'msg', 'conn' and 'data' logger - log_level = get_log_level() logging.getLogger().setLevel(log_level) logging.getLogger('msg').setLevel(log_level) logging.getLogger('conn').setLevel(log_level) @@ -149,9 +170,18 @@ if __name__ == "__main__": asyncio.set_event_loop(loop) # read config file - ConfigErr = Config.class_init() + Config.init(ConfigReadToml("default_config.toml")) + ConfigReadEnv() + ConfigReadJson(args.config_path + "config.json") + ConfigReadToml(args.config_path + "config.toml") + 
ConfigReadJson(args.json_config) + ConfigReadToml(args.toml_config) + ConfigErr = Config.get_error() + if ConfigErr is not None: logging.info(f'ConfigErr: {ConfigErr}') + logging.info('******') + Proxy.class_init() Schedule.start() ModbusTcp(loop) diff --git a/app/tests/test_async_stream.py b/app/tests/test_async_stream.py index d7dcf12..3f4db93 100644 --- a/app/tests/test_async_stream.py +++ b/app/tests/test_async_stream.py @@ -4,12 +4,13 @@ import asyncio import gc import time -from app.src.infos import Infos -from app.src.inverter_base import InverterBase -from app.src.async_stream import AsyncStreamServer, AsyncStreamClient, StreamPtr -from app.src.messages import Message -from app.tests.test_modbus_tcp import FakeReader, FakeWriter -from app.tests.test_inverter_base import config_conn, patch_open_connection +from infos import Infos +from inverter_base import InverterBase +from async_stream import AsyncStreamServer, AsyncStreamClient, StreamPtr +from messages import Message + +from test_modbus_tcp import FakeReader, FakeWriter +from test_inverter_base import config_conn, patch_open_connection pytest_plugins = ('pytest_asyncio',) @@ -17,10 +18,13 @@ pytest_plugins = ('pytest_asyncio',) Infos.static_init() class FakeProto(Message): - def __init__(self, server_side): - super().__init__(server_side, None, 10) + def __init__(self, ifc, server_side): + super().__init__('G3F', ifc, server_side, None, 10) self.conn_no = 0 + def mb_timout_cb(self, exp_cnt): + pass # empty callback + def fake_reader_fwd(): reader = FakeReader() reader.test = FakeReader.RD_TEST_13_BYTES @@ -337,6 +341,7 @@ def create_remote(remote, test_type, with_close_hdr:bool = False): elif test_type == TestType.FWD_RUNTIME_ERROR_NO_STREAM: remote.stream = None raise RuntimeError("Peer closed") + return True def close(): return @@ -349,7 +354,7 @@ def create_remote(remote, test_type, with_close_hdr:bool = False): FakeReader(), FakeWriter(), StreamPtr(None), close_hndl) remote.ifc.prot_set_update_header_cb(update_hdr) remote.ifc.prot_set_init_new_client_conn_cb(callback) - remote.stream = FakeProto(False) + remote.stream = FakeProto(remote.ifc, False) @pytest.mark.asyncio async def test_forward(): @@ -530,3 +535,39 @@ async def test_forward_runtime_error3(): await ifc.server_loop() assert cnt == 1 del ifc + +@pytest.mark.asyncio +async def test_forward_resp(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + def _close_cb(): + nonlocal cnt, remote, ifc + cnt += 1 + + cnt = 0 + ifc = AsyncStreamClient(fake_reader_fwd(), FakeWriter(), remote, _close_cb) + create_remote(remote, TestType.FWD_NO_EXCPT) + ifc.fwd_add(b'test-forward_msg') + await ifc.client_loop('') + assert cnt == 1 + del ifc + +@pytest.mark.asyncio +async def test_forward_resp2(): + assert asyncio.get_running_loop() + remote = StreamPtr(None) + cnt = 0 + + def _close_cb(): + nonlocal cnt, remote, ifc + cnt += 1 + + cnt = 0 + ifc = AsyncStreamClient(fake_reader_fwd(), FakeWriter(), None, _close_cb) + create_remote(remote, TestType.FWD_NO_EXCPT) + ifc.fwd_add(b'test-forward_msg') + await ifc.client_loop('') + assert cnt == 1 + del ifc diff --git a/app/tests/test_byte_fifo.py b/app/tests/test_byte_fifo.py index 1544cc0..f1392db 100644 --- a/app/tests/test_byte_fifo.py +++ b/app/tests/test_byte_fifo.py @@ -1,6 +1,6 @@ # test_with_pytest.py -from app.src.byte_fifo import ByteFifo +from byte_fifo import ByteFifo def test_fifo(): read = ByteFifo() diff --git a/app/tests/test_config.py b/app/tests/test_config.py index 5ceb1b3..d229dac 100644 --- 
a/app/tests/test_config.py +++ b/app/tests/test_config.py @@ -1,16 +1,56 @@ # test_with_pytest.py -import tomllib +import pytest +import json +from mock import patch from schema import SchemaMissingKeyError -from app.src.config import Config +from cnf.config import Config, ConfigIfc +from cnf.config_read_toml import ConfigReadToml -class TstConfig(Config): +class FakeBuffer: + rd = str() + +test_buffer = FakeBuffer + + +class FakeFile(): + def __init__(self): + self.buf = test_buffer + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb): + pass + + +class FakeOptionsFile(FakeFile): + def __init__(self, OpenTextMode): + super().__init__() + self.bin_mode = 'b' in OpenTextMode + + def read(self): + if self.bin_mode: + return bytearray(self.buf.rd.encode('utf-8')).copy() + else: + return self.buf.rd.copy() + +def patch_open(): + def new_open(file: str, OpenTextMode="rb"): + if file == "_no__file__no_": + raise FileNotFoundError + return FakeOptionsFile(OpenTextMode) + + with patch('builtins.open', new_open) as conn: + yield conn + +class TstConfig(ConfigIfc): @classmethod - def set(cls, cnf): + def __init__(cls, cnf): cls.act_config = cnf @classmethod - def _read_config_file(cls) -> dict: + def add_config(cls) -> dict: return cls.act_config @@ -22,82 +62,9 @@ def test_empty_config(): except SchemaMissingKeyError: pass -def test_default_config(): - with open("app/config/default_config.toml", "rb") as f: - cnf = tomllib.load(f) - - try: - validated = Config.conf_schema.validate(cnf) - except Exception: - assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': { - 'allow_all': False, - 'R170000000000001': { - 'node_id': '', - 'pv1': {'manufacturer': 'Risen', - 'type': 'RSM40-8-395M'}, - 'pv2': {'manufacturer': 'Risen', - 'type': 'RSM40-8-395M'}, - 'modbus_polling': False, - 'monitor_sn': 0, - 'suggested_area': '', - 'sensor_list': 688}, - 'Y170000000000001': { - 'modbus_polling': True, - 'monitor_sn': 2000000000, - 'node_id': '', - 'pv1': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv2': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv3': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'pv4': {'manufacturer': 'Risen', - 'type': 'RSM40-8-410M'}, - 'suggested_area': '', - 'sensor_list': 688}}} - -def test_full_config(): - cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, - 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, - 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, - 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, - 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, - 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': {'allow_all': True, - 'R170000000000001': {'modbus_polling': True, 'node_id': '', 'sensor_list': 0, 'suggested_area': '', 'pv1': {'type': 
'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}}, - 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'sensor_list': 0x1511, 'suggested_area': ''}}} - try: - validated = Config.conf_schema.validate(cnf) - except Exception: - assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'pv1': {'manufacturer': 'man1','type': 'type1'},'pv2': {'manufacturer': 'man2','type': 'type2'},'pv3': {'manufacturer': 'man3','type': 'type3'}, 'suggested_area': '', 'sensor_list': 0}, 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': '', 'suggested_area': '', 'sensor_list': 5393}}} - -def test_mininum_config(): - cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, - 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+']}, - 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE']}}}, - 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, - 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, - 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, - 'inverters': {'allow_all': True, - 'R170000000000001': {}} - } - - try: - validated = Config.conf_schema.validate(cnf) - except Exception: - assert False - assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': {'allow_all': True, 'R170000000000001': {'node_id': '', 'modbus_polling': True, 'monitor_sn': 0, 'suggested_area': '', 'sensor_list': 688}}} - -def test_read_empty(): - cnf = {} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') - assert err == None - cnf = TstConfig.get() - assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, +@pytest.fixture +def ConfigDefault(): + return {'gen3plus': {'at_acl': 
{'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, 'R170000000000001': { @@ -128,27 +95,150 @@ def test_read_empty(): } } } + +@pytest.fixture +def ConfigComplete(): + return { + 'gen3plus': { + 'at_acl': { + 'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']}, + 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], + 'block': ['AT+SUPDATE']} + } + }, + 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', + 'port': 5005}, + 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', + 'port': 10000}, + 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, + 'ha': {'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'proxy', + 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': {'node_id': 'PV-Garage/', + 'modbus_polling': False, + 'monitor_sn': 0, + 'pv1': {'manufacturer': 'man1', + 'type': 'type1'}, + 'pv2': {'manufacturer': 'man2', + 'type': 'type2'}, + 'suggested_area': 'Garage', + 'sensor_list': 688}, + 'Y170000000000001': {'modbus_polling': True, + 'monitor_sn': 2000000000, + 'node_id': 'PV-Garage2/', + 'pv1': {'manufacturer': 'man1', + 'type': 'type1'}, + 'pv2': {'manufacturer': 'man2', + 'type': 'type2'}, + 'pv3': {'manufacturer': 'man3', + 'type': 'type3'}, + 'pv4': {'manufacturer': 'man4', + 'type': 'type4'}, + 'suggested_area': 'Garage2', + 'sensor_list': 688} + } + } + +def test_default_config(): + Config.init(ConfigReadToml("app/config/default_config.toml")) + validated = Config.def_config + assert validated == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': { + 'allow_all': False, + 'R170000000000001': { + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-395M'}, + 'modbus_polling': False, + 'monitor_sn': 0, + 'suggested_area': '', + 'sensor_list': 688}, + 'Y170000000000001': { + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'node_id': '', + 'pv1': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv2': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv3': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'pv4': {'manufacturer': 'Risen', + 'type': 'RSM40-8-410M'}, + 'suggested_area': '', + 'sensor_list': 688}}} + +def test_full_config(ConfigComplete): + cnf = {'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, + 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']}, + 'tsun': 
{'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': ['AT+SUPDATE']}}}, + 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, + 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, + 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, + 'inverters': {'allow_all': False, + 'R170000000000001': {'modbus_polling': False, 'node_id': 'PV-Garage/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}}, + 'Y170000000000001': {'modbus_polling': True, 'monitor_sn': 2000000000, 'node_id': 'PV-Garage2/', 'sensor_list': 0x02B0, 'suggested_area': 'Garage2', 'pv1': {'type': 'type1', 'manufacturer': 'man1'}, 'pv2': {'type': 'type2', 'manufacturer': 'man2'}, 'pv3': {'type': 'type3', 'manufacturer': 'man3'}, 'pv4': {'type': 'type4', 'manufacturer': 'man4'}}}} + try: + validated = Config.conf_schema.validate(cnf) + except Exception: + assert False + assert validated == ConfigComplete + +def test_read_empty(ConfigDefault): + test_buffer.rd = "" - defcnf = TstConfig.def_config.get('solarman') + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigDefault + + defcnf = Config.def_config.get('solarman') assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000} - assert True == TstConfig.is_default('solarman') + assert True == Config.is_default('solarman') def test_no_file(): - cnf = {} - TstConfig.set(cnf) - err = TstConfig.read('') + Config.init(ConfigReadToml("default_config.toml")) + err = Config.get_error() assert err == "Config.read: [Errno 2] No such file or directory: 'default_config.toml'" - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {} - defcnf = TstConfig.def_config.get('solarman') + defcnf = Config.def_config.get('solarman') assert defcnf == None -def test_read_cnf1(): - cnf = {'solarman' : {'enabled': False}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') +def test_no_file2(): + Config.init(ConfigReadToml("app/config/default_config.toml")) + assert Config.err == None + ConfigReadToml("_no__file__no_") + err = Config.get_error() assert err == None - cnf = TstConfig.get() + +def test_invalid_filename(): + Config.init(ConfigReadToml("app/config/default_config.toml")) + assert Config.err == None + ConfigReadToml(None) + err = Config.get_error() + assert err == None + +def test_read_cnf1(): + test_buffer.rd = "solarman.enabled = false" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + + assert err == None + cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -180,18 +270,22 
@@ def test_read_cnf1(): } } } - cnf = TstConfig.get('solarman') + cnf = Config.get('solarman') assert cnf == {'enabled': False, 'host': 'iot.talent-monitoring.com', 'port': 10000} - defcnf = TstConfig.def_config.get('solarman') + defcnf = Config.def_config.get('solarman') assert defcnf == {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000} - assert False == TstConfig.is_default('solarman') + assert False == Config.is_default('solarman') def test_read_cnf2(): - cnf = {'solarman' : {'enabled': 'FALSE'}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + test_buffer.rd = "solarman.enabled = 'FALSE'" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + assert err == None - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 10000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -223,22 +317,30 @@ def test_read_cnf2(): } } } - assert True == TstConfig.is_default('solarman') + assert True == Config.is_default('solarman') -def test_read_cnf3(): - cnf = {'solarman' : {'port': 'FALSE'}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') - assert err == 'Config.read: Key \'solarman\' error:\nKey \'port\' error:\nint(\'FALSE\') raised ValueError("invalid literal for int() with base 10: \'FALSE\'")' - cnf = TstConfig.get() - assert cnf == {'solarman': {'port': 'FALSE'}} +def test_read_cnf3(ConfigDefault): + test_buffer.rd = "solarman.port = 'FALSE'" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + + assert err == 'error: Key \'solarman\' error:\nKey \'port\' error:\nint(\'FALSE\') raised ValueError("invalid literal for int() with base 10: \'FALSE\'")' + cnf = Config.get() + assert cnf == ConfigDefault def test_read_cnf4(): - cnf = {'solarman' : {'port': 5000}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + test_buffer.rd = "solarman.port = 5000" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() + assert err == None - cnf = TstConfig.get() + cnf = Config.get() assert cnf == {'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': []}, 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], 'block': []}}}, 'tsun': {'enabled': True, 'host': 'logger.talent-monitoring.com', 'port': 5005}, 'solarman': {'enabled': True, 'host': 'iot.talent-monitoring.com', 'port': 5000}, 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None}, 'ha': {'auto_conf_prefix': 'homeassistant', 'discovery_prefix': 'homeassistant', 'entity_prefix': 'tsun', 'proxy_node_id': 'proxy', 'proxy_unique_id': 'P170000000000001'}, 'inverters': { 'allow_all': False, @@ -270,16 +372,22 @@ def test_read_cnf4(): } } } - assert False == TstConfig.is_default('solarman') + assert False == Config.is_default('solarman') def test_read_cnf5(): - cnf = 
{'solarman' : {'port': 1023}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + test_buffer.rd = "solarman.port = 1023" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() assert err != None def test_read_cnf6(): - cnf = {'solarman' : {'port': 65536}} - TstConfig.set(cnf) - err = TstConfig.read('app/config/') + test_buffer.rd = "solarman.port = 65536" + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadToml("config/config.toml") + err = Config.get_error() assert err != None diff --git a/app/tests/test_config_read_env.py b/app/tests/test_config_read_env.py new file mode 100644 index 0000000..3bf33fc --- /dev/null +++ b/app/tests/test_config_read_env.py @@ -0,0 +1,53 @@ +# test_with_pytest.py +import pytest +import os +from mock import patch +from cnf.config import Config +from cnf.config_read_toml import ConfigReadToml +from cnf.config_read_env import ConfigReadEnv + +def patch_getenv(): + def new_getenv(key: str, defval=None): + """Get an environment variable, return None if it doesn't exist. +The optional second argument can specify an alternate default. key, +default and the result are str.""" + if key == 'MQTT_PASSWORD': + return 'passwd' + elif key == 'MQTT_PORT': + return 1234 + elif key == 'MQTT_HOST': + return "" + return defval + + with patch.object(os, 'getenv', new_getenv) as conn: + yield conn + +def test_extend_key(): + cnf_rd = ConfigReadEnv() + + conf = {} + cnf_rd._extend_key(conf, "mqtt.user", "testuser") + assert conf == { + 'mqtt': { + 'user': 'testuser', + }, + } + + conf = {} + cnf_rd._extend_key(conf, "mqtt", "testuser") + assert conf == { + 'mqtt': 'testuser', + } + + conf = {} + cnf_rd._extend_key(conf, "", "testuser") + assert conf == {'': 'testuser'} + +def test_read_env_config(): + Config.init(ConfigReadToml("app/config/default_config.toml")) + assert Config.get('mqtt') == {'host': 'mqtt', 'port': 1883, 'user': None, 'passwd': None} + for _ in patch_getenv(): + + ConfigReadEnv() + assert Config.get_error() == None + assert Config.get('mqtt') == {'host': 'mqtt', 'port': 1234, 'user': None, 'passwd': 'passwd'} diff --git a/app/tests/test_config_read_json.py b/app/tests/test_config_read_json.py new file mode 100644 index 0000000..0fd7b07 --- /dev/null +++ b/app/tests/test_config_read_json.py @@ -0,0 +1,404 @@ +# test_with_pytest.py +import pytest +from mock import patch +from cnf.config import Config +from cnf.config_read_json import ConfigReadJson +from cnf.config_read_toml import ConfigReadToml + +from test_config import ConfigDefault, ConfigComplete + + +class CnfIfc(ConfigReadJson): + def __init__(self): + pass + + +class FakeBuffer: + rd = str() + wr = str() + + +test_buffer = FakeBuffer + + +class FakeFile(): + def __init__(self): + self.buf = test_buffer + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb): + pass + + +class FakeOptionsFile(FakeFile): + def __init__(self, OpenTextMode): + super().__init__() + self.bin_mode = 'b' in OpenTextMode + + def read(self): + print(f"Fake.read: bmode:{self.bin_mode}") + if self.bin_mode: + return bytearray(self.buf.rd.encode('utf-8')).copy() + else: + print(f"Fake.read: str:{self.buf.rd}") + return self.buf.rd + +def patch_open(): + def new_open(file: str, OpenTextMode="r"): + if file == "_no__file__no_": + raise FileNotFoundError + return FakeOptionsFile(OpenTextMode) + + with patch('builtins.open', new_open) as conn: + yield 
conn + +@pytest.fixture +def ConfigTomlEmpty(): + return { + 'mqtt': {'host': 'mqtt', 'port': 1883, 'user': '', 'passwd': ''}, + 'ha': {'auto_conf_prefix': 'homeassistant', + 'discovery_prefix': 'homeassistant', + 'entity_prefix': 'tsun', + 'proxy_node_id': 'proxy', + 'proxy_unique_id': 'P170000000000001'}, + 'solarman': { + 'enabled': True, + 'host': 'iot.talent-monitoring.com', + 'port': 10000, + }, + 'tsun': { + 'enabled': True, + 'host': 'logger.talent-monitoring.com', + 'port': 5005, + }, + 'inverters': { + 'allow_all': False + }, + 'gen3plus': {'at_acl': {'tsun': {'allow': [], 'block': []}, + 'mqtt': {'allow': [], 'block': []}}}, + } + + +def test_no_config(ConfigDefault): + test_buffer.rd = "" # empty buffer, no json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson() + err = Config.get_error() + + assert err == 'error: Expecting value: line 1 column 1 (char 0)' + cnf = Config.get() + assert cnf == ConfigDefault + +def test_no_file(ConfigDefault): + test_buffer.rd = "" # empty buffer, no json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson("_no__file__no_") + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigDefault + +def test_invalid_filename(ConfigDefault): + test_buffer.rd = "" # empty buffer, no json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson(None) + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigDefault + +def test_cnv1(): + """test dotted key converting""" + tst = { + "gen3plus.at_acl.mqtt.block": [ + "AT+SUPDATE", + "AT+" + ] +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'gen3plus': { + 'at_acl': { + 'mqtt': { + 'block': [ + 'AT+SUPDATE', + "AT+" + ], + }, + }, + }, + } + +def test_cnv2(): + """test a valid list with serials in inverters""" + tst = { + "inverters": [ + { + "serial": "R170000000000001", + }, + { + "serial": "Y170000000000001", + } + ], +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'inverters': { + 'R170000000000001': {}, + 'Y170000000000001': {} + }, + } + +def test_cnv3(): + """test the combination of a list and a scalar in inverters""" + tst = { + "inverters": [ + { + "serial": "R170000000000001", + }, + { + "serial": "Y170000000000001", + } + ], + "inverters.allow_all": False, +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'inverters': { + 'R170000000000001': {}, + 'Y170000000000001': {}, + 'allow_all': False, + }, + } + +def test_cnv4(): + tst = { + "inverters": [ + { + "serial": "R170000000000001", + "node_id": "PV-Garage/", + "suggested_area": "Garage", + "modbus_polling": False, + "pv1_manufacturer": "man1", + "pv1_type": "type1", + "pv2_manufacturer": "man2", + "pv2_type": "type2", + "sensor_list": 688 + }, + { + "serial": "Y170000000000001", + "monitor_sn": 2000000000, + "node_id": "PV-Garage2/", + "suggested_area": "Garage2", + "modbus_polling": True, + "client_mode_host": "InverterIP", + "client_mode_port": 1234, + "pv1_manufacturer": "man1", + "pv1_type": "type1", + "pv2_manufacturer": "man2", + "pv2_type": "type2", + "pv3_manufacturer": "man3", + "pv3_type": "type3", + "pv4_manufacturer": "man4", + "pv4_type": "type4", + "sensor_list": 688 + } + ], + "tsun.enabled": True, + "solarman.enabled": True, + "inverters.allow_all": False, + "gen3plus.at_acl.tsun.allow": [ + "AT+Z", + 
"AT+UPURL", + "AT+SUPDATE" + ], + "gen3plus.at_acl.tsun.block": [ + "AT+SUPDATE" + ], + "gen3plus.at_acl.mqtt.allow": [ + "AT+" + ], + "gen3plus.at_acl.mqtt.block": [ + "AT+SUPDATE" + ] +} + + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'gen3plus': {'at_acl': {'mqtt': {'allow': ['AT+'], 'block': ['AT+SUPDATE']}, + 'tsun': {'allow': ['AT+Z', 'AT+UPURL', 'AT+SUPDATE'], + 'block': ['AT+SUPDATE']}}}, + 'inverters': {'R170000000000001': {'modbus_polling': False, + 'node_id': 'PV-Garage/', + 'pv1_manufacturer': 'man1', + 'pv1_type': 'type1', + 'pv2_manufacturer': 'man2', + 'pv2_type': 'type2', + 'sensor_list': 688, + 'suggested_area': 'Garage'}, + 'Y170000000000001': {'client_mode_host': 'InverterIP', + 'client_mode_port': 1234, + 'modbus_polling': True, + 'monitor_sn': 2000000000, + 'node_id': 'PV-Garage2/', + 'pv1_manufacturer': 'man1', + 'pv1_type': 'type1', + 'pv2_manufacturer': 'man2', + 'pv2_type': 'type2', + 'pv3_manufacturer': 'man3', + 'pv3_type': 'type3', + 'pv4_manufacturer': 'man4', + 'pv4_type': 'type4', + 'sensor_list': 688, + 'suggested_area': 'Garage2'}, + 'allow_all': False}, + 'solarman': {'enabled': True}, + 'tsun': {'enabled': True} + } + +def test_cnv5(): + """test a invalid list with missing serials""" + tst = { + "inverters": [ + { + "node_id": "PV-Garage1/", + }, + { + "serial": "Y170000000000001", + "node_id": "PV-Garage2/", + } + ], +} + cnf = ConfigReadJson() + obj = cnf.convert_to_obj(tst) + assert obj == { + 'inverters': { + 'Y170000000000001': {'node_id': 'PV-Garage2/'} + }, + } + +def test_cnv6(): + """test overwritting a value in inverters""" + tst = { + "inverters": [{ + "serial": "Y170000000000001", + "node_id": "PV-Garage2/", + }], + } + tst2 = { + "inverters": [{ + "serial": "Y170000000000001", + "node_id": "PV-Garden/", + }], + } + cnf = ConfigReadJson() + conf = {} + for key, val in tst.items(): + cnf.convert_inv_arr(conf, key, val) + + assert conf == { + 'inverters': { + 'Y170000000000001': {'node_id': 'PV-Garage2/'} + }, + } + + for key, val in tst2.items(): + cnf.convert_inv_arr(conf, key, val) + + assert conf == { + 'inverters': { + 'Y170000000000001': {'node_id': 'PV-Garden/'} + }, + } + +def test_empty_config(ConfigDefault): + test_buffer.rd = "{}" # empty json + + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson() + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigDefault + + +def test_full_config(ConfigComplete): + test_buffer.rd = """ +{ + "inverters": [ + { + "serial": "R170000000000001", + "node_id": "PV-Garage/", + "suggested_area": "Garage", + "modbus_polling": false, + "pv1.manufacturer": "man1", + "pv1.type": "type1", + "pv2.manufacturer": "man2", + "pv2.type": "type2", + "sensor_list": 688 + }, + { + "serial": "Y170000000000001", + "monitor_sn": 2000000000, + "node_id": "PV-Garage2/", + "suggested_area": "Garage2", + "modbus_polling": true, + "client_mode_host": "InverterIP", + "client_mode_port": 1234, + "pv1.manufacturer": "man1", + "pv1.type": "type1", + "pv2.manufacturer": "man2", + "pv2.type": "type2", + "pv3.manufacturer": "man3", + "pv3.type": "type3", + "pv4.manufacturer": "man4", + "pv4.type": "type4", + "sensor_list": 688 + } + ], + "tsun.enabled": true, + "solarman.enabled": true, + "inverters.allow_all": false, + "gen3plus.at_acl.tsun.allow": [ + "AT+Z", + "AT+UPURL", + "AT+SUPDATE" + ], + "gen3plus.at_acl.tsun.block": [ + "AT+SUPDATE" + ], + "gen3plus.at_acl.mqtt.allow": [ + "AT+" + ], + 
"gen3plus.at_acl.mqtt.block": [ + "AT+SUPDATE" + ] +} +""" + Config.init(ConfigReadToml("app/config/default_config.toml")) + for _ in patch_open(): + ConfigReadJson() + err = Config.get_error() + + assert err == None + cnf = Config.get() + assert cnf == ConfigComplete diff --git a/app/tests/test_infos.py b/app/tests/test_infos.py index 8d0c268..43c0050 100644 --- a/app/tests/test_infos.py +++ b/app/tests/test_infos.py @@ -2,8 +2,8 @@ import pytest import json, math import logging -from app.src.infos import Register, ClrAtMidnight -from app.src.infos import Infos +from infos import Register, ClrAtMidnight +from infos import Infos, Fmt def test_statistic_counter(): i = Infos() @@ -17,13 +17,13 @@ def test_statistic_counter(): assert val == None or val == 0 i.static_init() # initialize counter - assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}}) + assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 0, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}}) val = i.dev_value(Register.INVERTER_CNT) # valid and initiliazed addr assert val == 0 i.inc_counter('Inverter_Cnt') - assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}}) + assert json.dumps(i.stat) == json.dumps({"proxy": {"Inverter_Cnt": 1, "Cloud_Conn_Cnt": 0, "Unknown_SNR": 0, "Unknown_Msg": 0, "Invalid_Data_Type": 0, "Internal_Error": 0,"Unknown_Ctrl": 0, "OTA_Start_Msg": 0, "SW_Exception": 0, "Invalid_Msg_Format": 0, "AT_Command": 0, "AT_Command_Blocked": 0, "Modbus_Command": 0}}) val = i.dev_value(Register.INVERTER_CNT) assert val == 1 @@ -256,3 +256,24 @@ def test_key_obj(): assert level == logging.DEBUG assert unit == 'kWh' assert must_incr == True + +def test_hex4_cnv(): + tst_val = (0x12ef, ) + string = Fmt.hex4(tst_val) + assert string == '12ef' + val = Fmt.hex4(string, reverse=True) + assert val == tst_val[0] + +def test_mac_cnv(): + tst_val = (0x12, 0x34, 0x67, 0x89, 0xcd, 0xef) + string = Fmt.mac(tst_val) + assert string == '12:34:67:89:cd:ef' + val = Fmt.mac(string, reverse=True) + assert val == tst_val + +def test_version_cnv(): + tst_val = (0x123f, ) + string = Fmt.version(tst_val) + assert string == 'V1.2.3F' + val = Fmt.version(string, reverse=True) + assert val == tst_val[0] diff --git a/app/tests/test_infos_g3.py b/app/tests/test_infos_g3.py index 6fad692..da3eaed 100644 --- a/app/tests/test_infos_g3.py +++ b/app/tests/test_infos_g3.py @@ -1,7 +1,7 @@ # test_with_pytest.py import pytest, json, math -from app.src.infos import Register -from app.src.gen3.infos_g3 import InfosG3, RegisterMap +from infos import Register +from gen3.infos_g3 import InfosG3, RegisterMap @pytest.fixture def contr_data_seq(): # Get Time Request message @@ -421,7 +421,7 @@ def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero): if key == 'total' or key == 'inverter' or key == 'env': assert update == True tests +=1 - assert tests==8 + assert tests==12 
assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}}) assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 23}) @@ -435,7 +435,7 @@ def test_must_incr_total(inv_data_seq2, inv_data_seq2_zero): assert json.dumps(i.db['total']) == json.dumps({'Daily_Generation': 1.7, 'Total_Generation': 17.36}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {"Voltage": 33.6, "Current": 1.91, "Power": 64.5, "Daily_Generation": 1.08, "Total_Generation": 9.74}, "pv2": {"Voltage": 33.5, "Current": 1.36, "Power": 45.7, "Daily_Generation": 0.62, "Total_Generation": 7.62}, "pv3": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}, "pv4": {"Voltage": 0.0, "Current": 0.0, "Power": 0.0}}) assert json.dumps(i.db['env']) == json.dumps({"Inverter_Status": 1, "Inverter_Temp": 23}) - assert json.dumps(i.db['inverter']) == json.dumps({"Rated_Power": 600, "Max_Designed_Power": -1, "Output_Coefficient": 100.0, "No_Inputs": 2}) + assert json.dumps(i.db['inverter']) == json.dumps({"Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Work_Mode": 0, "Max_Designed_Power": -1, "Input_Coefficient": -0.1, "Output_Coefficient": 100.0, "No_Inputs": 2}) tests = 0 for key, update in i.parse (inv_data_seq2_zero): @@ -501,10 +501,10 @@ def test_new_data_types(inv_data_new): else: assert False - assert tests==15 - assert json.dumps(i.db['inverter']) == json.dumps({"Manufacturer": 0}) + assert tests==7 + assert json.dumps(i.db['inverter']) == json.dumps({"Manufacturer": 0, "DSP_STATUS": 0}) assert json.dumps(i.db['input']) == json.dumps({"pv1": {}}) - assert json.dumps(i.db['events']) == json.dumps({"401_": 0, "404_": 0, "405_": 0, "408_": 0, "409_No_Utility": 0, "406_": 0, "416_": 0}) + assert json.dumps(i.db['events']) == json.dumps({"Inverter_Alarm": 0, "Inverter_Fault": 0}) def test_invalid_data_type(invalid_data_seq): i = InfosG3() @@ -520,15 +520,3 @@ def test_invalid_data_type(invalid_data_seq): val = i.dev_value(Register.INVALID_DATA_TYPE) # check invalid data type counter assert val == 1 - -def test_result_eval(inv_data_seq2: bytes): - - # add eval to convert temperature from °F to °C - RegisterMap.map[0x00000514]['eval'] = '(result-32)/1.8' - - i = InfosG3() - - for _, _ in i.parse (inv_data_seq2): - pass # side effect is calling generator i.parse() - assert math.isclose(-5.0, round (i.get_db_value(Register.INVERTER_TEMP, 0),4), rel_tol=1e-09, abs_tol=1e-09) - del RegisterMap.map[0x00000514]['eval'] # remove eval diff --git a/app/tests/test_infos_g3p.py b/app/tests/test_infos_g3p.py index c80a6b7..e0cac05 100644 --- a/app/tests/test_infos_g3p.py +++ b/app/tests/test_infos_g3p.py @@ -1,9 +1,9 @@ # test_with_pytest.py import pytest, json, math, random -from app.src.infos import Register -from app.src.gen3plus.infos_g3p import InfosG3P -from app.src.gen3plus.infos_g3p import RegisterMap +from infos import Register +from gen3plus.infos_g3p import InfosG3P +from gen3plus.infos_g3p import RegisterMap @pytest.fixture(scope="session") def str_test_ip(): @@ -57,6 +57,7 @@ def inverter_data(): # 0x4210 ftype: 0x01 msg += 
b'\x01\x61\x00\xa8\x02\x54\x01\x5a\x00\x8a\x01\xe4\x01\x5a\x00\xbd' msg += b'\x02\x8f\x00\x11\x00\x01\x00\x00\x00\x0b\x00\x00\x27\x98\x00\x04' msg += b'\x00\x00\x0c\x04\x00\x03\x00\x00\x0a\xe7\x00\x05\x00\x00\x0c\x75' + msg += b'\x00\x00\x00\x00\x06\x16\x02\x00\x00\x00\x55\xaa\x00\x01\x00\x00' msg += b'\x00\x00\x00\x00\xff\xff\x07\xd0\x00\x03\x04\x00\x04\x00\x04\x00' msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00' @@ -85,10 +86,21 @@ def test_parse_4110(str_test_ip, device_data: bytes): pass # side effect is calling generator i.parse() assert json.dumps(i.db) == json.dumps({ - 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": str_test_ip, "Sensor_List": "02b0"}, + 'controller': {"Data_Up_Interval": 300, "Collect_Interval": 1, "Heartbeat_Interval": 120, "Signal_Strength": 100, "IP_Address": str_test_ip, "Sensor_List": "02b0", "WiFi_SSID": "Allius-Home"}, 'collector': {"Chip_Model": "LSW5BLE_17_02B0_1.05", "MAC-Addr": "40:2a:8f:4f:51:54", "Collector_Fw_Version": "V1.1.00.0B"}, }) +def test_build_4110(str_test_ip, device_data: bytes): + i = InfosG3P(client_mode=False) + i.db.clear() + for key, update in i.parse (device_data, 0x41, 2): + pass # side effect is calling generator i.parse() + + build_msg = i.build(len(device_data), 0x41, 2) + for i in range(11, 20): + build_msg[i] = device_data[i] + assert device_data == build_msg + def test_parse_4210(inverter_data: bytes): i = InfosG3P(client_mode=False) i.db.clear() @@ -98,16 +110,31 @@ def test_parse_4210(inverter_data: bytes): assert json.dumps(i.db) == json.dumps({ "controller": {"Sensor_List": "02b0", "Power_On_Time": 2051}, - "inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "Max_Designed_Power": 2000, "Output_Coefficient": 100.0}, - "env": {"Inverter_Status": 1, "Inverter_Temp": 14}, + "inverter": {"Serial_Number": "Y17E00000000000E", "Version": "V4.0.10", "Rated_Power": 600, "BOOT_STATUS": 0, "DSP_STATUS": 21930, "Work_Mode": 0, "Max_Designed_Power": 2000, "Input_Coefficient": 100.0, "Output_Coefficient": 100.0}, + "env": {"Inverter_Status": 1, "Detect_Status_1": 2, "Detect_Status_2": 0, "Inverter_Temp": 14}, + "events": {"Inverter_Alarm": 0, "Inverter_Fault": 0, "Inverter_Bitfield_1": 0, "Inverter_bitfield_2": 0}, "grid": {"Voltage": 224.8, "Current": 0.73, "Frequency": 50.05, "Output_Power": 165.8}, "input": {"pv1": {"Voltage": 35.3, "Current": 1.68, "Power": 59.6, "Daily_Generation": 0.04, "Total_Generation": 30.76}, "pv2": {"Voltage": 34.6, "Current": 1.38, "Power": 48.4, "Daily_Generation": 0.03, "Total_Generation": 27.91}, "pv3": {"Voltage": 34.6, "Current": 1.89, "Power": 65.5, "Daily_Generation": 0.05, "Total_Generation": 31.89}, "pv4": {"Voltage": 1.7, "Current": 0.01, "Power": 0.0, "Total_Generation": 15.58}}, - "total": {"Daily_Generation": 0.11, "Total_Generation": 101.36} + "total": {"Daily_Generation": 0.11, "Total_Generation": 101.36}, + "inv_unknown": {"Unknown_1": 512}, + "other": {"Output_Shutdown": 65535, "Rated_Level": 3, "Grid_Volt_Cal_Coef": 1024, "Prod_Compliance_Type": 6} }) + +def test_build_4210(inverter_data: bytes): + i = InfosG3P(client_mode=False) + i.db.clear() + for key, update in i.parse (inverter_data, 0x42, 1): + pass # side effect is calling generator i.parse() + + build_msg = i.build(len(inverter_data), 0x42, 1) + for i in range(11, 31): + build_msg[i] = inverter_data[i] + assert inverter_data == build_msg + def test_build_ha_conf1(): i = 
InfosG3P(client_mode=False) i.static_init() # initialize counter @@ -256,10 +283,12 @@ def test_build_ha_conf4(): assert tests==1 -def test_exception_and_eval(inverter_data: bytes): +def test_exception_and_calc(inverter_data: bytes): - # add eval to convert temperature from °F to °C - RegisterMap.map[0x420100d8]['eval'] = '(result-32)/1.8' + # patch table to convert temperature from °F to °C + ofs = RegisterMap.map[0x420100d8]['offset'] + RegisterMap.map[0x420100d8]['quotient'] = 1.8 + RegisterMap.map[0x420100d8]['offset'] = -32/1.8 # map PV1_VOLTAGE to invalid register RegisterMap.map[0x420100e0]['reg'] = Register.TEST_REG2 # set invalid maping entry for OUTPUT_POWER (string instead of dict type) @@ -267,16 +296,43 @@ def test_exception_and_eval(inverter_data: bytes): RegisterMap.map[0x420100de] = 'invalid_entry' i = InfosG3P(client_mode=False) - # i.db.clear() + i.db.clear() for key, update in i.parse (inverter_data, 0x42, 1): pass # side effect is calling generator i.parse() assert math.isclose(12.2222, round (i.get_db_value(Register.INVERTER_TEMP, 0),4), rel_tol=1e-09, abs_tol=1e-09) - del RegisterMap.map[0x420100d8]['eval'] # remove eval - RegisterMap.map[0x420100e0]['reg'] = Register.PV1_VOLTAGE # reset mapping - RegisterMap.map[0x420100de] = backup # reset mapping + + build_msg = i.build(len(inverter_data), 0x42, 1) + assert build_msg[32:0xde] == inverter_data[32:0xde] + assert build_msg[0xde:0xe2] == b'\x00\x00\x00\x00' + assert build_msg[0xe2:-1] == inverter_data[0xe2:-1] + + + # remove a table entry and test parsing and building + del RegisterMap.map[0x420100d8]['quotient'] + del RegisterMap.map[0x420100d8]['offset'] + + i.db.clear() for key, update in i.parse (inverter_data, 0x42, 1): pass # side effect is calling generator i.parse() assert 54 == i.get_db_value(Register.INVERTER_TEMP, 0) - \ No newline at end of file + + build_msg = i.build(len(inverter_data), 0x42, 1) + assert build_msg[32:0xd8] == inverter_data[32:0xd8] + assert build_msg[0xd8:0xe2] == b'\x006\x00\x00\x02X\x00\x00\x00\x00' + assert build_msg[0xe2:-1] == inverter_data[0xe2:-1] + + # test restore table + RegisterMap.map[0x420100d8]['offset'] = ofs + RegisterMap.map[0x420100e0]['reg'] = Register.PV1_VOLTAGE # reset mapping + RegisterMap.map[0x420100de] = backup # reset mapping + + # test orginial table + i.db.clear() + for key, update in i.parse (inverter_data, 0x42, 1): + pass # side effect is calling generator i.parse() + assert 14 == i.get_db_value(Register.INVERTER_TEMP, 0) + + build_msg = i.build(len(inverter_data), 0x42, 1) + assert build_msg[32:-1] == inverter_data[32:-1] diff --git a/app/tests/test_inverter_base.py b/app/tests/test_inverter_base.py index 054d729..2e05777 100644 --- a/app/tests/test_inverter_base.py +++ b/app/tests/test_inverter_base.py @@ -5,14 +5,14 @@ import gc from mock import patch from enum import Enum -from app.src.infos import Infos -from app.src.config import Config -from app.src.gen3.talent import Talent -from app.src.inverter_base import InverterBase -from app.src.singleton import Singleton -from app.src.async_stream import AsyncStream, AsyncStreamClient +from infos import Infos +from cnf.config import Config +from gen3.talent import Talent +from inverter_base import InverterBase +from singleton import Singleton +from async_stream import AsyncStream, AsyncStreamClient -from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname +from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname pytest_plugins = 
('pytest_asyncio',) @@ -69,13 +69,13 @@ class FakeWriter(): async def wait_closed(self): return -class TestType(Enum): +class MockType(Enum): RD_TEST_0_BYTES = 1 RD_TEST_TIMEOUT = 2 RD_TEST_EXCEPT = 3 -test = TestType.RD_TEST_0_BYTES +test = MockType.RD_TEST_0_BYTES @pytest.fixture def patch_open_connection(): @@ -85,9 +85,9 @@ def patch_open_connection(): def new_open(host: str, port: int): global test - if test == TestType.RD_TEST_TIMEOUT: + if test == MockType.RD_TEST_TIMEOUT: raise ConnectionRefusedError - elif test == TestType.RD_TEST_EXCEPT: + elif test == MockType.RD_TEST_EXCEPT: raise ValueError("Value cannot be negative") # Compliant return new_conn(None) diff --git a/app/tests/test_inverter_g3.py b/app/tests/test_inverter_g3.py index 45438bb..620173c 100644 --- a/app/tests/test_inverter_g3.py +++ b/app/tests/test_inverter_g3.py @@ -5,15 +5,15 @@ import sys,gc from mock import patch from enum import Enum -from app.src.infos import Infos -from app.src.config import Config -from app.src.proxy import Proxy -from app.src.inverter_base import InverterBase -from app.src.singleton import Singleton -from app.src.gen3.inverter_g3 import InverterG3 -from app.src.async_stream import AsyncStream +from infos import Infos +from cnf.config import Config +from proxy import Proxy +from inverter_base import InverterBase +from singleton import Singleton +from gen3.inverter_g3 import InverterG3 +from async_stream import AsyncStream -from app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname +from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname pytest_plugins = ('pytest_asyncio',) @@ -70,13 +70,13 @@ class FakeWriter(): async def wait_closed(self): return -class TestType(Enum): +class MockType(Enum): RD_TEST_0_BYTES = 1 RD_TEST_TIMEOUT = 2 RD_TEST_EXCEPT = 3 -test = TestType.RD_TEST_0_BYTES +test = MockType.RD_TEST_0_BYTES @pytest.fixture def patch_open_connection(): @@ -86,9 +86,9 @@ def patch_open_connection(): def new_open(host: str, port: int): global test - if test == TestType.RD_TEST_TIMEOUT: + if test == MockType.RD_TEST_TIMEOUT: raise ConnectionRefusedError - elif test == TestType.RD_TEST_EXCEPT: + elif test == MockType.RD_TEST_EXCEPT: raise ValueError("Value cannot be negative") # Compliant return new_conn(None) @@ -144,14 +144,14 @@ async def test_remote_except(config_conn, patch_open_connection): assert asyncio.get_running_loop() global test - test = TestType.RD_TEST_TIMEOUT + test = MockType.RD_TEST_TIMEOUT with InverterG3(FakeReader(), FakeWriter()) as inverter: await inverter.create_remote() await asyncio.sleep(0) assert inverter.remote.stream==None - test = TestType.RD_TEST_EXCEPT + test = MockType.RD_TEST_EXCEPT await inverter.create_remote() await asyncio.sleep(0) assert inverter.remote.stream==None diff --git a/app/tests/test_inverter_g3p.py b/app/tests/test_inverter_g3p.py index 0f47cbe..6bb98ed 100644 --- a/app/tests/test_inverter_g3p.py +++ b/app/tests/test_inverter_g3p.py @@ -4,14 +4,14 @@ import asyncio from mock import patch from enum import Enum -from app.src.infos import Infos -from app.src.config import Config -from app.src.proxy import Proxy -from app.src.inverter_base import InverterBase -from app.src.singleton import Singleton -from app.src.gen3plus.inverter_g3p import InverterG3P +from infos import Infos +from cnf.config import Config +from proxy import Proxy +from inverter_base import InverterBase +from singleton import Singleton +from gen3plus.inverter_g3p import InverterG3P -from 
app.tests.test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname +from test_modbus_tcp import patch_mqtt_err, patch_mqtt_except, test_port, test_hostname pytest_plugins = ('pytest_asyncio',) @@ -69,13 +69,13 @@ class FakeWriter(): async def wait_closed(self): return -class TestType(Enum): +class MockType(Enum): RD_TEST_0_BYTES = 1 RD_TEST_TIMEOUT = 2 RD_TEST_EXCEPT = 3 -test = TestType.RD_TEST_0_BYTES +test = MockType.RD_TEST_0_BYTES @pytest.fixture def patch_open_connection(): @@ -85,9 +85,9 @@ def patch_open_connection(): def new_open(host: str, port: int): global test - if test == TestType.RD_TEST_TIMEOUT: + if test == MockType.RD_TEST_TIMEOUT: raise ConnectionRefusedError - elif test == TestType.RD_TEST_EXCEPT: + elif test == MockType.RD_TEST_EXCEPT: raise ValueError("Value cannot be negative") # Compliant return new_conn(None) @@ -121,14 +121,14 @@ async def test_remote_except(config_conn, patch_open_connection): assert asyncio.get_running_loop() global test - test = TestType.RD_TEST_TIMEOUT + test = MockType.RD_TEST_TIMEOUT with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: await inverter.create_remote() await asyncio.sleep(0) assert inverter.remote.stream==None - test = TestType.RD_TEST_EXCEPT + test = MockType.RD_TEST_EXCEPT await inverter.create_remote() await asyncio.sleep(0) assert inverter.remote.stream==None @@ -144,7 +144,7 @@ async def test_mqtt_publish(config_conn, patch_open_connection): with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: stream = inverter.local.stream await inverter.async_publ_mqtt() # check call with invalid unique_id - stream._SolarmanV5__set_serial_no(snr= 123344) + stream._set_serial_no(snr= 123344) stream.new_data['inverter'] = True stream.db.db['inverter'] = {} @@ -171,7 +171,7 @@ async def test_mqtt_err(config_conn, patch_open_connection, patch_mqtt_err): with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: stream = inverter.local.stream - stream._SolarmanV5__set_serial_no(snr= 123344) + stream._set_serial_no(snr= 123344) stream.new_data['inverter'] = True stream.db.db['inverter'] = {} await inverter.async_publ_mqtt() @@ -188,7 +188,7 @@ async def test_mqtt_except(config_conn, patch_open_connection, patch_mqtt_except with InverterG3P(FakeReader(), FakeWriter(), client_mode=False) as inverter: stream = inverter.local.stream - stream._SolarmanV5__set_serial_no(snr= 123344) + stream._set_serial_no(snr= 123344) stream.new_data['inverter'] = True stream.db.db['inverter'] = {} diff --git a/app/tests/test_modbus.py b/app/tests/test_modbus.py index d0e321e..b6914b0 100644 --- a/app/tests/test_modbus.py +++ b/app/tests/test_modbus.py @@ -1,8 +1,8 @@ # test_with_pytest.py import pytest import asyncio -from app.src.modbus import Modbus -from app.src.infos import Infos, Register +from modbus import Modbus +from infos import Infos, Register pytest_plugins = ('pytest_asyncio',) @@ -77,9 +77,10 @@ def test_recv_resp_crc_err(): mb.last_fcode = 3 mb.last_reg = 0x300e mb.last_len = 2 + mb.set_node_id('test') # check matching response, but with CRC error call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf3', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf3'): call += 1 assert mb.err == 1 assert 0 == call @@ -97,10 +98,11 @@ def test_recv_resp_invalid_addr(): mb.last_fcode = 3 mb.last_reg = 0x300e mb.last_len = 2 + mb.set_node_id('test') # check not matching response, with wrong 
server addr call = 0 - for key, update in mb.recv_resp(mb.db, b'\x02\x03\x04\x01\x2c\x00\x46\x88\xf4', 'test'): + for key, update in mb.recv_resp(mb.db, b'\x02\x03\x04\x01\x2c\x00\x46\x88\xf4'): call += 1 assert mb.err == 2 assert 0 == call @@ -120,7 +122,8 @@ def test_recv_recv_fcode(): # check not matching response, with wrong function code call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + mb.set_node_id('test') + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 3 @@ -142,7 +145,8 @@ def test_recv_resp_len(): # check not matching response, with wrong data length call = 0 - for key, update, _ in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + mb.set_node_id('test') + for key, update, _ in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 4 @@ -161,7 +165,8 @@ def test_recv_unexpect_resp(): # check unexpected response, which must be dropped call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + mb.set_node_id('test') + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 5 @@ -177,8 +182,9 @@ def test_parse_resp(): assert mb.req_pend call = 0 + mb.set_node_id('test') exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30] - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): if key == 'grid': assert update == True elif key == 'inverter': @@ -226,8 +232,9 @@ def test_queue2(): assert mb.send_calls == 1 assert mb.pdu == b'\x01\x030\x07\x00\x06{\t' call = 0 + mb.set_node_id('test') exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30] - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): if key == 'grid': assert update == True elif key == 'inverter': @@ -245,14 +252,14 @@ def test_queue2(): assert mb.send_calls == 2 assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b' - for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b'): pass # call generator mb.recv_resp() assert mb.que.qsize() == 0 assert mb.send_calls == 3 assert mb.pdu == b'\x01\x030\x07\x00\x06{\t' call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): call += 1 assert 0 == mb.err assert 5 == call @@ -276,8 +283,9 @@ def test_queue3(): assert mb.recv_responses == 0 call = 0 + mb.set_node_id('test') exp_result = ['V0.0.2C', 4.4, 0.7, 0.7, 30] - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): if key == 'grid': assert update == True elif key == 'inverter': @@ -296,7 +304,7 @@ def test_queue3(): assert mb.send_calls == 2 assert mb.pdu == b'\x01\x06\x20\x08\x00\x04\x02\x0b' - for 
key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x06\x20\x08\x00\x04\x02\x0b'): pass # no code in loop is OK; calling the generator is the purpose assert 0 == mb.err assert mb.recv_responses == 2 @@ -305,7 +313,7 @@ def test_queue3(): assert mb.send_calls == 3 assert mb.pdu == b'\x01\x030\x07\x00\x06{\t' call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8', 'test'): + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x0c\x01\x2c\x00\x2c\x00\x2c\x00\x46\x00\x46\x00\x46\x32\xc8'): call += 1 assert 0 == mb.err assert mb.recv_responses == 2 @@ -373,7 +381,8 @@ def test_recv_unknown_data(): # check matching response, but with CRC error call = 0 - for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4', 'test'): + mb.set_node_id('test') + for key, update, val in mb.recv_resp(mb.db, b'\x01\x03\x04\x01\x2c\x00\x46\xbb\xf4'): call += 1 assert mb.err == 0 assert 0 == call diff --git a/app/tests/test_modbus_tcp.py b/app/tests/test_modbus_tcp.py index 93ecfa0..1c69b60 100644 --- a/app/tests/test_modbus_tcp.py +++ b/app/tests/test_modbus_tcp.py @@ -5,14 +5,14 @@ from aiomqtt import MqttCodeError from mock import patch from enum import Enum -from app.src.singleton import Singleton -from app.src.config import Config -from app.src.infos import Infos -from app.src.mqtt import Mqtt -from app.src.inverter_base import InverterBase -from app.src.messages import Message, State -from app.src.proxy import Proxy -from app.src.modbus_tcp import ModbusConn, ModbusTcp +from singleton import Singleton +from cnf.config import Config +from infos import Infos +from mqtt import Mqtt +from inverter_base import InverterBase +from messages import Message, State +from proxy import Proxy +from modbus_tcp import ModbusConn, ModbusTcp pytest_plugins = ('pytest_asyncio',) @@ -52,6 +52,10 @@ def config_conn(test_hostname, test_port): 'proxy_node_id': 'test_1', 'proxy_unique_id': '' }, + 'solarman':{ + 'host': 'access1.solarmanpv.com', + 'port': 10000 + }, 'inverters':{ 'allow_all': True, "R170000000000001":{ @@ -65,7 +69,8 @@ def config_conn(test_hostname, test_port): 'sensor_list': 0x2b0, 'client_mode':{ 'host': '192.168.0.1', - 'port': 8899 + 'port': 8899, + 'forward': True } } } diff --git a/app/tests/test_mqtt.py b/app/tests/test_mqtt.py index 9fb857e..9560767 100644 --- a/app/tests/test_mqtt.py +++ b/app/tests/test_mqtt.py @@ -5,13 +5,15 @@ import aiomqtt import logging from mock import patch, Mock -from app.src.async_stream import AsyncIfcImpl -from app.src.singleton import Singleton -from app.src.mqtt import Mqtt -from app.src.modbus import Modbus -from app.src.gen3plus.solarman_v5 import SolarmanV5 -from app.src.config import Config +from async_stream import AsyncIfcImpl +from singleton import Singleton +from mqtt import Mqtt +from modbus import Modbus +from gen3plus.solarman_v5 import SolarmanV5 +from cnf.config import Config +NO_MOSQUITTO_TEST = False +'''disable all tests with connections to test.mosquitto.org''' pytest_plugins = ('pytest_asyncio',) @@ -69,23 +71,79 @@ def spy_modbus_cmd_client(): def test_native_client(test_hostname, test_port): """Sanity check: Make sure the paho-mqtt client can connect to the test - MQTT server. + MQTT server. 
Otherwise the test set NO_MOSQUITTO_TEST to True and disable + all test cases which depends on the test.mosquitto.org server """ + global NO_MOSQUITTO_TEST + if NO_MOSQUITTO_TEST: + pytest.skip('skipping, since Mosquitto is not reliable at the moment') import paho.mqtt.client as mqtt import threading - c = mqtt.Client() + c = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2) c.loop_start() try: # Just make sure the client connects successfully on_connect = threading.Event() c.on_connect = Mock(side_effect=lambda *_: on_connect.set()) c.connect_async(test_hostname, test_port) - assert on_connect.wait(5) + if not on_connect.wait(3): + NO_MOSQUITTO_TEST = True # skip all mosquitto tests + pytest.skip('skipping, since Mosquitto is not reliable at the moment') finally: c.loop_stop() +@pytest.mark.asyncio +async def test_mqtt_connection(config_mqtt_conn): + global NO_MOSQUITTO_TEST + if NO_MOSQUITTO_TEST: + pytest.skip('skipping, since Mosquitto is not reliable at the moment') + + _ = config_mqtt_conn + assert asyncio.get_running_loop() + + on_connect = asyncio.Event() + async def cb(): + on_connect.set() + + try: + m = Mqtt(cb) + assert m.task + assert await asyncio.wait_for(on_connect.wait(), 5) + # await asyncio.sleep(1) + assert 0 == m.ha_restarts + await m.publish('homeassistant/status', 'online') + except TimeoutError: + assert False + finally: + await m.close() + await m.publish('homeassistant/status', 'online') + +@pytest.mark.asyncio +async def test_ha_reconnect(config_mqtt_conn): + global NO_MOSQUITTO_TEST + if NO_MOSQUITTO_TEST: + pytest.skip('skipping, since Mosquitto is not reliable at the moment') + + _ = config_mqtt_conn + on_connect = asyncio.Event() + async def cb(): + on_connect.set() + + try: + m = Mqtt(cb) + msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'offline', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + assert not on_connect.is_set() + + msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = False, mid= 0, properties= None) + await m.dispatch_msg(msg) + assert on_connect.is_set() + + finally: + await m.close() + @pytest.mark.asyncio async def test_mqtt_no_config(config_no_conn): _ = config_no_conn @@ -110,29 +168,6 @@ async def test_mqtt_no_config(config_no_conn): finally: await m.close() -@pytest.mark.asyncio -async def test_mqtt_connection(config_mqtt_conn): - _ = config_mqtt_conn - assert asyncio.get_running_loop() - - on_connect = asyncio.Event() - async def cb(): - on_connect.set() - - try: - m = Mqtt(cb) - assert m.task - assert await asyncio.wait_for(on_connect.wait(), 5) - # await asyncio.sleep(1) - assert 0 == m.ha_restarts - await m.publish('homeassistant/status', 'online') - except TimeoutError: - assert False - finally: - await m.close() - await m.publish('homeassistant/status', 'online') - - @pytest.mark.asyncio async def test_msg_dispatch(config_mqtt_conn, spy_modbus_cmd): _ = config_mqtt_conn @@ -209,26 +244,6 @@ async def test_msg_ignore_client_conn(config_mqtt_conn, spy_modbus_cmd_client): finally: await m.close() -@pytest.mark.asyncio -async def test_ha_reconnect(config_mqtt_conn): - _ = config_mqtt_conn - on_connect = asyncio.Event() - async def cb(): - on_connect.set() - - try: - m = Mqtt(cb) - msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'offline', qos= 0, retain = False, mid= 0, properties= None) - await m.dispatch_msg(msg) - assert not on_connect.is_set() - - msg = aiomqtt.Message(topic= 'homeassistant/status', payload= b'online', qos= 0, retain = 
False, mid= 0, properties= None) - await m.dispatch_msg(msg) - assert on_connect.is_set() - - finally: - await m.close() - @pytest.mark.asyncio async def test_ignore_unknown_func(config_mqtt_conn): '''don't dispatch for unknwon function names''' diff --git a/app/tests/test_proxy.py b/app/tests/test_proxy.py index bee01fd..ace707e 100644 --- a/app/tests/test_proxy.py +++ b/app/tests/test_proxy.py @@ -5,11 +5,11 @@ import aiomqtt import logging from mock import patch, Mock -from app.src.singleton import Singleton -from app.src.proxy import Proxy -from app.src.mqtt import Mqtt -from app.src.gen3plus.solarman_v5 import SolarmanV5 -from app.src.config import Config +from singleton import Singleton +from proxy import Proxy +from mqtt import Mqtt +from gen3plus.solarman_v5 import SolarmanV5 +from cnf.config import Config pytest_plugins = ('pytest_asyncio',) diff --git a/app/tests/test_server.py b/app/tests/test_server.py new file mode 100644 index 0000000..367bf5b --- /dev/null +++ b/app/tests/test_server.py @@ -0,0 +1,24 @@ +# test_with_pytest.py +import pytest +import logging +import os +from mock import patch +from server import get_log_level + +def test_get_log_level(): + + with patch.dict(os.environ, {'LOG_LVL': ''}): + log_lvl = get_log_level() + assert log_lvl == logging.INFO + + with patch.dict(os.environ, {'LOG_LVL': 'DEBUG'}): + log_lvl = get_log_level() + assert log_lvl == logging.DEBUG + + with patch.dict(os.environ, {'LOG_LVL': 'WARN'}): + log_lvl = get_log_level() + assert log_lvl == logging.WARNING + + with patch.dict(os.environ, {'LOG_LVL': 'UNKNOWN'}): + log_lvl = get_log_level() + assert log_lvl == logging.INFO diff --git a/app/tests/test_singleton.py b/app/tests/test_singleton.py index d645e08..9d9eb94 100644 --- a/app/tests/test_singleton.py +++ b/app/tests/test_singleton.py @@ -1,16 +1,16 @@ # test_with_pytest.py import pytest -from app.src.singleton import Singleton +from singleton import Singleton -class Test(metaclass=Singleton): +class Example(metaclass=Singleton): def __init__(self): pass # is a dummy test class def test_singleton_metaclass(): Singleton._instances.clear() - a = Test() + a = Example() assert 1 == len(Singleton._instances) - b = Test() + b = Example() assert 1 == len(Singleton._instances) assert a is b del a diff --git a/app/tests/test_solarman.py b/app/tests/test_solarman.py index a980744..6f11bec 100644 --- a/app/tests/test_solarman.py +++ b/app/tests/test_solarman.py @@ -5,12 +5,12 @@ import asyncio import logging import random from math import isclose -from app.src.async_stream import AsyncIfcImpl, StreamPtr -from app.src.gen3plus.solarman_v5 import SolarmanV5 -from app.src.config import Config -from app.src.infos import Infos, Register -from app.src.modbus import Modbus -from app.src.messages import State, Message +from async_stream import AsyncIfcImpl, StreamPtr +from gen3plus.solarman_v5 import SolarmanV5, SolarmanBase +from cnf.config import Config +from infos import Infos, Register +from modbus import Modbus +from messages import State, Message pytest_plugins = ('pytest_asyncio',) @@ -37,6 +37,9 @@ class FakeIfc(AsyncIfcImpl): super().__init__() self.remote = StreamPtr(None) + async def create_remote(self): + await asyncio.sleep(0) + class MemoryStream(SolarmanV5): def __init__(self, msg, chunks = (0,), server_side: bool = True): _ifc = FakeIfc() @@ -109,7 +112,7 @@ class MemoryStream(SolarmanV5): c.ifc.remote.stream = self return c - def _SolarmanV5__flush_recv_msg(self) -> None: + def _SolarmanBase__flush_recv_msg(self) -> None: 
self.msg_recvd.append( { 'control': self.control, @@ -117,7 +120,7 @@ class MemoryStream(SolarmanV5): 'data_len': self.data_len } ) - super()._SolarmanV5__flush_recv_msg() + super()._SolarmanBase__flush_recv_msg() self.msg_count += 1 @@ -1102,7 +1105,7 @@ def test_sync_start_ind(config_tsun_inv1, sync_start_ind_msg, sync_start_rsp_msg assert m.db.stat['proxy']['Invalid_Msg_Format'] == 0 m.seq.server_side = False # simulate forawding to TSUN cloud - m._update_header(m.ifc.fwd_fifo.peek()) + m._SolarmanBase__update_header(m.ifc.fwd_fifo.peek()) assert str(m.seq) == '0d:0e' # value after forwarding indication assert m.ifc.fwd_fifo.get()==sync_start_fwd_msg @@ -1768,7 +1771,7 @@ async def test_start_client_mode(config_tsun_inv1, str_test_ip): assert m.no_forwarding == False assert m.mb_timer.tim == None assert asyncio.get_running_loop() == m.mb_timer.loop - await m.send_start_cmd(get_sn_int(), str_test_ip, m.mb_first_timeout) + await m.send_start_cmd(get_sn_int(), str_test_ip, False, m.mb_first_timeout) assert m.sent_pdu==bytearray(b'\xa5\x17\x00\x10E\x01\x00!Ce{\x02\xb0\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x030\x00\x000J\xde\xf1\x15') assert m.db.get_db_value(Register.IP_ADDRESS) == str_test_ip assert isclose(m.db.get_db_value(Register.POLLING_INTERVAL), 0.5) @@ -1803,3 +1806,30 @@ def test_timeout(config_tsun_inv1): assert SolarmanV5.MAX_DEF_IDLE_TIME == m._timeout() m.state = State.closed m.close() + +def test_fnc_dispatch(): + def msg(): + return + + _ = config_tsun_inv1 + m = MemoryStream(b'') + m.switch[1] = msg + m.switch[2] = "msg" + + _obj, _str = m.get_fnc_handler(1) + assert _obj == msg + assert _str == "'msg'" + + _obj, _str = m.get_fnc_handler(2) + assert _obj == m.msg_unknown + assert _str == "'msg'" + + _obj, _str = m.get_fnc_handler(3) + assert _obj == m.msg_unknown + assert _str == "'msg_unknown'" + +def test_timestamp(): + m = MemoryStream(b'') + ts = m._timestamp() + ts_emu = m._emu_timestamp() + assert ts == ts_emu + 24*60*60 \ No newline at end of file diff --git a/app/tests/test_solarman_emu.py b/app/tests/test_solarman_emu.py new file mode 100644 index 0000000..41e0e48 --- /dev/null +++ b/app/tests/test_solarman_emu.py @@ -0,0 +1,233 @@ +import pytest +import asyncio + +from async_stream import AsyncIfcImpl, StreamPtr +from gen3plus.solarman_v5 import SolarmanV5, SolarmanBase +from gen3plus.solarman_emu import SolarmanEmu +from infos import Infos, Register + +from test_solarman import FakeIfc, MemoryStream, get_sn_int, get_sn, correct_checksum, config_tsun_inv1, msg_modbus_rsp +from test_infos_g3p import str_test_ip, bytes_test_ip + +timestamp = 0x3224c8bc + +class InvStream(MemoryStream): + def __init__(self, msg=b''): + super().__init__(msg) + + def _emu_timestamp(self): + return timestamp + +class CldStream(SolarmanEmu): + def __init__(self, inv: InvStream): + _ifc = FakeIfc() + _ifc.remote.stream = inv + super().__init__(('test.local', 1234), _ifc, server_side=False, client_mode=False) + self.__msg = b'' + self.__msg_len = 0 + self.__offs = 0 + self.msg_count = 0 + self.msg_recvd = [] + + def _emu_timestamp(self): + return timestamp + + def append_msg(self, msg): + self.__msg += msg + self.__msg_len += len(msg) + + def _read(self) -> int: + copied_bytes = 0 + try: + if (self.__offs < self.__msg_len): + self.ifc.rx_fifo += self.__msg[self.__offs:] + copied_bytes = self.__msg_len - self.__offs + self.__offs = self.__msg_len + except Exception: + pass # ignore exceptions here + return copied_bytes + + def _SolarmanBase__flush_recv_msg(self) -> None: + 
self.msg_recvd.append( + { + 'control': self.control, + 'seq': str(self.seq), + 'data_len': self.data_len + } + ) + super()._SolarmanBase__flush_recv_msg() + self.msg_count += 1 + +@pytest.fixture +def device_ind_msg(bytes_test_ip): # 0x4110 + msg = b'\xa5\xd4\x00\x10\x41\x00\x01' +get_sn() +b'\x02\xbc\xc8\x24\x32' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x3c\x78\x01\x00\x01\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + bytes_test_ip + msg += b'\x0f\x00\x01\xb0' + msg += b'\x02\x0f\x00\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += correct_checksum(msg) + msg += b'\x15' + return msg + +@pytest.fixture +def inverter_ind_msg(): # 0x4210 + msg = b'\xa5\x99\x01\x10\x42\x00\x01' +get_sn() +b'\x01\xb0\x02\xbc\xc8' + msg += b'\x24\x32\x3c\x00\x00\x00\xa0\x47\xe4\x33\x01\x00\x03\x08\x00\x00' + msg += b'\x59\x31\x37\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x31' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x40\x10\x08\xc8\x00\x49\x13\x8d\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00' + msg += b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00' + msg += b'\x04\x00\x00\x01\xff\xff\x00\x01\x00\x06\x00\x68\x00\x68\x05\x00' + msg += b'\x09\xcd\x07\xb6\x13\x9c\x13\x24\x00\x01\x07\xae\x04\x0f\x00\x41' + msg += b'\x00\x0f\x0a\x64\x0a\x64\x00\x06\x00\x06\x09\xf6\x12\x8c\x12\x8c' + msg += b'\x00\x10\x00\x10\x14\x52\x14\x52\x00\x10\x00\x10\x01\x51\x00\x05' + msg += b'\x00\x00\x00\x01\x13\x9c\x0f\xa0\x00\x4e\x00\x66\x03\xe8\x04\x00' + msg += b'\x09\xce\x07\xa8\x13\x9c\x13\x26\x00\x00\x00\x00\x00\x00\x00\x00' + msg += b'\x00\x00\x00\x00\x04\x00\x04\x00\x00\x00\x00\x00\xff\xff\x00\x00' + msg += b'\x00\x00\x00\x00' + msg += correct_checksum(msg) + msg += b'\x15' + return msg + +@pytest.fixture +def inverter_rsp_msg(): # 0x1210 + msg = 
b'\xa5\x0a\x00\x10\x12\x02\02' +get_sn() +b'\x01\x01' + msg += b'\x00\x00\x00\x00' + msg += b'\x3c\x00\x00\x00' + msg += correct_checksum(msg) + msg += b'\x15' + return msg + +@pytest.fixture +def heartbeat_ind(): + msg = b'\xa5\x01\x00\x10G\x00\x01\x00\x00\x00\x00\x00Y\x15' + return msg + +def test_emu_init_close(): + # received a message with wrong start byte plus an valid message + # the complete receive buffer must be cleared to + # find the next valid message + inv = InvStream() + cld = CldStream(inv) + cld.close() + + +@pytest.mark.asyncio +async def test_emu_start(config_tsun_inv1, msg_modbus_rsp, str_test_ip, device_ind_msg): + _ = config_tsun_inv1 + assert asyncio.get_running_loop() + inv = InvStream(msg_modbus_rsp) + + assert asyncio.get_running_loop() == inv.mb_timer.loop + await inv.send_start_cmd(get_sn_int(), str_test_ip, True, inv.mb_first_timeout) + inv.read() # read complete msg, and dispatch msg + assert not inv.header_valid # must be invalid, since msg was handled and buffer flushed + assert inv.msg_count == 1 + assert inv.control == 0x1510 + + cld = CldStream(inv) + cld.ifc.update_header_cb(inv.ifc.fwd_fifo.peek()) + assert inv.ifc.fwd_fifo.peek() == device_ind_msg + cld.close() + +def test_snd_hb(config_tsun_inv1, heartbeat_ind): + _ = config_tsun_inv1 + inv = InvStream() + cld = CldStream(inv) + + # await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout) + cld.send_heartbeat_cb(0) + assert cld.ifc.tx_fifo.peek() == heartbeat_ind + cld.close() + +@pytest.mark.asyncio +async def test_snd_inv_data(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg): + _ = config_tsun_inv1 + inv = InvStream() + inv.db.set_db_def_value(Register.INVERTER_STATUS, 1) + inv.db.set_db_def_value(Register.DETECT_STATUS_1, 2) + inv.db.set_db_def_value(Register.VERSION, 'V4.0.10') + inv.db.set_db_def_value(Register.GRID_VOLTAGE, 224.8) + inv.db.set_db_def_value(Register.GRID_CURRENT, 0.73) + inv.db.set_db_def_value(Register.GRID_FREQUENCY, 50.05) + inv.db.set_db_def_value(Register.PROD_COMPL_TYPE, 6) + assert asyncio.get_running_loop() == inv.mb_timer.loop + await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout) + inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value + + cld = CldStream(inv) + cld.time_ofs = 0x33e447a0 + cld.last_sync = cld._emu_timestamp() - 60 + cld.pkt_cnt = 0x802 + assert cld.data_up_inv == 17 # check test value + cld.data_up_inv = 0.1 # speedup test first data msg + cld._init_new_client_conn() + cld.data_up_inv = 0.5 # timeout for second data msg + await asyncio.sleep(0.2) + assert cld.ifc.tx_fifo.get() == inverter_ind_msg + + cld.append_msg(inverter_rsp_msg) + cld.read() # read complete msg, and dispatch msg + + assert not cld.header_valid # must be invalid, since msg was handled and buffer flushed + assert cld.msg_count == 1 + assert cld.header_len==11 + assert cld.snr == 2070233889 + assert cld.unique_id == '2070233889' + assert cld.msg_recvd[0]['control']==0x1210 + assert cld.msg_recvd[0]['seq']=='02:02' + assert cld.msg_recvd[0]['data_len']==0x0a + assert '02b0' == cld.db.get_db_value(Register.SENSOR_LIST, None) + assert cld.db.stat['proxy']['Unknown_Msg'] == 0 + + cld.close() + +@pytest.mark.asyncio +async def test_rcv_invalid(config_tsun_inv1, inverter_ind_msg, inverter_rsp_msg): + _ = config_tsun_inv1 + inv = InvStream() + assert asyncio.get_running_loop() == inv.mb_timer.loop + await inv.send_start_cmd(get_sn_int(), str_test_ip, False, inv.mb_first_timeout) + 
inv.db.set_db_def_value(Register.DATA_UP_INTERVAL, 17) # set test value + + cld = CldStream(inv) + cld._init_new_client_conn() + + cld.append_msg(inverter_ind_msg) + cld.read() # read complete msg, and dispatch msg + + assert not cld.header_valid # must be invalid, since msg was handled and buffer flushed + assert cld.msg_count == 1 + assert cld.header_len==11 + assert cld.snr == 2070233889 + assert cld.unique_id == '2070233889' + assert cld.msg_recvd[0]['control']==0x4210 + assert cld.msg_recvd[0]['seq']=='00:01' + assert cld.msg_recvd[0]['data_len']==0x199 + assert '02b0' == cld.db.get_db_value(Register.SENSOR_LIST, None) + assert cld.db.stat['proxy']['Unknown_Msg'] == 1 + + + cld.close() diff --git a/app/tests/test_talent.py b/app/tests/test_talent.py index ae156bb..2b1ef6c 100644 --- a/app/tests/test_talent.py +++ b/app/tests/test_talent.py @@ -1,12 +1,12 @@ # test_with_pytest.py import pytest, logging, asyncio from math import isclose -from app.src.async_stream import AsyncIfcImpl, StreamPtr -from app.src.gen3.talent import Talent, Control -from app.src.config import Config -from app.src.infos import Infos, Register -from app.src.modbus import Modbus -from app.src.messages import State +from async_stream import AsyncIfcImpl, StreamPtr +from gen3.talent import Talent, Control +from cnf.config import Config +from infos import Infos, Register +from modbus import Modbus +from messages import State pytest_plugins = ('pytest_asyncio',) diff --git a/ha_addon/Makefile b/ha_addon/Makefile deleted file mode 100644 index b83b0f5..0000000 --- a/ha_addon/Makefile +++ /dev/null @@ -1,45 +0,0 @@ -SHELL = /bin/sh - -# Folders -SRC=../app -SRC_PROXY=$(SRC)/src -CNF_PROXY=$(SRC)/config - -DST=rootfs -DST_PROXY=$(DST)/home/proxy - -# collect source files -SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\ - $(wildcard $(SRC_PROXY)/*.ini)\ - $(wildcard $(SRC_PROXY)/gen3/*.py)\ - $(wildcard $(SRC_PROXY)/gen3plus/*.py) -CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml) - -# determine destination files -TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%) -CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%) - -build: rootfs - -clean: - rm -r -f $(DST_PROXY) - rm -f $(DST)/requirements.txt - -rootfs: $(TARGET_FILES) $(CONFIG_FILES) $(DST)/requirements.txt - -.PHONY: build clean rootfs - - -$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/% - @echo Copy $< to $@ - @mkdir -p $(@D) - @cp $< $@ - -$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/% - @echo Copy $< to $@ - @mkdir -p $(@D) - @cp $< $@ - -$(DST)/requirements.txt : $(SRC)/requirements.txt - @echo Copy $< to $@ - @cp $< $@ diff --git a/ha_addon/rootfs/home/create_config_toml.py b/ha_addon/rootfs/home/create_config_toml.py deleted file mode 100644 index 91f8543..0000000 --- a/ha_addon/rootfs/home/create_config_toml.py +++ /dev/null @@ -1,65 +0,0 @@ -import json -import os - -# Dieses file übernimmt die Add-On Konfiguration und schreibt sie in die -# Konfigurationsdatei des tsun-proxy -# Die Addon Konfiguration wird in der Datei /data/options.json bereitgestellt -# Die Konfiguration wird in der Datei /home/proxy/config/config.toml -# gespeichert - -# Übernehme die Umgebungsvariablen -# alternativ kann auch auf die homeassistant supervisor API zugegriffen werden - -data = {} -data['mqtt.host'] = os.getenv('MQTT_HOST') -data['mqtt.port'] = os.getenv('MQTT_PORT') -data['mqtt.user'] = os.getenv('MQTT_USER') -data['mqtt.passwd'] = os.getenv('MQTT_PASSWORD') - - -# Lese die Add-On Konfiguration aus der Datei /data/options.json -with open('/data/options.json') as 
json_file: - # with open('options.json') as json_file: - options_data = json.load(json_file) - data.update(options_data) - - -# Schreibe die Add-On Konfiguration in die Datei /home/proxy/config/config.toml # noqa: E501 -with open('/home/proxy/config/config.toml', 'w+') as f: - # with open('./config/config.toml', 'w+') as f: - f.write(f""" -mqtt.host = '{data.get('mqtt.host')}' # URL or IP address of the mqtt broker -mqtt.port = {data.get('mqtt.port')} -mqtt.user = '{data.get('mqtt.user')}' -mqtt.passwd = '{data.get('mqtt.passwd')}' - - -ha.auto_conf_prefix = '{data.get('ha.auto_conf_prefix', 'homeassistant')}' # MQTT prefix for subscribing for homeassistant status updates # noqa: E501 -ha.discovery_prefix = '{data.get('ha.discovery_prefix', 'homeassistant')}' # MQTT prefix for discovery topic # noqa: E501 -ha.entity_prefix = '{data.get('ha.entity_prefix', 'tsun')}' # MQTT topic prefix for publishing inverter values # noqa: E501 -ha.proxy_node_id = '{data.get('ha.proxy_node_id', 'proxy')}' # MQTT node id, for the proxy_node_id -ha.proxy_unique_id = '{data.get('ha.proxy_unique_id', 'P170000000000001')}' # MQTT unique id, to identify a proxy instance - - -tsun.enabled = {str(data.get('tsun.enabled', True)).lower()} -tsun.host = '{data.get('tsun.host', 'logger.talent-monitoring.com')}' -tsun.port = {data.get('tsun.port', 5005)} - - -solarman.enabled = {str(data.get('solarman.enabled', True)).lower()} -solarman.host = '{data.get('solarman.host', 'iot.talent-monitoring.com')}' -solarman.port = {data.get('solarman.port', 10000)} - - -inverters.allow_all = {str(data.get('inverters.allow_all', False)).lower()} -""") - - for inverter in data['inverters']: - f.write(f""" -[inverters."{inverter['serial']}"] -node_id = '{inverter['node_id']}' -suggested_area = '{inverter['suggested_area']}' -modbus_polling = {str(inverter['modbus_polling']).lower()} -pv1 = {{type = '{inverter['pv1_type']}', manufacturer = '{inverter['pv1_manufacturer']}'}} # Optional, PV module descr # noqa: E501 -pv2 = {{type = '{inverter['pv2_type']}', manufacturer = '{inverter['pv2_manufacturer']}'}} # Optional, PV module descr # noqa: E501 -""") diff --git a/ha_addon/rootfs/home/options.json b/ha_addon/rootfs/home/options.json deleted file mode 100644 index 6603eb5..0000000 --- a/ha_addon/rootfs/home/options.json +++ /dev/null @@ -1,19 +0,0 @@ - - -{ - "inverters": [ - { - "serial": "R17E760702080400", - "node_id": "PV-Garage", - "suggested_area": "Garage", - "modbus_polling": false, - "pv1_manufacturer": "Shinefar", - "pv1_type": "SF-M18/144550", - "pv2_manufacturer": "Shinefar", - "pv2_type": "SF-M18/144550" - } - ], - "tsun.enabled": false, - "solarman.enabled": false, - "inverters.allow_all": false -} \ No newline at end of file diff --git a/ha_addon/tests/test_create_config_toml.py b/ha_addon/tests/test_create_config_toml.py deleted file mode 100644 index 077a615..0000000 --- a/ha_addon/tests/test_create_config_toml.py +++ /dev/null @@ -1,6 +0,0 @@ -# test_with_pytest.py -# import ha_addon.rootfs.home.create_config_toml - - -def test_config(): - pass diff --git a/ha_addon/Dockerfile b/ha_addons/ha_addon/Dockerfile similarity index 77% rename from ha_addon/Dockerfile rename to ha_addons/ha_addon/Dockerfile index 6568cc7..aa267cd 100755 --- a/ha_addon/Dockerfile +++ b/ha_addons/ha_addon/Dockerfile @@ -13,10 +13,7 @@ # 1 Build Image # ###################### -# opt for suitable build base. 
I opted for the recommended hassio-addon base - -#ARG BUILD_FROM="ghcr.io/hassio-addons/debian-base:latest" -ARG BUILD_FROM="ghcr.io/hassio-addons/base:latest" +ARG BUILD_FROM="ghcr.io/hassio-addons/base:stable" FROM $BUILD_FROM @@ -70,18 +67,16 @@ COPY rootfs/ / RUN chmod a+x /run.sh -# no idea whether needed or not -ENV SERVICE_NAME="tsun-proxy" -ENV UID=1000 -ENV GID=1000 -ENV VERSION="0.0" - - ####################### # 6 run app # ####################### +ARG SERVICE_NAME +ARG VERSION +ENV SERVICE_NAME=${SERVICE_NAME} + +RUN echo ${VERSION} > /proxy-version.txt # command to run on container start CMD [ "/run.sh" ] @@ -90,8 +85,3 @@ CMD [ "/run.sh" ] ####################### -# Labels -LABEL \ - io.hass.version="VERSION" \ - io.hass.type="addon" \ - io.hass.arch="armhf|aarch64|i386|amd64" diff --git a/ha_addons/ha_addon/Makefile b/ha_addons/ha_addon/Makefile new file mode 100644 index 0000000..43de018 --- /dev/null +++ b/ha_addons/ha_addon/Makefile @@ -0,0 +1,74 @@ +#!make +include ../../.env + +SHELL = /bin/sh +IMAGE = tsun-gen3-addon + + +# Folders +SRC=../../app +SRC_PROXY=$(SRC)/src +CNF_PROXY=$(SRC)/config + +DST=rootfs +DST_PROXY=$(DST)/home/proxy + +# collect source files +SRC_FILES := $(wildcard $(SRC_PROXY)/*.py)\ + $(wildcard $(SRC_PROXY)/*.ini)\ + $(wildcard $(SRC_PROXY)/cnf/*.py)\ + $(wildcard $(SRC_PROXY)/gen3/*.py)\ + $(wildcard $(SRC_PROXY)/gen3plus/*.py) +CNF_FILES := $(wildcard $(CNF_PROXY)/*.toml) + +# determine destination files +TARGET_FILES = $(SRC_FILES:$(SRC_PROXY)/%=$(DST_PROXY)/%) +CONFIG_FILES = $(CNF_FILES:$(CNF_PROXY)/%=$(DST_PROXY)/%) + +export BUILD_DATE := ${shell date -Iminutes} +VERSION := $(shell cat $(SRC)/.version) +export MAJOR := $(shell echo $(VERSION) | cut -f1 -d.) + +PUBLIC_URL := $(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f1 -d/) +PUBLIC_USER :=$(shell echo $(PUBLIC_CONTAINER_REGISTRY) | cut -f2 -d/) + + +dev debug: build + @echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PRIVAT_CONTAINER_REGISTRY)$(IMAGE) + export VERSION=$(VERSION)-$@ && \ + export IMAGE=$(PRIVAT_CONTAINER_REGISTRY)$(IMAGE) && \ + docker buildx bake -f docker-bake.hcl $@ + +rc: build + @echo version: $(VERSION) build-date: $(BUILD_DATE) image: $(PUBLIC_CONTAINER_REGISTRY)$(IMAGE) + @echo login at $(PUBLIC_URL) as $(PUBLIC_USER) + @DO_LOGIN="$(shell echo $(PUBLIC_CR_KEY) | docker login $(PUBLIC_URL) -u $(PUBLIC_USER) --password-stdin)" + export VERSION=$(VERSION)-$@ && \ + export IMAGE=$(PUBLIC_CONTAINER_REGISTRY)$(IMAGE) && \ + docker buildx bake -f docker-bake.hcl $@ + + +build: rootfs + +clean: + rm -r -f $(DST_PROXY) + rm -f $(DST)/requirements.txt + +rootfs: $(TARGET_FILES) $(CONFIG_FILES) $(DST)/requirements.txt + +.PHONY: debug dev build clean rootfs + + +$(CONFIG_FILES): $(DST_PROXY)/% : $(CNF_PROXY)/% + @echo Copy $< to $@ + @mkdir -p $(@D) + @cp $< $@ + +$(TARGET_FILES): $(DST_PROXY)/% : $(SRC_PROXY)/% + @echo Copy $< to $@ + @mkdir -p $(@D) + @cp $< $@ + +$(DST)/requirements.txt : $(SRC)/requirements.txt + @echo Copy $< to $@ + @cp $< $@ diff --git a/ha_addon/config.yaml b/ha_addons/ha_addon/config.yaml similarity index 59% rename from ha_addon/config.yaml rename to ha_addons/ha_addon/config.yaml index 4643693..3da3876 100755 --- a/ha_addon/config.yaml +++ b/ha_addons/ha_addon/config.yaml @@ -1,6 +1,8 @@ name: "TSUN-Proxy" description: "MQTT Proxy for TSUN Photovoltaic Inverters" -version: "0.0.7" +version: "dev" +image: docker.io/sallius/tsun-gen3-addon +url: https://github.com/s-allius/tsun-gen3-proxy slug: "tsun-proxy" init: false arch: @@ -20,24 
+22,35 @@ ports: # Definition of parameters in the configuration tab of the addon # parameters are available within the container as /data/options.json -# and should become picked up by the proxy - current workarround as a transfer script -# TODO: add further schema for remaining config parameters +# and should become picked up by the proxy - current workaround as a transfer script +# TODO: check again for multi hierarchie parameters # TODO: implement direct reading of the configuration file schema: inverters: - serial: str + monitor_sn: int? node_id: str suggested_area: str modbus_polling: bool - #strings: # leider funktioniert es nicht die folgenden 3 parameter im schema aufzulisten. möglicherweise wird die verschachtelung nicht unterstützt. + client_mode_host: str? + client_mode_port: int? + #strings: # leider funktioniert es nicht die folgenden 3 parameter im schema aufzulisten. möglicherweise wird die verschachtelung nicht unterstützt. # - string: str # type: str # manufacturer: str # daher diese variante - pv1_manufacturer: str - pv1_type: str - pv2_manufacturer: str - pv2_type: str + pv1.manufacturer: str? + pv1.type: str? + pv2.manufacturer: str? + pv2.type: str? + pv3.manufacturer: str? + pv3.type: str? + pv4.manufacturer: str? + pv4.type: str? + pv5.manufacturer: str? + pv5.type: str? + pv6.manufacturer: str? + pv6.type: str? tsun.enabled: bool solarman.enabled: bool inverters.allow_all: bool @@ -52,6 +65,16 @@ schema: ha.entity_prefix: str? #dito ha.proxy_node_id: str? #dito ha.proxy_unique_id: str? #dito + tsun.host: str? + solarman.host: str? + gen3plus.at_acl.tsun.allow: + - str + gen3plus.at_acl.tsun.block: + - str? + gen3plus.at_acl.mqtt.allow: + - str + gen3plus.at_acl.mqtt.block: + - str? # set default options for mandatory parameters # for optional parameters do not define any default value in the options dictionary. @@ -62,17 +85,19 @@ options: node_id: PV-Garage suggested_area: Garage modbus_polling: false - #strings: + # strings: # - string: PV1 # type: SF-M18/144550 # manufacturer: Shinefar # - string: PV2 # type: SF-M18/144550 # manufacturer: Shinefar - pv1_manufacturer: Shinefar - pv1_type: SF-M18/144550 - pv2_manufacturer: Shinefar - pv2_type: SF-M18/144550 + pv1.manufacturer: Shinefar + pv1.type: SF-M18/144550 + pv2.manufacturer: Shinefar + pv2.type: SF-M18/144550 tsun.enabled: true # set default solarman.enabled: true # set default inverters.allow_all: false # set default + gen3plus.at_acl.tsun.allow: ["AT+Z", "AT+UPURL", "AT+SUPDATE"] + gen3plus.at_acl.mqtt.allow: ["AT+"] \ No newline at end of file diff --git a/ha_addons/ha_addon/docker-bake.hcl b/ha_addons/ha_addon/docker-bake.hcl new file mode 100644 index 0000000..408a326 --- /dev/null +++ b/ha_addons/ha_addon/docker-bake.hcl @@ -0,0 +1,99 @@ +variable "IMAGE" { + default = "tsun-gen3-addon" +} +variable "VERSION" { + default = "0.0.0" +} +variable "MAJOR" { + default = "0" +} +variable "BUILD_DATE" { + default = "dev" +} +variable "BRANCH" { + default = "" +} +variable "DESCRIPTION" { + default = "This proxy enables a reliable connection between TSUN third generation inverters (eg. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations." +} + +target "_common" { + context = "." 
diff --git a/ha_addons/ha_addon/docker-bake.hcl b/ha_addons/ha_addon/docker-bake.hcl
new file mode 100644
index 0000000..408a326
--- /dev/null
+++ b/ha_addons/ha_addon/docker-bake.hcl
@@ -0,0 +1,99 @@
+variable "IMAGE" {
+  default = "tsun-gen3-addon"
+}
+variable "VERSION" {
+  default = "0.0.0"
+}
+variable "MAJOR" {
+  default = "0"
+}
+variable "BUILD_DATE" {
+  default = "dev"
+}
+variable "BRANCH" {
+  default = ""
+}
+variable "DESCRIPTION" {
+  default = "This proxy enables a reliable connection between TSUN third generation inverters (e.g. TSOL MS600, MS800, MS2000) and an MQTT broker to integrate the inverter into typical home automations."
+}
+
+target "_common" {
+  context = "."
+  dockerfile = "Dockerfile"
+  args = {
+    VERSION = "${VERSION}"
+    environment = "production"
+  }
+  attest = [
+    "type=provenance,mode=max",
+    "type=sbom,generator=docker/scout-sbom-indexer:latest"
+  ]
+  annotations = [
+    "index:io.hass.version=${VERSION}",
+    "index:io.hass.type=addon",
+    "index:io.hass.arch=armhf|aarch64|i386|amd64",
+    "index:org.opencontainers.image.title=TSUN-Proxy",
+    "index:org.opencontainers.image.authors=Stefan Allius",
+    "index:org.opencontainers.image.created=${BUILD_DATE}",
+    "index:org.opencontainers.image.version=${VERSION}",
+    "index:org.opencontainers.image.revision=${BRANCH}",
+    "index:org.opencontainers.image.description=${DESCRIPTION}",
+    "index:org.opencontainers.image.licenses=BSD-3-Clause",
+    "index:org.opencontainers.image.source=https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon"
+  ]
+  labels = {
+    "io.hass.version" = "${VERSION}"
+    "io.hass.type" = "addon"
+    "io.hass.arch" = "armhf|aarch64|i386|amd64"
+    "org.opencontainers.image.title" = "TSUN-Proxy"
+    "org.opencontainers.image.authors" = "Stefan Allius"
+    "org.opencontainers.image.created" = "${BUILD_DATE}"
+    "org.opencontainers.image.version" = "${VERSION}"
+    "org.opencontainers.image.revision" = "${BRANCH}"
+    "org.opencontainers.image.description" = "${DESCRIPTION}"
+    "org.opencontainers.image.licenses" = "BSD-3-Clause"
+    "org.opencontainers.image.source" = "https://github.com/s-allius/tsun-gen3-proxy/ha_addons/ha_addon"
+  }
+  output = [
+    "type=image,push=true"
+  ]
+
+  no-cache = false
+  platforms = ["linux/amd64", "linux/arm64", "linux/arm/v7"]
+}
+
+target "_debug" {
+  args = {
+    LOG_LVL = "DEBUG"
+    environment = "dev"
+  }
+}
+target "_prod" {
+  args = {
+  }
+}
+target "debug" {
+  inherits = ["_common", "_debug"]
+  tags = ["${IMAGE}:debug"]
+}
+
+target "dev" {
+  inherits = ["_common"]
+  tags = ["${IMAGE}:dev"]
+}
+
+target "preview" {
+  inherits = ["_common", "_prod"]
+  tags = ["${IMAGE}:preview", "${IMAGE}:${VERSION}"]
+}
+
+target "rc" {
+  inherits = ["_common", "_prod"]
+  tags = ["${IMAGE}:rc", "${IMAGE}:${VERSION}"]
+}
+
+target "rel" {
+  inherits = ["_common", "_prod"]
+  tags = ["${IMAGE}:latest", "${IMAGE}:${MAJOR}", "${IMAGE}:${VERSION}"]
+  no-cache = true
+}
diff --git a/ha_addon/icon.png b/ha_addons/ha_addon/icon.png
similarity index 100%
rename from ha_addon/icon.png
rename to ha_addons/ha_addon/icon.png
diff --git a/ha_addon/logo.png b/ha_addons/ha_addon/logo.png
similarity index 100%
rename from ha_addon/logo.png
rename to ha_addons/ha_addon/logo.png
diff --git a/ha_addon/rootfs/run.sh b/ha_addons/ha_addon/rootfs/run.sh
similarity index 73%
rename from ha_addon/rootfs/run.sh
rename to ha_addons/ha_addon/rootfs/run.sh
index d937ac8..5329d6f 100755
--- a/ha_addon/rootfs/run.sh
+++ b/ha_addons/ha_addon/rootfs/run.sh
@@ -23,12 +23,13 @@ fi
 
 cd /home || exit
-
-echo "Erstelle config.toml"
-python3 create_config_toml.py
-
+# Create folders for log and config
+mkdir -p proxy/log
+mkdir -p proxy/config
 
 cd /home/proxy || exit
 
-echo "Starte Webserver"
-python3 server.py
+export VERSION=$(cat /proxy-version.txt)
+
+echo "Start proxy server..."
+python3 server.py --json_config=/data/options.json
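run.sh above now exports VERSION from /proxy-version.txt and launches the proxy with --json_config=/data/options.json. The diff does not show how server.py consumes these values, so the following is only a hedged sketch of an entry point that would match that call; apart from the --json_config flag taken from run.sh, every name here is an assumption, not the proxy's real code.

    import argparse
    import json
    import os

    def parse_args() -> argparse.Namespace:
        parser = argparse.ArgumentParser(description='TSUN proxy entry point (sketch)')
        # run.sh passes --json_config=/data/options.json when running as a Home Assistant add-on
        parser.add_argument('--json_config', default=None,
                            help='path to the Home Assistant options.json')
        return parser.parse_args()

    def main() -> None:
        args = parse_args()
        version = os.environ.get('VERSION', 'unknown')  # exported by run.sh from /proxy-version.txt
        options = {}
        if args.json_config and os.path.isfile(args.json_config):
            with open(args.json_config, encoding='utf-8') as file:
                options = json.load(file)
        print(f'starting proxy {version} with {len(options)} add-on options')

    if __name__ == '__main__':
        main()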
diff --git a/ha_addon/translations/en.yaml b/ha_addons/ha_addon/translations/en.yaml
similarity index 64%
rename from ha_addon/translations/en.yaml
rename to ha_addons/ha_addon/translations/en.yaml
index 99e0501..9331e48 100755
--- a/ha_addon/translations/en.yaml
+++ b/ha_addons/ha_addon/translations/en.yaml
@@ -2,7 +2,7 @@ configuration:
   inverters:
     name: Inverters
-    description: >-
+    description: >+
       For each GEN3 inverter, the serial number of the inverter must be mapped
       to an MQTT definition. To do this, the corresponding configuration block
       is started with <16-digit serial number> so that all subsequent parameters
       are assigned
@@ -11,6 +11,7 @@ configuration:
       The serial numbers of all GEN3 inverters start with `R17`!
 
+      monitor_sn        # The GEN3PLUS "Monitoring SN:"
       node_id           # MQTT replacement for inverters serial number
       suggested_area    # suggested installation area for home-assistant
       modbus_polling    # Disable optional MODBUS polling
@@ -18,21 +19,28 @@ configuration:
       pv2               # Optional, PV module descr
 
   tsun.enabled:
-    name: Connection to TSUN Cloud
+    name: Connection to TSUN Cloud - for GEN3 inverters only
     description: >-
-      disable connecting to the tsun cloud avoids updates.
-      The Inverter become isolated from Internet if switched on.
+      Switch the connection to the TSUN cloud on or off.
+      This connection is only required if you want to send data to the TSUN cloud,
+      e.g. to use the TSUN apps or receive firmware updates.
+
+      on - normal proxy operation
+      off - the inverter is isolated from the Internet
   solarman.enabled:
-    name: Connection to Solarman Cloud
+    name: Connection to Solarman Cloud - for GEN3PLUS inverters only
     description: >-
-      disables connecting to the Solarman cloud avoids updates.
-      The Inverter become isolated from Internet if switched on.
+      Switch the connection to the Solarman cloud on or off.
+      This connection is only required if you want to send data to the Solarman cloud,
+      e.g. to use the Solarman apps or receive firmware updates.
+
+      on - normal proxy operation
+      off - the inverter is isolated from the Internet
   inverters.allow_all:
     name: Allow all connections from all inverters
     description: >-
       The proxy only usually accepts connections from known inverters.
-      This can be switched off for test purposes and unknown serial
-      numbers are also accepted.
+      Switch this on for test purposes to also accept unknown serial numbers.
   mqtt.host:
     name: MQTT Broker Host
     description: >-
@@ -59,6 +67,17 @@ configuration:
     name: MQTT node id, for the proxy_node_id
   ha.proxy_unique_id:
     name: MQTT unique id, to identify a proxy instance
+  tsun.host:
+    name: TSUN Cloud Host
+    description: >-
+      Hostname or IP address of the TSUN cloud. If not set, the add-on connects to the
+      default cloud host logger.talent-monitoring.com
+  solarman.host:
+    name: Solarman Cloud Host
+    description: >-
+      Hostname or IP address of the Solarman cloud. If not set, the add-on connects to the
+      default cloud host iot.talent-monitoring.com
+
 network:
   8127/tcp: x...
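The translation texts above describe tsun.enabled / solarman.enabled as switches that either keep the inverter connected to the vendor cloud or isolate it, and tsun.host / solarman.host as optional overrides that default to logger.talent-monitoring.com and iot.talent-monitoring.com. A minimal sketch of that decision logic as the descriptions imply it; the function and option names are illustrative, not the proxy's real API.

    DEFAULT_CLOUD = {
        'tsun': 'logger.talent-monitoring.com',   # GEN3 inverters
        'solarman': 'iot.talent-monitoring.com',  # GEN3PLUS inverters
    }

    def cloud_endpoint(options: dict, vendor: str) -> str | None:
        """Return the cloud host to forward to, or None if the inverter stays isolated."""
        if not options.get(f'{vendor}.enabled', True):
            return None  # off: no cloud connection, hence no firmware updates
        return options.get(f'{vendor}.host') or DEFAULT_CLOUD[vendor]

    if __name__ == '__main__':
        print(cloud_endpoint({'tsun.enabled': True}, 'tsun'))           # logger.talent-monitoring.com
        print(cloud_endpoint({'solarman.enabled': False}, 'solarman'))  # None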
diff --git a/ha_addons/repository.yaml b/ha_addons/repository.yaml
new file mode 100644
index 0000000..42a9fc5
--- /dev/null
+++ b/ha_addons/repository.yaml
@@ -0,0 +1,3 @@
+name: TSUN-Proxy
+url: https://github.com/s-allius/tsun-gen3-proxy/ha_addons
+maintainer: Stefan Allius
\ No newline at end of file
diff --git a/proxy.c4 b/proxy.c4
new file mode 100644
index 0000000..9ea402f
--- /dev/null
+++ b/proxy.c4
@@ -0,0 +1,20 @@
+model {
+  extend home.logger.proxy {
+    component webserver 'http server'
+    component inverter 'inverter'
+    component local 'local connection'
+    component remote 'remote connection'
+    component r-ifc 'async-ifc'
+    component l-ifc 'async-ifc'
+    component prot 'Protocol' 'SolarmanV5 or Talent'
+    component config 'config' 'reads the file config.toml'
+    component mqtt
+    inverter -> local
+    inverter -> remote
+    remote -> r-ifc
+    remote -> prot
+    local -> l-ifc
+    local -> prot
+    prot -> mqtt
+  }
+}
\ No newline at end of file
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..aafab51
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,8 @@
+# pytest.ini or .pytest.ini
+[pytest]
+minversion = 8.0
+addopts = -ra -q --durations=5
+pythonpath = app/src app/tests ha_addons/ha_addon/rootfs
+testpaths = app/tests ha_addons/ha_addon/tests
+asyncio_default_fixture_loop_scope = function
+asyncio_mode = strict
\ No newline at end of file
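pytest.ini above enables pytest-asyncio's strict mode with a function-scoped default event loop, so coroutine tests must be marked explicitly. A minimal example of a test that would run under this configuration; it assumes pytest-asyncio is installed, and the coroutine under test is made up for illustration.

    import asyncio

    import pytest

    async def read_version(path: str) -> str:
        # stand-in for an async helper of the proxy; purely illustrative
        await asyncio.sleep(0)
        return '0.0.0'

    @pytest.mark.asyncio  # required because asyncio_mode = strict
    async def test_read_version() -> None:
        assert await read_version('/proxy-version.txt') == '0.0.0'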