From 32287a74a9fa5fad8f98735ab887c5983a3b8740 Mon Sep 17 00:00:00 2001 From: BjoernAtBosch Date: Mon, 13 Jun 2022 13:24:25 +0200 Subject: [PATCH] Initial commit of VAL services --- .bandit | 2 + .clang-format | 39 + .dapr/components/.gitignore | 0 .dapr/config.yaml | 25 + .devcontainer/Dockerfile | 48 + .devcontainer/devcontainer.json | 24 + .dockerignore | 10 + .flake8 | 8 + .gitattributes | 2 + .github/dependabot.yml | 50 + .github/workflows/README.md | 37 + .github/workflows/dev_container_build.yml | 70 + .../dev_container_build_prerelease.yml | 27 + .github/workflows/doc/ci.drawio | 234 ++ .github/workflows/doc/ci_workflow.svg | 1 + .github/workflows/hvac_service.yml | 77 + .github/workflows/integration_test.yml | 105 + .github/workflows/lint.yml | 105 + .github/workflows/release.yml | 140 + .github/workflows/release_prepare.yml | 45 + .github/workflows/seat_service_build.yml | 70 + .github/workflows/seat_service_docu_build.yml | 56 + .github/workflows/seat_service_release.yml | 129 + .../workflows/seat_service_seatctrl_test.yml | 71 + .github/workflows/vulnerabilityscan.yml | 70 + .gitignore | 9 + .mypy.ini | 6 + .vscode/README.md | 29 + .vscode/scripts/clean-binaries.sh | 33 + .vscode/scripts/ensure-dapr.sh | 44 + .vscode/scripts/exec-check.sh | 21 + .vscode/scripts/k3d-uninstall.sh | 28 + .vscode/scripts/run-hvac-cli.sh | 59 + .vscode/scripts/run-hvacservice.sh | 64 + .vscode/scripts/run-integrationtest.sh | 35 + .vscode/scripts/run-seatservice-cli.sh | 57 + .vscode/scripts/run-seatservice.sh | 73 + .vscode/scripts/run-vehicledatabroker-cli.sh | 46 + .vscode/scripts/run-vehicledatabroker.sh | 58 + .vscode/tasks.json | 291 ++ CONTRIBUTING.md | 65 + LICENSE | 478 ++- NOTICE-3RD-PARTY-CONTENT.md | 52 + NOTICE.md | 33 + README.md | 148 + hvac_service/.gitignore | 4 + hvac_service/Dockerfile | 71 + hvac_service/README.md | 31 + hvac_service/docker-build.sh | 102 + hvac_service/hvacservice.py | 261 ++ .../proto/sdv/edge/comfort/hvac/v1/hvac.proto | 78 + 
hvac_service/requirements-dev.txt | 4 + hvac_service/requirements.txt | 3 + hvac_service/sdv/__init__.py | 0 hvac_service/sdv/databroker/__init__.py | 0 hvac_service/sdv/databroker/v1/__init__.py | 0 hvac_service/sdv/databroker/v1/broker_pb2.py | 145 + .../sdv/databroker/v1/broker_pb2_grpc.py | 182 ++ .../sdv/databroker/v1/collector_pb2.py | 214 ++ .../sdv/databroker/v1/collector_pb2_grpc.py | 214 ++ hvac_service/sdv/databroker/v1/types_pb2.py | 217 ++ .../sdv/databroker/v1/types_pb2_grpc.py | 3 + hvac_service/sdv/edge/__init__.py | 0 hvac_service/sdv/edge/comfort/__init__.py | 0 .../sdv/edge/comfort/hvac/__init__.py | 0 .../sdv/edge/comfort/hvac/v1/__init__.py | 0 .../sdv/edge/comfort/hvac/v1/hvac_pb2.py | 91 + .../sdv/edge/comfort/hvac/v1/hvac_pb2_grpc.py | 158 + hvac_service/testclient.py | 116 + hvac_service/update-protobuf.sh | 56 + integration_test/.gitignore | 1 + integration_test/README.md | 95 + integration_test/__init__.py | 0 integration_test/broker_subscribe.py | 483 +++ .../gen_proto/sdv/databroker/v1/broker_pb2.py | 114 + .../sdv/databroker/v1/broker_pb2.pyi | 145 + .../sdv/databroker/v1/broker_pb2_grpc.py | 146 + .../sdv/databroker/v1/collector_pb2.py | 163 + .../sdv/databroker/v1/collector_pb2.pyi | 190 ++ .../sdv/databroker/v1/collector_pb2_grpc.py | 176 ++ .../gen_proto/sdv/databroker/v1/types_pb2.py | 175 ++ .../gen_proto/sdv/databroker/v1/types_pb2.pyi | 375 +++ .../sdv/databroker/v1/types_pb2_grpc.py | 4 + integration_test/it-config | 52 + integration_test/it-seat-move.sh | 66 + integration_test/it-setup.sh | 328 +++ integration_test/requirements-dev.txt | 12 + integration_test/requirements.txt | 6 + integration_test/setup.py | 16 + integration_test/task-seat-move.sh | 52 + integration_test/test_feeder.py | 150 + integration_test/test_val_seat.py | 247 ++ integration_test/update-protobuf.sh | 53 + integration_test/vdb_helper.py | 315 ++ prepare_release.sh | 46 + seat_service/.gitignore | 6 + seat_service/CMakeLists.txt | 39 + 
seat_service/Dockerfile | 61 + seat_service/README.md | 108 + seat_service/build-debug.sh | 38 + seat_service/build-docu.sh | 21 + seat_service/build-release.sh | 64 + seat_service/build-seatctrl.sh | 54 + seat_service/clang-format.sh | 17 + seat_service/conanfile.txt | 42 + seat_service/docker-build.sh | 112 + seat_service/docs/SeatService.drawio | 1 + .../docs/assets/SeatService_context.svg | 1 + .../docs/assets/SeatService_internal.svg | 1 + seat_service/docs/doxygen/doxyfile | 2615 +++++++++++++++++ .../doxygen-awesome-sidebar-only-darkmode.css | 32 + seat_service/docs/doxygen/doxygen-awesome.css | 1475 ++++++++++ .../sdv/edge/comfort/seats/v1/seats.proto | 143 + seat_service/src/CMakeLists.txt | 35 + .../src/bin/seat_service/CMakeLists.txt | 27 + seat_service/src/bin/seat_service/main.cc | 150 + .../src/bin/seat_service/seat_data_feeder.cc | 104 + .../src/bin/seat_service/seat_data_feeder.h | 56 + .../src/examples/broker_feeder/CMakeLists.txt | 28 + .../examples/broker_feeder/broker_feeder.cc | 59 + .../src/examples/can_send/CMakeLists.txt | 25 + .../src/examples/can_send/can_send.cc | 68 + .../src/examples/can_subscribe/CMakeLists.txt | 25 + .../examples/can_subscribe/can_subscribe.cc | 61 + .../examples/seat_svc_client/CMakeLists.txt | 33 + .../seat_svc_client/seat_svc_client.cc | 358 +++ .../src/lib/broker_feeder/CMakeLists.txt | 92 + .../src/lib/broker_feeder/create_datapoint.h | 86 + .../lib/broker_feeder/data_broker_feeder.cc | 342 +++ .../lib/broker_feeder/data_broker_feeder.h | 97 + .../src/lib/can_helpers/CMakeLists.txt | 22 + .../src/lib/can_helpers/can_bcm_interface.cc | 205 ++ .../src/lib/can_helpers/can_bcm_interface.h | 87 + .../src/lib/can_helpers/can_raw_socket.cc | 105 + .../src/lib/can_helpers/can_raw_socket.h | 61 + .../src/lib/grpc_services/CMakeLists.txt | 14 + .../seats_grpc_service/CMakeLists.txt | 91 + .../seats_grpc_service/seats_grpc_service.cc | 155 + .../seats_grpc_service/seats_grpc_service.h | 56 + 
.../src/lib/seat_adjuster/CMakeLists.txt | 28 + .../src/lib/seat_adjuster/seat_adjuster.cc | 222 ++ .../src/lib/seat_adjuster/seat_adjuster.h | 69 + .../seat_controller/CMakeLists.txt | 147 + .../seat_adjuster/seat_controller/README.md | 42 + .../seat_controller/cmake/CodeCoverage.cmake | 708 +++++ .../lib/seat_adjuster/seat_controller/main.cc | 139 + .../seat_controller/seat_controller.cc | 911 ++++++ .../seat_controller/seat_controller.h | 305 ++ .../seat_controller/seat_ecu.dbc | 278 ++ .../seat_controller/tests/CMakeLists.txt | 125 + .../tests/cansim/CMakeLists.txt | 68 + .../seat_controller/tests/cansim/README.md | 29 + .../seat_controller/tests/cansim/cansim_lib.c | 513 ++++ .../seat_controller/tests/cansim/cansim_lib.h | 89 + .../seat_controller/tests/cansim/main.c | 140 + .../tests/cansim/seatadjuster_engine.c | 460 +++ .../tests/cansim/seatadjuster_engine.h | 140 + .../tests/integration_seatctrl.cc | 438 +++ .../tests/mock/mock_unix_socket.cc | 271 ++ .../tests/mock/mock_unix_socket.h | 52 + .../tests/test_seatctrl_api.cc | 719 +++++ .../seat_controller/tools/README.md | 98 + .../seat_controller/tools/can-decode | 29 + .../seat_controller/tools/cangen-SECU1_STAT | 30 + .../seat_controller/tools/cansim | 55 + .../seat_controller/tools/ecu-reset | 235 ++ .../seat_controller/tools/motor-dec | 34 + .../seat_controller/tools/motor-inc | 34 + .../seat_controller/tools/motor-stop | 17 + .../seat_controller/tools/motor-swipe | 38 + .../seat_controller/tools/setup-vcan | 26 + .../seat_controller/tools/sim-SECU1_STAT | 61 + .../seat_controller/tools/val_start.sh | 110 + seat_service/toolchains/target_aarch64_Debug | 25 + .../toolchains/target_aarch64_Release | 25 + seat_service/toolchains/target_x86_64_Debug | 9 + seat_service/toolchains/target_x86_64_Release | 9 + seat_service/vscode-conan.sh | 53 + setup.cfg | 35 + tools/Dockerfile | 89 + tools/check-license.sh | 46 + tools/check-scripts.sh | 39 + .../proto/sdv/databroker/v1/broker.proto | 87 + 
.../proto/sdv/databroker/v1/collector.proto | 111 + .../proto/sdv/databroker/v1/types.proto | 152 + whitelisted-licenses.txt | 0 186 files changed, 22769 insertions(+), 277 deletions(-) create mode 100644 .bandit create mode 100644 .clang-format create mode 100644 .dapr/components/.gitignore create mode 100644 .dapr/config.yaml create mode 100644 .devcontainer/Dockerfile create mode 100644 .devcontainer/devcontainer.json create mode 100644 .dockerignore create mode 100644 .flake8 create mode 100644 .gitattributes create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/README.md create mode 100644 .github/workflows/dev_container_build.yml create mode 100644 .github/workflows/dev_container_build_prerelease.yml create mode 100644 .github/workflows/doc/ci.drawio create mode 100644 .github/workflows/doc/ci_workflow.svg create mode 100644 .github/workflows/hvac_service.yml create mode 100644 .github/workflows/integration_test.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/release_prepare.yml create mode 100644 .github/workflows/seat_service_build.yml create mode 100644 .github/workflows/seat_service_docu_build.yml create mode 100644 .github/workflows/seat_service_release.yml create mode 100644 .github/workflows/seat_service_seatctrl_test.yml create mode 100644 .github/workflows/vulnerabilityscan.yml create mode 100644 .gitignore create mode 100644 .mypy.ini create mode 100644 .vscode/README.md create mode 100755 .vscode/scripts/clean-binaries.sh create mode 100755 .vscode/scripts/ensure-dapr.sh create mode 100755 .vscode/scripts/exec-check.sh create mode 100755 .vscode/scripts/k3d-uninstall.sh create mode 100755 .vscode/scripts/run-hvac-cli.sh create mode 100755 .vscode/scripts/run-hvacservice.sh create mode 100755 .vscode/scripts/run-integrationtest.sh create mode 100755 .vscode/scripts/run-seatservice-cli.sh create mode 100755 
.vscode/scripts/run-seatservice.sh create mode 100755 .vscode/scripts/run-vehicledatabroker-cli.sh create mode 100755 .vscode/scripts/run-vehicledatabroker.sh create mode 100644 .vscode/tasks.json create mode 100644 CONTRIBUTING.md create mode 100644 NOTICE-3RD-PARTY-CONTENT.md create mode 100644 NOTICE.md create mode 100644 README.md create mode 100644 hvac_service/.gitignore create mode 100644 hvac_service/Dockerfile create mode 100644 hvac_service/README.md create mode 100755 hvac_service/docker-build.sh create mode 100644 hvac_service/hvacservice.py create mode 100644 hvac_service/proto/sdv/edge/comfort/hvac/v1/hvac.proto create mode 100644 hvac_service/requirements-dev.txt create mode 100644 hvac_service/requirements.txt create mode 100644 hvac_service/sdv/__init__.py create mode 100644 hvac_service/sdv/databroker/__init__.py create mode 100644 hvac_service/sdv/databroker/v1/__init__.py create mode 100644 hvac_service/sdv/databroker/v1/broker_pb2.py create mode 100644 hvac_service/sdv/databroker/v1/broker_pb2_grpc.py create mode 100644 hvac_service/sdv/databroker/v1/collector_pb2.py create mode 100644 hvac_service/sdv/databroker/v1/collector_pb2_grpc.py create mode 100644 hvac_service/sdv/databroker/v1/types_pb2.py create mode 100644 hvac_service/sdv/databroker/v1/types_pb2_grpc.py create mode 100644 hvac_service/sdv/edge/__init__.py create mode 100644 hvac_service/sdv/edge/comfort/__init__.py create mode 100644 hvac_service/sdv/edge/comfort/hvac/__init__.py create mode 100644 hvac_service/sdv/edge/comfort/hvac/v1/__init__.py create mode 100644 hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2.py create mode 100644 hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2_grpc.py create mode 100755 hvac_service/testclient.py create mode 100755 hvac_service/update-protobuf.sh create mode 100644 integration_test/.gitignore create mode 100644 integration_test/README.md create mode 100644 integration_test/__init__.py create mode 100755 integration_test/broker_subscribe.py 
create mode 100644 integration_test/gen_proto/sdv/databroker/v1/broker_pb2.py create mode 100644 integration_test/gen_proto/sdv/databroker/v1/broker_pb2.pyi create mode 100644 integration_test/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py create mode 100644 integration_test/gen_proto/sdv/databroker/v1/collector_pb2.py create mode 100644 integration_test/gen_proto/sdv/databroker/v1/collector_pb2.pyi create mode 100644 integration_test/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py create mode 100644 integration_test/gen_proto/sdv/databroker/v1/types_pb2.py create mode 100644 integration_test/gen_proto/sdv/databroker/v1/types_pb2.pyi create mode 100644 integration_test/gen_proto/sdv/databroker/v1/types_pb2_grpc.py create mode 100755 integration_test/it-config create mode 100755 integration_test/it-seat-move.sh create mode 100755 integration_test/it-setup.sh create mode 100644 integration_test/requirements-dev.txt create mode 100644 integration_test/requirements.txt create mode 100644 integration_test/setup.py create mode 100755 integration_test/task-seat-move.sh create mode 100644 integration_test/test_feeder.py create mode 100644 integration_test/test_val_seat.py create mode 100755 integration_test/update-protobuf.sh create mode 100644 integration_test/vdb_helper.py create mode 100755 prepare_release.sh create mode 100644 seat_service/.gitignore create mode 100644 seat_service/CMakeLists.txt create mode 100644 seat_service/Dockerfile create mode 100644 seat_service/README.md create mode 100755 seat_service/build-debug.sh create mode 100755 seat_service/build-docu.sh create mode 100755 seat_service/build-release.sh create mode 100755 seat_service/build-seatctrl.sh create mode 100755 seat_service/clang-format.sh create mode 100644 seat_service/conanfile.txt create mode 100755 seat_service/docker-build.sh create mode 100644 seat_service/docs/SeatService.drawio create mode 100644 seat_service/docs/assets/SeatService_context.svg create mode 100644 
seat_service/docs/assets/SeatService_internal.svg create mode 100644 seat_service/docs/doxygen/doxyfile create mode 100644 seat_service/docs/doxygen/doxygen-awesome-sidebar-only-darkmode.css create mode 100644 seat_service/docs/doxygen/doxygen-awesome.css create mode 100644 seat_service/proto/sdv/edge/comfort/seats/v1/seats.proto create mode 100644 seat_service/src/CMakeLists.txt create mode 100644 seat_service/src/bin/seat_service/CMakeLists.txt create mode 100644 seat_service/src/bin/seat_service/main.cc create mode 100644 seat_service/src/bin/seat_service/seat_data_feeder.cc create mode 100644 seat_service/src/bin/seat_service/seat_data_feeder.h create mode 100644 seat_service/src/examples/broker_feeder/CMakeLists.txt create mode 100644 seat_service/src/examples/broker_feeder/broker_feeder.cc create mode 100644 seat_service/src/examples/can_send/CMakeLists.txt create mode 100644 seat_service/src/examples/can_send/can_send.cc create mode 100644 seat_service/src/examples/can_subscribe/CMakeLists.txt create mode 100644 seat_service/src/examples/can_subscribe/can_subscribe.cc create mode 100644 seat_service/src/examples/seat_svc_client/CMakeLists.txt create mode 100644 seat_service/src/examples/seat_svc_client/seat_svc_client.cc create mode 100644 seat_service/src/lib/broker_feeder/CMakeLists.txt create mode 100644 seat_service/src/lib/broker_feeder/create_datapoint.h create mode 100644 seat_service/src/lib/broker_feeder/data_broker_feeder.cc create mode 100644 seat_service/src/lib/broker_feeder/data_broker_feeder.h create mode 100644 seat_service/src/lib/can_helpers/CMakeLists.txt create mode 100644 seat_service/src/lib/can_helpers/can_bcm_interface.cc create mode 100644 seat_service/src/lib/can_helpers/can_bcm_interface.h create mode 100644 seat_service/src/lib/can_helpers/can_raw_socket.cc create mode 100644 seat_service/src/lib/can_helpers/can_raw_socket.h create mode 100644 seat_service/src/lib/grpc_services/CMakeLists.txt create mode 100644 
seat_service/src/lib/grpc_services/seats_grpc_service/CMakeLists.txt create mode 100644 seat_service/src/lib/grpc_services/seats_grpc_service/seats_grpc_service.cc create mode 100644 seat_service/src/lib/grpc_services/seats_grpc_service/seats_grpc_service.h create mode 100644 seat_service/src/lib/seat_adjuster/CMakeLists.txt create mode 100644 seat_service/src/lib/seat_adjuster/seat_adjuster.cc create mode 100644 seat_service/src/lib/seat_adjuster/seat_adjuster.h create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/CMakeLists.txt create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/README.md create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/cmake/CodeCoverage.cmake create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/main.cc create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/seat_controller.cc create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/seat_controller.h create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/seat_ecu.dbc create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/CMakeLists.txt create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/cansim/CMakeLists.txt create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/cansim/README.md create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/cansim/cansim_lib.c create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/cansim/cansim_lib.h create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/cansim/main.c create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/cansim/seatadjuster_engine.c create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/cansim/seatadjuster_engine.h create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/integration_seatctrl.cc create mode 100644 
seat_service/src/lib/seat_adjuster/seat_controller/tests/mock/mock_unix_socket.cc create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/mock/mock_unix_socket.h create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tests/test_seatctrl_api.cc create mode 100644 seat_service/src/lib/seat_adjuster/seat_controller/tools/README.md create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/can-decode create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/cangen-SECU1_STAT create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/cansim create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/ecu-reset create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/motor-dec create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/motor-inc create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/motor-stop create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/motor-swipe create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/setup-vcan create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/sim-SECU1_STAT create mode 100755 seat_service/src/lib/seat_adjuster/seat_controller/tools/val_start.sh create mode 100644 seat_service/toolchains/target_aarch64_Debug create mode 100644 seat_service/toolchains/target_aarch64_Release create mode 100644 seat_service/toolchains/target_x86_64_Debug create mode 100644 seat_service/toolchains/target_x86_64_Release create mode 100755 seat_service/vscode-conan.sh create mode 100644 setup.cfg create mode 100644 tools/Dockerfile create mode 100755 tools/check-license.sh create mode 100755 tools/check-scripts.sh create mode 100644 vehicle_data_broker/proto/sdv/databroker/v1/broker.proto create mode 100644 vehicle_data_broker/proto/sdv/databroker/v1/collector.proto create mode 100644 
vehicle_data_broker/proto/sdv/databroker/v1/types.proto create mode 100644 whitelisted-licenses.txt diff --git a/.bandit b/.bandit new file mode 100644 index 0000000..f783118 --- /dev/null +++ b/.bandit @@ -0,0 +1,2 @@ +[bandit] +skips: B101 diff --git a/.clang-format b/.clang-format new file mode 100644 index 0000000..e5f9551 --- /dev/null +++ b/.clang-format @@ -0,0 +1,39 @@ +# ******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# *******************************************************************************/ + +--- +Language: Cpp +BasedOnStyle: Google +ColumnLimit: 120 +IndentWidth: 4 +PenaltyBreakBeforeFirstCallParameter: 1 +PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakString: 1000 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +Standard: Cpp11 +TabWidth: 4 +UseTab: Never +BreakConstructorInitializers: BeforeComma +ConstructorInitializerAllOnOneLineOrOnePerLine: false +... 
+ +--- +Language: Proto +BasedOnStyle: Google +ColumnLimit: 120 +IndentWidth: 4 +UseTab: Never +AlignConsecutiveAssignments: true +--- diff --git a/.dapr/components/.gitignore b/.dapr/components/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/.dapr/config.yaml b/.dapr/config.yaml new file mode 100644 index 0000000..4009c5d --- /dev/null +++ b/.dapr/config.yaml @@ -0,0 +1,25 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +apiVersion: dapr.io/v1alpha1 +kind: Configuration +metadata: + name: config +spec: + # tracing: + # samplingRate: "15" + # zipkin: + # endpointAddress: http://localhost:9411/api/v2/spans + features: + - name: proxy.grpc + enabled: true diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..054fc73 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,48 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +# Download base image of Ubuntu +FROM ghcr.io/eclipse/kuksa.val.services/oci_kuksa-val-services-ci:prerelease + +# If you get proxy errors, make sure Docker is configured correctly. +# Hint: https://dev.to/zyfa/setup-the-proxy-for-dockerfile-building--4jc8 + +USER root + +# To prevent interactive shells +ENV DEBIAN_FRONTEND=noninteractive + +# Install basic utils needed inside devcontainer +RUN apt-get update && \ + apt-get install -qqy curl wget zip && \ + apt-get install -qqy git && \ + apt-get install -qqy bash && \ + apt-get install -qqy xz-utils && \ + apt-get install -qqy apt-transport-https + +# Set the locale +RUN apt-get update && \ + apt-get install -qqy locales locales-all +ENV LC_ALL en_US.UTF-8 +ENV LANG en_US.UTF-8 +ENV LANGUAGE en_US.UTF-8 + +# Set timezone inside Docker container +ENV TZ=UTC +RUN echo $TZ > /etc/timezone && \ + apt-get update && apt-get install -qqy tzdata && \ + rm /etc/localtime && \ + dpkg-reconfigure -f noninteractive tzdata + +#TODO: fix and uncomment: This is needed for local development under linux +#USER dockeruser diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..6f06c2f --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,24 @@ +{ + "name": "oci_kuksa-val-services-dev", + "build": { + "dockerfile": "Dockerfile" + }, + "runArgs": ["--init", "--privileged"], + "settings": { + "terminal.integrated.defaultProfile.linux": "bash", + "http.proxyStrictSSL": false + }, + "features": { + "docker-in-docker": { + "version": "latest", + "moby": true + } + }, + "extensions": [ + "yzhang.markdown-all-in-one", + "arturock.gitstash", + 
"eamodio.gitlens", + "streetsidesoftware.code-spell-checker", + "twxs.cmake" + ] +} diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..7c5331c --- /dev/null +++ b/.dockerignore @@ -0,0 +1,10 @@ +.devcontainer/ +.github/ +.git +.vscode/ +tools/ +report/ +*.tar +**/build/ +**/build_seat_controller/ +**/target/ diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..dc1fd6c --- /dev/null +++ b/.flake8 @@ -0,0 +1,8 @@ +[flake8] +max-line-length = 120 +exclude = + .git, + __pycache__, + .venv, + # Ignore generated *_pb2.py files + gen_proto, *_pb2* diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..e757139 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +*.sh text eol=lf +seat_service/src/lib/seat_adjuster/seat_controller/tools/* text eol=lf \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..b9952cf --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,50 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +# This configures Dependabot version updates, to keep dependencies up to date. 
+# Dependabot security updates are configured differently, +# see: https://docs.github.com/en/code-security/dependabot/dependabot-security-updates/about-dependabot-security-updates + +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/hvac_service" + schedule: + interval: "monthly" + + - package-ecosystem: "pip" + directory: "/integration_test" + schedule: + interval: "monthly" + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + + - package-ecosystem: "docker" + directory: "/seat_service" + schedule: + interval: "monthly" + + - package-ecosystem: "docker" + directory: "/hvac_service" + schedule: + interval: "monthly" + + - package-ecosystem: "docker" + directory: "/tools" + schedule: + interval: "monthly" +# conan package-ecosystem is for C++ is not supported yet diff --git a/.github/workflows/README.md b/.github/workflows/README.md new file mode 100644 index 0000000..68c9c06 --- /dev/null +++ b/.github/workflows/README.md @@ -0,0 +1,37 @@ +# Continuous integration + +## Naming of workflows + +* for sub components like hvac_service and seat_service use pattern: + **\_\ eg: hvac_service** + +* for overall tasks use pattern: + **\ eg: release** + +## Naming of Tags + +* Release tags need to be in the following form: + **v\.\.\\* eg: v1.2.1, v2.0.0alpha** +## Workflow and branching model +![ci_workflow](./doc/ci_workflow.svg) + + +## Naming of artifacts + +Artifacts result from the workflow are used in other workflows and might be published directly to a release page, therefore their naming matters. +In general the naming shall follow: **(_)_.tar/zip** + +* Application binaries: **bin_\__.tar**, e.g. bin_vservice-seat_x86_64_release.tar.gz +* Containers layers: **oci_\.tar**, e.g. oci_vservice-seat.tar +* Test result reports: **report_test_\**, e.g. report_test_vservice-seat-ctl +* Test coverage reports: **report_codecov_\.\**, e.g. 
report_codecov_vservice-seat-ctl +* Documentation: **docu_\.\**, e.g: docu_vservice-seat + +## How to create a new release +1. Adapt the version tags in all needed files, (e.g.: for v0.15.0) via executing + * ``./prepare_release.sh 0.15.0`` +2. tag a main branch version with a release tag + * This trigger a github workflow which automatically creates a draft release +3. Publish the release on the repo webpage + * navigate to the repo webpage -> Release -> edited the create draft -> Publish + diff --git a/.github/workflows/dev_container_build.yml b/.github/workflows/dev_container_build.yml new file mode 100644 index 0000000..575d168 --- /dev/null +++ b/.github/workflows/dev_container_build.yml @@ -0,0 +1,70 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: dev-container-build + +on: + push: + paths: + - "tools/Dockerfile" + branches: + - main + workflow_dispatch: + inputs: + tag: + description: "tag of the docker container, eg:latest, v0.2.1" + required: false + default: "latest" + workflow_call: + inputs: + tag: + description: "tag of the docker container, eg:latest, v0.2.1" + type: string + required: false + default: "latest" + +jobs: + build: + runs-on: [ubuntu-latest] + + steps: + - name: adding github workspace as safe directory + run: git config --global --add safe.directory $GITHUB_WORKSPACE + + - uses: actions/checkout@v3 + + - id: repository-name-adjusted + name: Prepare repository name in lower case for docker 
upload. This supports repository names in mixed case + uses: ASzc/change-string-case-action@v2 + with: + string: ${{ github.repository }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Runs docker Build + - name: Run docker Build + # github.event.inputs.tag - used when triggered by workflow_dispatch + # inputs.tag - used when triggered by workflow_call + run: | + docker build --build-arg version=latest --build-arg USER_ID=0 --build-arg GROUP_ID=0 -t ghcr.io/${{ steps.repository-name-adjusted.outputs.lowercase }}/oci_kuksa-val-services-ci:${{ github.event.inputs.tag }}${{ inputs.tag }} -f tools/Dockerfile . + # push docker + - name: Push docker + # github.event.inputs.tag - used when triggered by workflow_dispatch + # inputs.tag - used when triggered by workflow_call + run: | + docker push ghcr.io/${{ steps.repository-name-adjusted.outputs.lowercase }}/oci_kuksa-val-services-ci:${{ github.event.inputs.tag }}${{ inputs.tag }} diff --git a/.github/workflows/dev_container_build_prerelease.yml b/.github/workflows/dev_container_build_prerelease.yml new file mode 100644 index 0000000..08aaa11 --- /dev/null +++ b/.github/workflows/dev_container_build_prerelease.yml @@ -0,0 +1,27 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: dev-container-build-prerelease + +on: + push: + paths: + - "tools/Dockerfile" + - ".github/workflows/dev_container_build_prerelease.yml" + workflow_dispatch: + +jobs: + call-dev-container-build: + uses: ./.github/workflows/dev_container_build.yml + with: + tag: "prerelease" diff --git a/.github/workflows/doc/ci.drawio b/.github/workflows/doc/ci.drawio new file mode 100644 index 0000000..9b512d5 --- /dev/null +++ b/.github/workflows/doc/ci.drawio @@ -0,0 +1,234 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.github/workflows/doc/ci_workflow.svg b/.github/workflows/doc/ci_workflow.svg new file mode 100644 index 0000000..c7b399a --- /dev/null +++ b/.github/workflows/doc/ci_workflow.svg @@ -0,0 +1 @@ +
vehicle abstraction layer repo (this)
vehicle abstraction layer repo (this)
main
main
feature 1
feature 1
PR
PR
new feature added
new feature added
branches
branch...
v0.0.1
v0.0.1
feature 2
feature 2
PR
PR
prepare release:
define version
add changelog
prepare release:...
Viewer does not support full SVG 1.1
\ No newline at end of file diff --git a/.github/workflows/hvac_service.yml b/.github/workflows/hvac_service.yml new file mode 100644 index 0000000..94b3004 --- /dev/null +++ b/.github/workflows/hvac_service.yml @@ -0,0 +1,77 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: hvac-service + +on: + pull_request: + paths: + - ".github/workflows/hvac_service.yml" + - "hvac_service/**" + workflow_call: + workflow_dispatch: + +jobs: + build-hvac-service-image: + name: "Build multi-arch image" + runs-on: ubuntu-latest + + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + + - name: Retrieve build binaries + uses: actions/download-artifact@v3 + with: + path: ${{github.workspace}} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - id: repository-name-adjusted + name: Make repository name in lower case for docker upload. 
+ uses: ASzc/change-string-case-action@v2 + with: + string: ${{ github.repository }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: "Build image" + id: image_build + uses: docker/build-push-action@v2 + with: + pull: true + push: false + outputs: | + type=oci,dest=./oci_vservice-hvac.tar + context: ./hvac_service + file: ./hvac_service/Dockerfile + platforms: linux/amd64, linux/arm64 + tags: ${{ github.sha }} + labels: | + org.opencontainers.image.source=https://github.com/${{steps.repository-name-adjusted.outputs.lowercase}} + + - name: Temporarily save Docker image + uses: actions/upload-artifact@v3 + with: + name: oci_vservice-hvac.tar + path: ${{github.workspace}}/oci_vservice-hvac.tar + retention-days: 1 diff --git a/.github/workflows/integration_test.yml b/.github/workflows/integration_test.yml new file mode 100644 index 0000000..09c582a --- /dev/null +++ b/.github/workflows/integration_test.yml @@ -0,0 +1,105 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: integration-test + +on: + workflow_dispatch: + push: + # Run only on branches/commits and not tags + branches: + - main + pull_request: + branches: + - main + +jobs: + setup: + runs-on: ubuntu-latest + + steps: + - name: Adding github workspace as safe directory + run: | + git config --global --add safe.directory $GITHUB_WORKSPACE + git config --global user.email "github-automation@users.noreply.github.com" + git config --global user.name "Github Automation" + + - name: Checkout Repository + uses: actions/checkout@v3 + + - id: repository-name-adjusted + name: Prepare repository name in lower case for docker upload. This supports repository names in mixed case + uses: ASzc/change-string-case-action@v2 + with: + string: ${{ github.repository }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Setup VAL containers + shell: bash + run: | + ${{github.workspace}}/integration_test/it-setup.sh init + ${{github.workspace}}/integration_test/it-setup.sh start + ${{github.workspace}}/integration_test/it-setup.sh status + echo "$ docker image ls" + docker image ls + echo "$ docker ps -a" + docker ps -a + echo "$ docker inspect val-int" + docker inspect val-int + + - name: Run Integration Tests + shell: bash + env: + # force using it-setup.sh (testing with ghcr.io tags), DAPR standalone mode does not work in CI + USE_DAPR: "0" + run: | + pip install -q -r 
integration_test/requirements.txt + pip install -q -r integration_test/requirements-dev.txt + pip install -e integration_test/ + pytest -s ./integration_test --asyncio-mode=auto --override-ini \ + junit_family=xunit1 --junit-xml=./results/IntegrationTest/junit.xml \ + --log-file=./results/IntegrationTest/integration.log + + - name: Publish Integration Test Results + uses: EnricoMi/publish-unit-test-result-action@v1 + if: always() + with: + files: ./results/IntegrationTest/junit.xml + + - name: Upload Integration Test Logs + uses: actions/upload-artifact@v3 + if: always() + with: + name: integration_test.log + path: | + ${{github.workspace}}/results/IntegrationTest/integration.log + + - name: Cleanup Integration Tests + shell: bash + run: | + + ${{github.workspace}}/integration_test/it-setup.sh cleanup --force + docker image ls + docker ps -a diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..ca3ef2c --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,105 @@ +--- +# MegaLinter GitHub Action configuration file +# More info at https://megalinter.github.io +name: lint + +on: + # Trigger mega-linter at every push. 
Action will also be visible from Pull Requests to main + pull_request: + branches: [main] + +env: # Comment env block if you do not want to apply fixes + # Apply linter fixes configuration + APPLY_FIXES: all # When active, APPLY_FIXES must also be defined as environment variable (in github/workflows/mega-linter.yml or other CI tool) + APPLY_FIXES_EVENT: all # Decide which event triggers application of fixes in a commit or a PR (pull_request, push, all) + APPLY_FIXES_MODE: commit # If APPLY_FIXES is used, defines if the fixes are directly committed (commit) or posted in a PR (pull_request) + DEFAULT_BRANCH: main + #TODO: Enable the following DOCKERFILE_DOCKERFILELINT, DOCKERFILE_HADOLINT, ACTION_ACTIONLINT + # Disable MyPy linter + DISABLE_LINTERS: SPELL_CSPELL,COPYPASTE_JSCPD,DOCKERFILE_DOCKERFILELINT,DOCKERFILE_HADOLINT,ACTION_ACTIONLINT,C_CPPLINT,CPP_CPPLINT,RUST_CLIPPY,PROTOBUF_PROTOLINT,PYTHON_MYPY + MARKDOWN_MARKDOWN_TABLE_FORMATTER_FILTER_REGEX_EXCLUDE: NOTICE-3RD-PARTY-CONTENT.md + CSS_FILTER_REGEX_EXCLUDE: '(doxygen-awesome.*\.css)' + GIT_FILTER_REGEX_EXCLUDE: '(doxygen-awesome.*\.css)' + +concurrency: + group: ${{ github.ref }}-${{ github.workflow }} + cancel-in-progress: true + +jobs: + build: + name: MegaLinter + runs-on: ubuntu-latest + + steps: + - name: Adding github workspace as safe directory + run: | + git config --global --add safe.directory $GITHUB_WORKSPACE + git config --global user.email "github-automation@users.noreply.github.com" + git config --global user.name "Github Automation" + + # Git Checkout + - name: Checkout Code + uses: actions/checkout@v3 + with: + token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} + fetch-depth: 0 + + # MegaLinter + - name: MegaLinter + id: ml + # You can override MegaLinter flavor used to have faster performances + # More info at https://megalinter.github.io/flavors/ + uses: megalinter/megalinter@v5 + env: + # All available variables are described in documentation + # https://megalinter.github.io/configuration/ 
+ VALIDATE_ALL_CODEBASE: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} # Validates all source when push on main, else just the git diff with main. Override with true if you always want to lint all sources + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # Ignore autogenerated python files + PYTHON_FLAKE8_FILTER_REGEX_EXCLUDE: gen_proto + PYTHON_FLAKE8_CONFIG_FILE: .flake8 + PYTHON_BLACK_FILTER_REGEX_EXCLUDE: gen_proto + PYTHON_ISORT_FILTER_REGEX_EXCLUDE: gen_proto + SPELL_MISSPELL_FILTER_REGEX_EXCLUDE: gen_proto + PYTHON_BANDIT_FILTER_REGEX_EXCLUDE: (integration_test\|test_) + # PYTHON_BANDIT_DISABLE_ERRORS: true + JSON_JSONLINT_FILTER_REGEX_EXCLUDE: .vscode + # ADD YOUR CUSTOM ENV VARIABLES HERE OR DEFINE THEM IN A FILE .mega-linter.yml AT THE ROOT OF YOUR REPOSITORY + # DISABLE: COPYPASTE,SPELL # Uncomment to disable copy-paste and spell checks + + # Upload MegaLinter artifacts + - name: Archive production artifacts + if: ${{ success() }} || ${{ failure() }} + uses: actions/upload-artifact@v3 + with: + name: MegaLinter reports + path: | + report + mega-linter.log + + # Create pull request if applicable (for now works only on PR from same repository, not from forks) + - name: Create Pull Request with applied fixes + id: cpr + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) + uses: peter-evans/create-pull-request@v4.0.2 + with: + token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} + commit-message: "[MegaLinter] Apply linters automatic fixes" + title: "[MegaLinter] Apply linters automatic fixes" + labels: bot + - name: Create PR output + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'pull_request' && (github.event_name 
== 'push' || github.event.pull_request.head.repo.full_name == github.repository) + run: | + echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" + echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" + + # Push new commit if applicable (for now works only on PR from same repository, not from forks) + - name: Prepare commit + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) + run: sudo chown -Rc $UID .git/ + - name: Commit and push applied linter fixes + if: steps.ml.outputs.has_updated_sources == 1 && (env.APPLY_FIXES_EVENT == 'all' || env.APPLY_FIXES_EVENT == github.event_name) && env.APPLY_FIXES_MODE == 'commit' && github.ref != 'refs/heads/main' && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name == github.repository) + uses: stefanzweifel/git-auto-commit-action@v4 + with: + branch: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref }} + commit_message: "[MegaLinter] Apply linters fixes" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..8448edb --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,140 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: release + +on: + # workflow_dispatch: #TODO: input versions manually + push: + tags: + - "v*.*.*" + +jobs: + call_dev_container_build: + uses: ./.github/workflows/dev_container_build.yml + with: + tag: ${{github.ref_name}} + call_seat_service_release: + uses: ./.github/workflows/seat_service_release.yml + needs: [call_dev_container_build] + call_seat_service_docu_build: + uses: ./.github/workflows/seat_service_docu_build.yml + needs: [call_dev_container_build] + call_hvac_service_build: + uses: ./.github/workflows/hvac_service.yml + create_release: + runs-on: ubuntu-latest + needs: + [ + call_seat_service_release, + call_seat_service_docu_build, + call_hvac_service_build, + ] + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Get the version + id: get_version + run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//} + + - name: Download all artifacts + uses: actions/download-artifact@v3 + with: + path: bin/ + - name: Create release + id: create_release + uses: softprops/action-gh-release@v1 + with: + draft: true + fail_on_unmatched_files: true + files: | + bin/** + LICENSE + NOTICE.md + + release-seat-service-image: + name: "Download and push to ghcr (seat_service)" + runs-on: ubuntu-latest + needs: [call_seat_service_release] + steps: + - id: repository-name-adjusted + name: Prepare repository name in lower case for docker upload. 
This supports repository names in mixed case + uses: ASzc/change-string-case-action@v2 + with: + string: ${{ github.repository }} + + - id: get_version + uses: battila7/get-version-action@v2 + + - name: Retrieve saved Docker image + uses: actions/download-artifact@v3 + with: + name: oci_vservice-seat.tar + path: ${{github.workspace}} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: "Push image to ghcr" + env: + COMPONENT_NAME: seat_service + COMPONENT_VERSION: ${{ steps.get_version.outputs.version }} + GIT_HUB_REPOSITORY_NAME_LOWER_CASE: ${{ steps.repository-name-adjusted.outputs.lowercase }} + shell: bash + run: | + skopeo copy --all oci-archive:oci_vservice-seat.tar "docker://ghcr.io/$GIT_HUB_REPOSITORY_NAME_LOWER_CASE/$COMPONENT_NAME:$COMPONENT_VERSION" + skopeo inspect --raw "docker://ghcr.io/$GIT_HUB_REPOSITORY_NAME_LOWER_CASE/$COMPONENT_NAME:$COMPONENT_VERSION" | jq + skopeo inspect "docker://ghcr.io/$GIT_HUB_REPOSITORY_NAME_LOWER_CASE/$COMPONENT_NAME:$COMPONENT_VERSION" + + release-hvac-service-image: + name: "Download and push to ghcr (hvac_service)" + runs-on: ubuntu-latest + + needs: [call_hvac_service_build] + steps: + - id: repository-name-adjusted + name: Prepare repository name in lower case for docker upload. 
This supports repository names in mixed case + uses: ASzc/change-string-case-action@v2 + with: + string: ${{ github.repository }} + + - id: get_version + uses: battila7/get-version-action@v2 + + - name: Retrieve saved Docker image + uses: actions/download-artifact@v3 + with: + name: oci_vservice-hvac.tar + path: ${{github.workspace}} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: "Push image to ghcr" + env: + COMPONENT_NAME: hvac_service + COMPONENT_VERSION: ${{ steps.get_version.outputs.version }} + GIT_HUB_REPOSITORY_NAME_LOWER_CASE: ${{ steps.repository-name-adjusted.outputs.lowercase }} + shell: bash + run: | + skopeo copy --all oci-archive:oci_vservice-hvac.tar "docker://ghcr.io/$GIT_HUB_REPOSITORY_NAME_LOWER_CASE/$COMPONENT_NAME:$COMPONENT_VERSION" + skopeo inspect --raw "docker://ghcr.io/$GIT_HUB_REPOSITORY_NAME_LOWER_CASE/$COMPONENT_NAME:$COMPONENT_VERSION" | jq + skopeo inspect "docker://ghcr.io/$GIT_HUB_REPOSITORY_NAME_LOWER_CASE/$COMPONENT_NAME:$COMPONENT_VERSION" diff --git a/.github/workflows/release_prepare.yml b/.github/workflows/release_prepare.yml new file mode 100644 index 0000000..05f2c28 --- /dev/null +++ b/.github/workflows/release_prepare.yml @@ -0,0 +1,45 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: release-prepare + +on: + workflow_dispatch: + inputs: + tag: + description: "Release version, eg:latest, 0.2.1" + required: true + default: "0.0.0" + +jobs: + bump-vdb-version: + runs-on: [ubuntu-latest] + + steps: + - name: adding github workspace as safe directory + run: git config --global --add safe.directory $GITHUB_WORKSPACE + + - uses: actions/checkout@v3 + + - name: Change version of VDB + working-directory: ${{github.workspace}} + run: ./prepare_release.sh ${{ inputs.tag }} + + - name: Create Pull Request with new vdb version + id: create-pr + uses: peter-evans/create-pull-request@v4.0.1 + with: + token: ${{ secrets.PAT || secrets.GITHUB_TOKEN }} + commit-message: "[Bot] Prepare release for ${{ inputs.tag }}" + title: "[Bot] Prepare release for ${{ inputs.tag }}" + labels: bot diff --git a/.github/workflows/seat_service_build.yml b/.github/workflows/seat_service_build.yml new file mode 100644 index 0000000..60e4ffc --- /dev/null +++ b/.github/workflows/seat_service_build.yml @@ -0,0 +1,70 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: seat-service-build + +on: + pull_request: + paths: + - ".github/workflows/seat_service_build.yml" + - "seat_service/**" + - "vehicle_data_broker/proto/**" + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + name: Build + container: + image: ghcr.io/${{ github.repository }}/oci_kuksa-val-services-ci:prerelease + credentials: + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + strategy: + matrix: + target-arch: [x86_64, aarch64] + fail-fast: true + env: + BUILD_TYPE: Debug + + steps: + - name: adding github workspace as safe directory + run: git config --global --add safe.directory $GITHUB_WORKSPACE + + - uses: actions/checkout@v3 + + - name: Run build + working-directory: ${{github.workspace}}/seat_service + run: ./build-debug.sh ${{ matrix.target-arch }} + + - name: Upload binaries + uses: actions/upload-artifact@v3 + with: + name: bin_vservice-seat_${{ matrix.target-arch }}_debug + path: | + ${{github.workspace}}/seat_service/target/${{ matrix.target-arch }}/debug/install + if-no-files-found: error + + - name: Test (x86_64) + if: ${{ matrix.target-arch == 'x86_64' }} + working-directory: ${{github.workspace}}/seat_service/target/x86_64/debug/ + run: ctest -j -T memcheck -C ${{env.BUILD_TYPE}} --output-on-failure -E '(example)' + + - name: Upload Test results (x86_64) + if: ${{ matrix.target-arch == 'x86_64' }} + uses: actions/upload-artifact@v3 + with: + name: report_test_vservice-seat + path: | + ${{github.workspace}}/seat_service/target/x86_64/debug/Testing + if-no-files-found: error diff --git a/.github/workflows/seat_service_docu_build.yml b/.github/workflows/seat_service_docu_build.yml new 
file mode 100644 index 0000000..ad102d8 --- /dev/null +++ b/.github/workflows/seat_service_docu_build.yml @@ -0,0 +1,56 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: seat-service-docu-build + +on: + pull_request: + paths: + - ".github/workflows/seat_service_docu_build.yml" + - "seat_service/**" + workflow_call: + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + name: Build docu + container: + image: ghcr.io/${{ github.repository }}/oci_kuksa-val-services-ci:prerelease + credentials: + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + steps: + - name: adding github workspace as safe directory + run: git config --global --add safe.directory $GITHUB_WORKSPACE + + - uses: actions/checkout@v3 + + - name: Build docu + working-directory: ${{github.workspace}}/seat_service + run: ./build-docu.sh + + - name: Tar files + shell: bash + working-directory: ${{github.workspace}}/seat_service/ + run: | + tar -czvf docu_vservice-seat.tar.gz \ + docs/doxygen/out/html/ + + - name: Upload docu + uses: actions/upload-artifact@v3 + with: + name: docu_vservice-seat + path: ${{github.workspace}}/seat_service/docu_vservice-seat.tar.gz + if-no-files-found: error diff --git a/.github/workflows/seat_service_release.yml b/.github/workflows/seat_service_release.yml new file mode 100644 index 0000000..b05c6d6 --- /dev/null +++ b/.github/workflows/seat_service_release.yml @@ -0,0 +1,129 @@ +# 
/******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: seat-service-release + +on: + pull_request: + paths: + - ".github/workflows/seat_service_release.yml" + - "seat_service/**" + - "vehicle_data_broker/proto/**" + workflow_dispatch: + workflow_call: + +jobs: + build: + runs-on: ubuntu-latest + name: Build + container: + image: ghcr.io/${{ github.repository }}/oci_kuksa-val-services-ci:prerelease + credentials: + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + strategy: + matrix: + target-arch: [x86_64, aarch64] + fail-fast: true + env: + CONAN_USER_HOME: "${{ github.workspace }}/conan_user_home/" + + steps: + - name: adding github workspace as safe directory + run: git config --global --add safe.directory $GITHUB_WORKSPACE + + - uses: actions/checkout@v3 + + - name: Using the builtin GitHub Cache Action for .conan + id: cache-conan + uses: actions/cache@v3.0.2 + env: + cache-name: cache-conan-modules + with: + path: ${{ env.CONAN_USER_HOME }} + key: ${{ runner.os }}-builder-${{ matrix.target-arch }}-${{ env.cache-name }}-${{ hashFiles('seat_service/conanfile.txt') }} + restore-keys: ${{ runner.os }}-builder-${{ matrix.target-arch }}-${{ env.cache-name }}- + + - name: Run build + working-directory: ${{github.workspace}}/seat_service + run: ./build-release.sh ${{ matrix.target-arch }} + + - name: Tar files + shell: bash + working-directory: ${{github.workspace}}/seat_service/ + run: | + tar -czvf 
bin_vservice-seat_${{ matrix.target-arch }}_release.tar.gz \ + target/${{ matrix.target-arch }}/release/install/ \ + target/${{ matrix.target-arch }}/release/licenses/ \ + proto/ + - name: Upload artifacts + uses: actions/upload-artifact@v3 + with: + name: bin_vservice-seat_${{ matrix.target-arch }}_release.tar.gz + path: ${{github.workspace}}/seat_service/bin_vservice-seat_${{ matrix.target-arch }}_release.tar.gz + if-no-files-found: error + + build-seat-service-image: + name: "Build multi-arch image (seat_service)" + runs-on: ubuntu-latest + needs: [build] + + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + + - name: Retrieve build binaries + uses: actions/download-artifact@v3 + with: + path: ${{github.workspace}}/seat_service + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v1 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - id: repository-name-adjusted + name: Make repository name in lower case for docker upload. + uses: ASzc/change-string-case-action@v2 + with: + string: ${{ github.repository }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: "Build seat service image" + id: image_build + uses: docker/build-push-action@v2 + with: + pull: true + push: false + outputs: | + type=oci,dest=./oci_vservice-seat.tar + context: . 
+ file: ./seat_service/Dockerfile + platforms: linux/amd64, linux/arm64 + tags: ${{ github.sha }} + labels: | + org.opencontainers.image.source=https://github.com/${{steps.repository-name-adjusted.outputs.lowercase}} + + - name: Temporarily save Docker image + uses: actions/upload-artifact@v3 + with: + name: oci_vservice-seat.tar + path: ${{github.workspace}}/oci_vservice-seat.tar + retention-days: 1 diff --git a/.github/workflows/seat_service_seatctrl_test.yml b/.github/workflows/seat_service_seatctrl_test.yml new file mode 100644 index 0000000..2fcfed9 --- /dev/null +++ b/.github/workflows/seat_service_seatctrl_test.yml @@ -0,0 +1,71 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +name: seat-service-seatcontroller-tests + +on: + pull_request: + paths: + - ".github/workflows/seat_service_seatctrl_test*" + - "seat_service/src/lib/seat_adjuster/**" + - "seat_service/CMakeLists.txt" + workflow_dispatch: + +env: + # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) + # NOTE: CodeCoverage requires debug build... 
+ BUILD_TYPE: Debug + +jobs: + build: + runs-on: ubuntu-latest + name: Build + container: + image: ghcr.io/${{ github.repository }}/oci_kuksa-val-services-ci:prerelease + credentials: + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + steps: + - name: adding github workspace as safe directory + run: git config --global --add safe.directory $GITHUB_WORKSPACE + + - uses: actions/checkout@v3 + + - name: Run build + working-directory: ${{github.workspace}}/seat_service + run: ./build-seatctrl.sh + shell: bash + + - name: cobertura-report + uses: 5monkeys/cobertura-action@v12 + with: + path: ${{github.workspace}}/seat_service/build_seat_controller/x86_64/report_codecov_vservice-seat-ctrl.xml + repo_token: ${{ secrets.GITHUB_TOKEN }} + minimum_coverage: 70 + + - name: Upload Code coverage (html) + uses: actions/upload-artifact@v3 + with: + name: report_codecov_vservice-seat-ctrl + path: | + ${{github.workspace}}/seat_service/build_seat_controller/x86_64/report_codecov_*/** + if-no-files-found: error + + - name: Upload Test Results + uses: actions/upload-artifact@v3 + with: + name: report_test_vservice-seat-ctrl + path: | + ${{github.workspace}}/seat_service/build_seat_controller/x86_64/Testing/ + if-no-files-found: error diff --git a/.github/workflows/vulnerabilityscan.yml b/.github/workflows/vulnerabilityscan.yml new file mode 100644 index 0000000..ebdae34 --- /dev/null +++ b/.github/workflows/vulnerabilityscan.yml @@ -0,0 +1,70 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. 
+# +name: "vulnerabilityscan" + +on: + push: + branches: [main] + pull_request: + # The branches below must be a subset of the branches above + branches: [main] + schedule: + - cron: "15 12 * * 1" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: ["python"] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://git.io/codeql-language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. 
+ # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d340849 --- /dev/null +++ b/.gitignore @@ -0,0 +1,9 @@ +assets/ +build/ +target/ +report/ +*.tar +__pycache__/ +.vscode/c_cpp_properties.json +.vscode/launch.json +/results/ diff --git a/.mypy.ini b/.mypy.ini new file mode 100644 index 0000000..9137383 --- /dev/null +++ b/.mypy.ini @@ -0,0 +1,6 @@ +# Global options: + +[mypy] +ignore_missing_imports = True +# exclude folder with generated files from grpcio +exclude = [ 'integration_test/gen_proto/' ] diff --git a/.vscode/README.md b/.vscode/README.md new file mode 100644 index 0000000..f783ef1 --- /dev/null +++ b/.vscode/README.md @@ -0,0 +1,29 @@ +# VS Code Tasks + +VS Code tasks are used to execute the VAL components (using similar setup as in `vehicle-app-python-template`). + +To execute a task, press `F1` (or `Ctrl+Shift+P`), `"Tasks: Run Task"`, select a task to run... + +List of VAL core component tasks: + +- `ensure-dapr` - make sure dapr is locally installed. All dapr related tasks depend on this. +- `run-databroker` - runs databroker via dapr (building amd64 binary if missing). Depends on "ensure-dapr". +- `run-hvacservice` - runs the HVAC service via dapr. Depends on "run-databroker". +- `run-seatservice` - runs seat service via dapr (building amd64 binary if missing). Depends on "run-databroker". + +List of VAL client tasks: + +- `run-databroker-cli` - runs databroker command line interface client +- `run-seat-cli` - runs seat service client. Asks user for following inputs: + - "Seat Position": Desired seat position [0..1000]. Default: 500. 
+ - "Wait": client prints seat position, until desired position is reached. Default: "Wait". +- `run-hvac-cli` - runs the HVAC test client allowing to enable/disable the AC and setting the desired cabin temperature. + +Helper tasks: + +- `Terminate Tasks` - Stops all running vs code tasks +- `Start VAL` - Terminates all tasks and restarts VAL components. Could be useful after rebuilding binaries. +- `Clean VAL binaries` - removes VehicleDataBroker and SeatService binaries from target install to force rebuilding. Depends on "Terminate Tasks" +- `integration-test` - Runs local integration tests in `USE_DAPR=1` mode using pytest. Depends on core VAL service tasks + +**NOTE:** Tasks are defined in `.vscode/tasks.json` and mostly wrap scripts in `.vscode/scripts`, but also have extras like dependencies and terminal integration. diff --git a/.vscode/scripts/clean-binaries.sh b/.vscode/scripts/clean-binaries.sh new file mode 100755 index 0000000..7921819 --- /dev/null +++ b/.vscode/scripts/clean-binaries.sh @@ -0,0 +1,33 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2086 + +echo "#######################################################" +echo "### Clean binaries ###" +echo "#######################################################" + +set -e + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +CLEAN_FILES="$ROOT_DIRECTORY/seat_service/target/x86_64/release/install" +CLEAN_FILES="$ROOT_DIRECTORY/seat_service/target/aarch64/release/install $CLEAN_FILES" + +CLEAN_FILES="$ROOT_DIRECTORY/target/release/vehicle-data-* $CLEAN_FILES" +CLEAN_FILES="$ROOT_DIRECTORY/target/aarch64-unknown-linux-gnu/release/vehicle-data-* $CLEAN_FILES" + +set -x +rm -rfv $CLEAN_FILES diff --git a/.vscode/scripts/ensure-dapr.sh b/.vscode/scripts/ensure-dapr.sh new file mode 100755 index 0000000..fc7575d --- /dev/null +++ b/.vscode/scripts/ensure-dapr.sh @@ -0,0 +1,44 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2046 +# shellcheck disable=SC2086 + +echo "#######################################################" +echo "### Ensure dapr ###" +echo "#######################################################" + +ROOT_DIRECTORY="$(git rev-parse --show-toplevel)" +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +version=$(dapr --version | grep "Runtime version: " | sed 's/^.*: //') + +if ! [[ $version =~ ^([0-9]{1,2})\.([0-9]{1,2})\.([0-9]{1,2}) ]]; then + daprReleaseUrl="https://api.github.com/repos/dapr/cli/releases" + latest_release=$(curl -s $daprReleaseUrl | grep \"tag_name\" | grep -v rc | awk 'NR==1{print $2}' | sed -n 's/\"\(.*\)\",/\1/p') + if [ -z "$latest_release" ]; then + echo "Installing dapr pre-defined version: 1.6.0" + wget -q https://raw.githubusercontent.com/dapr/cli/master/install/install.sh -O - | /bin/bash -s 1.6.0 + else + echo "Installing dapr latest version: $latest_release" + wget -q https://raw.githubusercontent.com/dapr/cli/master/install/install.sh -O - | /bin/bash + fi + + dapr uninstall + dapr init +else + echo "Dapr is already installed and initialized, skipping setup." +fi + +dapr --version diff --git a/.vscode/scripts/exec-check.sh b/.vscode/scripts/exec-check.sh new file mode 100755 index 0000000..a827c43 --- /dev/null +++ b/.vscode/scripts/exec-check.sh @@ -0,0 +1,21 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +#e cho "###[check] $* [$#]" +if [ $# -eq 0 ]; then + tput setaf 1 + echo "ERROR: To execute script, use VSCODE Tasks: [CTRL+SHIFT+P -> Tasks: Run Tasks -> $1]." + read -r -p "Press to close this window" + exit 1 +fi diff --git a/.vscode/scripts/k3d-uninstall.sh b/.vscode/scripts/k3d-uninstall.sh new file mode 100755 index 0000000..99ad5e6 --- /dev/null +++ b/.vscode/scripts/k3d-uninstall.sh @@ -0,0 +1,28 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +echo "#######################################################" +echo "### k3s uninstall ###" +echo "#######################################################" + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +k3d cluster delete dev-cluster +k3d node delete k3d-devregistry.localhost + +kubectl delete --all pods +kubectl delete --all deployments +kubectl delete --all namespaces diff --git a/.vscode/scripts/run-hvac-cli.sh b/.vscode/scripts/run-hvac-cli.sh new file mode 100755 index 0000000..124c4ae --- /dev/null +++ b/.vscode/scripts/run-hvac-cli.sh @@ -0,0 +1,59 @@ +#!/bin/bash 
+#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2034 +# shellcheck disable=SC2086 + +echo "#######################################################" +echo "### Running HVAC Client ###" +echo "#######################################################" + +set -e + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +[ "$1" = "--task" ] && shift + +TEMP="$1" +STATUS_MODE="$2" + +# sanity checks for invalid user input +if [ -z "$TEMP" ] || [ "$STATUS_MODE" != "ON" ] && [ "$STATUS_MODE" != "OFF" ]; then + echo "Invalid arguments!" + echo + echo "Usage: $0 --task AC_TEMP [ON | OFF]" + echo + exit 1 +fi + +# replace [ON/OFF] with [1/0] for AC_STATUS +if [ "$STATUS_MODE" = "ON" ]; then + STATUS="1" +else + STATUS="0" +fi + +HVACSERVICE_PORT='50052' +HVACSERVICE_EXEC_PATH="$ROOT_DIRECTORY/hvac_service" +if [ ! 
-f "$HVACSERVICE_EXEC_PATH/testclient.py" ]; then + echo "Can't find $HVACSERVICE_EXEC_PATH/testclient.py" + exit 1 +fi + +cd "$HVACSERVICE_EXEC_PATH" || exit 1 +pip3 install -q -r requirements.txt + +set -x +python3 -u testclient.py --addr=localhost:$HVACSERVICE_PORT --temp=$TEMP --status=$STATUS diff --git a/.vscode/scripts/run-hvacservice.sh b/.vscode/scripts/run-hvacservice.sh new file mode 100755 index 0000000..215a9b1 --- /dev/null +++ b/.vscode/scripts/run-hvacservice.sh @@ -0,0 +1,64 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2034 +# shellcheck disable=SC2086 + +echo "#######################################################" +echo "### Running HVAC Service ###" +echo "#######################################################" + +set -e + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +HVACSERVICE_PORT='50052' +HVACSERVICE_GRPC_PORT='52005' + +HVACSERVICE_EXEC_PATH="$ROOT_DIRECTORY/hvac_service" +if [ ! 
-f "$HVACSERVICE_EXEC_PATH/hvacservice.py" ]; then + echo "Can't find $HVACSERVICE_EXEC_PATH/hvacservice.py" + exit 1 +fi + +cd "$HVACSERVICE_EXEC_PATH" || exit 1 +pip3 install -q -r requirements.txt + +# NOTE: use curent sidecar's grpc port, don't connect directly to sidecar of vdb (DATABROKER_GRPC_PORT) +export DAPR_GRPC_PORT=$HVACSERVICE_GRPC_PORT +export HVACSERVICE_DAPR_APP_ID='hvacservice' +export VEHICLEDATABROKER_DAPR_APP_ID='vehicledatabroker' + +echo "*******************************************" +echo "* Hvac Service APP port: $HVACSERVICE_PORT" +echo "* Hvac Service Dapr sidecar port: $HVACSERVICE_GRPC_PORT" +echo "* DAPR_GRPC_PORT=$DAPR_GRPC_PORT" +echo "* metadata: [ HVACSERVICE_DAPR_APP_ID=$HVACSERVICE_DAPR_APP_ID, VEHICLEDATABROKER_DAPR_APP_ID=$VEHICLEDATABROKER_DAPR_APP_ID ]" +echo "*******************************************" +echo + +## uncomment for dapr debug logs +# DAPR_OPT="--enable-api-logging --log-level debug" + +dapr run \ + --app-id $HVACSERVICE_DAPR_APP_ID \ + --app-protocol grpc \ + --app-port $HVACSERVICE_PORT \ + --dapr-grpc-port $HVACSERVICE_GRPC_PORT \ + $DAPR_OPT \ + --components-path $ROOT_DIRECTORY/.dapr/components \ + --config $ROOT_DIRECTORY/.dapr/config.yaml \ + -- \ + python3 -u ./hvacservice.py diff --git a/.vscode/scripts/run-integrationtest.sh b/.vscode/scripts/run-integrationtest.sh new file mode 100755 index 0000000..b310de5 --- /dev/null +++ b/.vscode/scripts/run-integrationtest.sh @@ -0,0 +1,35 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+#
+# This program and the accompanying materials are made available under the
+# terms of the Apache License 2.0 which is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# SPDX-License-Identifier: Apache-2.0
+#*******************************************************************************/
+# shellcheck disable=SC2002
+# shellcheck disable=SC2086
+
+echo "#######################################################"
+echo "### Running Integration Tests ###"
+echo "#######################################################"
+
+set -e
+
+ROOT_DIRECTORY=$(git rev-parse --show-toplevel)
+# shellcheck source=/dev/null
+source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@"
+
+pip install -q -r "${ROOT_DIRECTORY}/integration_test/requirements-dev.txt"
+pip install -q -e "${ROOT_DIRECTORY}/integration_test/"
+
+pytest -v "${ROOT_DIRECTORY}/integration_test" \
+    --log-file=./results/IntegrationTest/integration.log \
+    --asyncio-mode=auto --override-ini \
+    junit_family=xunit1 --junit-xml=./results/IntegrationTest/junit.xml
+
+exit $?
diff --git a/.vscode/scripts/run-seatservice-cli.sh b/.vscode/scripts/run-seatservice-cli.sh
new file mode 100755
index 0000000..ceb1599
--- /dev/null
+++ b/.vscode/scripts/run-seatservice-cli.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+#********************************************************************************
+# Copyright (c) 2022 Contributors to the Eclipse Foundation
+#
+# See the NOTICE file(s) distributed with this work for additional
+# information regarding copyright ownership.
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2034 +# shellcheck disable=SC2086 + +echo "#######################################################" +echo "### Running Seatservice Client ###" +echo "#######################################################" + +set -e + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +[ "$1" = "--task" ] && shift +POS="$1" +shift +ARGS="$*" + +# handle environment sync flag from vs task +if [ "$SEAT_WAIT" = "wait" ] && ! echo "$ARGS" | grep -q "\-\-wait"; then + ARGS="--wait $ARGS" +fi + +SEATSERVICE_PORT='50051' + +#Detect host environment (distinguish for Mac M1 processor) +if [ "$(uname -m)" = "aarch64" ] || [ "$(uname -m)" = "arm64" ]; then + echo "Detected ARM architecture" + PROCESSOR="aarch64" +else + echo "Detected x86_64 architecture" + PROCESSOR="x86_64" +fi + +SEATSERVICE_EXEC_PATH="$ROOT_DIRECTORY/seat_service/target/$PROCESSOR/release/install/bin" + +if [ ! 
-x "$SEATSERVICE_EXEC_PATH/seat_svc_client" ]; then + echo "seat_svc_client binary is missing: $SEATSERVICE_EXEC_PATH" + exit 1 +fi + +#export DAPR_GRPC_PORT=$SEATSERVICE_GRPC_PORT +echo "$ $SEATSERVICE_EXEC_PATH/seat_svc_client $POS $ARGS" +"$SEATSERVICE_EXEC_PATH/seat_svc_client" "$POS" $ARGS # --target "127.0.0.1:$SEATSERVICE_PORT" diff --git a/.vscode/scripts/run-seatservice.sh b/.vscode/scripts/run-seatservice.sh new file mode 100755 index 0000000..dc62ba1 --- /dev/null +++ b/.vscode/scripts/run-seatservice.sh @@ -0,0 +1,73 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2086 + +echo "#######################################################" +echo "### Running Seatservice ###" +echo "#######################################################" + +set -e + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +SEATSERVICE_PORT='50051' +SEATSERVICE_GRPC_PORT='52002' + +build_seatservice() { + local arch="$1" + echo "-- Building databroker for [$PROCESSOR]..." 
+ cd "$ROOT_DIRECTORY/seat_service" || exit 1 + if [ "$arch" = "aarch64" ] || [ "$arch" = "arm64" ]; then + ./build-release.sh "aarch64" # only aarch64 is supported + fi + if [ "$arch" = "x86_64" ]; then + ./build-release.sh "x86_64" + fi +} + +#Detect host environment (distinguish for Mac M1 processor) +if [ "$(uname -m)" = "aarch64" ] || [ "$(uname -m)" = "arm64" ]; then + echo "Detected ARM architecture" + PROCESSOR="aarch64" +else + echo "Detected x86_64 architecture" + PROCESSOR="x86_64" +fi + +SEATSERVICE_EXEC_PATH="$ROOT_DIRECTORY/seat_service/target/$PROCESSOR/release/install/bin" + +if [ ! -x "$SEATSERVICE_EXEC_PATH/seat_service" ]; then + echo "seat_service binary is missing: $SEATSERVICE_EXEC_PATH" + build_seatservice "$PROCESSOR" + file "$SEATSERVICE_EXEC_PATH/seat_service" || exit 1 +fi + +export DAPR_GRPC_PORT=$SEATSERVICE_GRPC_PORT +export CAN="cansim" +export VEHICLEDATABROKER_DAPR_APP_ID="vehicledatabroker" +# needed to override vdb address +export BROKER_ADDR="127.0.0.1:$DAPR_GRPC_PORT" +# export SA_DEBUG=1 +# export SC_STAT=1 + +dapr run \ + --app-id seatservice \ + --app-protocol grpc \ + --app-port $SEATSERVICE_PORT \ + --dapr-grpc-port $SEATSERVICE_GRPC_PORT \ + --components-path $ROOT_DIRECTORY/.dapr/components \ + --config $ROOT_DIRECTORY/.dapr/config.yaml & +$SEATSERVICE_EXEC_PATH/val_start.sh diff --git a/.vscode/scripts/run-vehicledatabroker-cli.sh b/.vscode/scripts/run-vehicledatabroker-cli.sh new file mode 100755 index 0000000..8e6e729 --- /dev/null +++ b/.vscode/scripts/run-vehicledatabroker-cli.sh @@ -0,0 +1,46 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2002 + +echo "#######################################################" +echo "### Running VehicleDataBroker CLI ###" +echo "#######################################################" + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +DATABROKER_VERSION="test" + +#Detect host environment (distinguish for Mac M1 processor) +if [ "$(uname -m)" = "aarch64" ] || [ "$(uname -m)" = "arm64" ]; then + echo "Detected AArch64 architecture" + PROCESSOR="aarch64" +else + echo "Detected x86_64 architecture" + PROCESSOR="x86_64" +fi +DATABROKER_BINARY_NAME="bin_release_databroker_$PROCESSOR.tar.gz" +DATABROKER_BINARY_PATH="$ROOT_DIRECTORY/.vscode/scripts/assets/databroker/$DATABROKER_VERSION/$PROCESSOR" +DATABROKERCLI_EXECUTABLE="$DATABROKER_BINARY_PATH/target/release/vehicle-data-cli" + +DOWNLOAD_URL=https://github.com/eclipse/kuksa.val/releases/download/$DATABROKER_VERSION/$DATABROKER_BINARY_NAME + +if [[ ! 
-f "$DATABROKERCLI_EXECUTABLE" ]]; then + echo "Downloading vehicle-data-broker:$DATABROKER_VERSION" + curl -o "$DATABROKER_BINARY_PATH"/"$DATABROKER_BINARY_NAME" --create-dirs -L -H "Accept: application/octet-stream" "$DOWNLOAD_URL" + tar -xf "$DATABROKER_BINARY_PATH"/"$DATABROKER_BINARY_NAME" -C "$DATABROKER_BINARY_PATH" +fi + +"$DATABROKERCLI_EXECUTABLE" diff --git a/.vscode/scripts/run-vehicledatabroker.sh b/.vscode/scripts/run-vehicledatabroker.sh new file mode 100755 index 0000000..8ef9cf4 --- /dev/null +++ b/.vscode/scripts/run-vehicledatabroker.sh @@ -0,0 +1,58 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2086 + +echo "#######################################################" +echo "### Running Databroker ###" +echo "#######################################################" + +set -e + +ROOT_DIRECTORY=$(git rev-parse --show-toplevel) +# shellcheck source=/dev/null +source "$ROOT_DIRECTORY/.vscode/scripts/exec-check.sh" "$@" + +DATABROKER_VERSION="test" +DATABROKER_PORT='55555' +DATABROKER_GRPC_PORT='52001' + +#Detect host environment (distinguish for Mac M1 processor) +if [ "$(uname -m)" = "aarch64" ] || [ "$(uname -m)" = "arm64" ]; then + echo "Detected AArch64 architecture" + PROCESSOR="aarch64" +else + echo "Detected x86_64 architecture" + PROCESSOR="x86_64" +fi +DATABROKER_BINARY_NAME="bin_release_databroker_$PROCESSOR.tar.gz" 
+DATABROKER_BINARY_PATH="$ROOT_DIRECTORY/.vscode/scripts/assets/databroker/$DATABROKER_VERSION/$PROCESSOR" +DATABROKER_EXECUTABLE="$DATABROKER_BINARY_PATH/target/release/vehicle-data-broker" +DOWNLOAD_URL=https://github.com/eclipse/kuksa.val/releases/download/$DATABROKER_VERSION/$DATABROKER_BINARY_NAME + +if [[ ! -f "$DATABROKER_EXECUTABLE" ]]; then + echo "Downloading vehicle-data-broker:$DATABROKER_VERSION" + curl -o "$DATABROKER_BINARY_PATH"/"$DATABROKER_BINARY_NAME" --create-dirs -L -H "Accept: application/octet-stream" "$DOWNLOAD_URL" + tar -xf "$DATABROKER_BINARY_PATH"/"$DATABROKER_BINARY_NAME" -C $DATABROKER_BINARY_PATH +fi + +export DAPR_GRPC_PORT=$DATABROKER_GRPC_PORT +# export RUST_LOG="info,databroker=debug,vehicle_data_broker=debug" +dapr run \ + --app-id vehicledatabroker \ + --app-protocol grpc \ + --app-port $DATABROKER_PORT \ + --dapr-grpc-port $DATABROKER_GRPC_PORT \ + --components-path $ROOT_DIRECTORY/.dapr/components \ + --config $ROOT_DIRECTORY/.dapr/config.yaml & +$DATABROKER_EXECUTABLE --address 0.0.0.0 diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..21b8d77 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,291 @@ +/******************************************************************************** + * Copyright (c) 2022 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ + +{ + // See https://go.microsoft.com/fwlink/?LinkId=733558 + // for the documentation about the tasks.json format + "version": "2.0.0", + "tasks": [ + { + "label": "ensure-dapr", + "type": "shell", + "command": "./.vscode/scripts/ensure-dapr.sh --task", + "presentation": { + "close": true + }, + "problemMatcher": [] + }, + { + "label": "run-databroker", + "dependsOn": ["ensure-dapr"], + "dependsOrder": "sequence", + "type": "shell", + "command": "./.vscode/scripts/run-vehicledatabroker.sh --task", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "dedicated", + "showReuseMessage": false + }, + "isBackground": true, + "runOptions": { + "instanceLimit": 1, + "reevaluateOnRerun": true + }, + "problemMatcher": { + "pattern": [ + { + "regexp": ".", + "file": 1, + "location": 2, + "message": 3 + } + ], + "background": { + "activeOnStart": true, + "beginsPattern": "^You're up and running! Dapr logs will appear here.", + "endsPattern": "." 
+ } + } + }, + { + "label": "run-databroker-cli", + "type": "shell", + "command": "./.vscode/scripts/run-vehicledatabroker-cli.sh --task", + "group": "test", + "isBackground": false, + "presentation": { + "reveal": "always", + "panel": "dedicated", + "showReuseMessage": false + }, + "runOptions": { + "instanceLimit": 1, + "reevaluateOnRerun": true + } + }, + { + "label": "run-seatservice", + "dependsOn": ["ensure-dapr", "run-databroker"], + "dependsOrder": "sequence", + "type": "shell", + "command": "./.vscode/scripts/run-seatservice.sh --task", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "dedicated", + "showReuseMessage": false + }, + "isBackground": true, + "runOptions": { + "instanceLimit": 1, + "reevaluateOnRerun": true + }, + "problemMatcher": { + "pattern": [ + { + "regexp": ".", + "file": 1, + "location": 2, + "message": 3 + } + ], + "background": { + "activeOnStart": true, + "beginsPattern": "^You're up and running! Dapr logs will appear here.", + "endsPattern": "." 
+ } + } + }, + { + "label": "run-seat-cli", + "dependsOn": ["ensure-dapr"], + "dependsOrder": "sequence", + "type": "shell", + "command": "./.vscode/scripts/run-seatservice-cli.sh", + "args": ["--task", "${input:SEAT_POS}"], + "options": { + "env": { + "SEAT_WAIT": "${input:SEAT_WAIT}" + } + }, + "group": "test", + "isBackground": false, + "runOptions": { + "instanceLimit": 1 + }, + "presentation": { + "clear": true, + "showReuseMessage": false + } + }, + { + "label": "run-hvacservice", + "dependsOn": ["ensure-dapr", "run-databroker"], + "dependsOrder": "sequence", + "type": "shell", + "command": "./.vscode/scripts/run-hvacservice.sh --task", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "dedicated", + "showReuseMessage": false + }, + "isBackground": true, + "runOptions": { + "instanceLimit": 1, + "reevaluateOnRerun": true + }, + "problemMatcher": { + "pattern": [ + { + "regexp": ".", + "file": 1, + "location": 2, + "message": 3 + } + ], + "background": { + "activeOnStart": true, + "beginsPattern": "^You're up and running! Dapr logs will appear here.", + "endsPattern": "." 
+ } + } + }, + { + "label": "run-hvac-cli", + // "detail": "Runs HVAC Client", + "dependsOn": ["ensure-dapr", "run-hvacservice"], + "dependsOrder": "sequence", + "type": "shell", + "command": "./.vscode/scripts/run-hvac-cli.sh", + "args": ["${input:AC_TEMP}", "${input:AC_STATUS}"], + "group": "test", + "isBackground": false, + "runOptions": { + "instanceLimit": 1 + }, + "presentation": { + "clear": true, + "showReuseMessage": false + } + }, + { + "label": "Start VAL", + "dependsOn": [ + // "Clean VAL binaries", + "Terminate Tasks", + "ensure-dapr", + "run-databroker", + "run-hvacservice", + "run-seatservice" + ], + "dependsOrder": "sequence", + "runOptions": { + "runOn": "folderOpen", + "instanceLimit": 1 + }, + "problemMatcher": [] + }, + { + "label": "integration-test", + "type": "shell", + "command": "./.vscode/scripts/run-integrationtest.sh --task", + "group": "test", + "dependsOn": [ + "ensure-dapr", + "run-databroker", + "run-seatservice", + "run-feedercan" + ], + "dependsOrder": "sequence", + "presentation": { + "reveal": "always", + "panel": "dedicated", + "showReuseMessage": false + }, + "isBackground": false, + "runOptions": { + "instanceLimit": 1, + "reevaluateOnRerun": true + }, + "problemMatcher": { + "pattern": [ + { + "regexp": "^FAILED|failed", + "file": 1, + "location": 2, + "message": 3 + } + ], + "background": { + "activeOnStart": true, + "beginsPattern": "^========================================================================================= ", + "endsPattern": "." 
+ } + } + }, + { + "label": "Clean VAL binaries", + "type": "shell", + "dependsOn": ["Terminate Tasks"], + "dependsOrder": "sequence", + "command": "./.vscode/scripts/clean-binaries.sh --task", + "isBackground": false, + "presentation": { + "close": true + }, + "problemMatcher": [] + }, + { + "label": "Terminate Tasks", + "command": "echo ${input:terminate}", + "type": "shell", + "problemMatcher": [] + } + ], + "inputs": [ + { + "id": "SEAT_POS", + "type": "promptString", + "description": "Seat Position [0..1000] (1000=100%)", + "default": "500" + }, + { + "id": "SEAT_WAIT", + "type": "pickString", + "description": "Wait until seat position reached?", + "options": ["wait", "no-wait"], + "default": "wait" + }, + { + "id": "AC_TEMP", + "type": "promptString", + "description": "AC Temperature", + "default": "23.0" + }, + { + "id": "AC_STATUS", + "type": "pickString", + "description": "AC Status", + "options": ["ON", "OFF"], + "default": "ON" + }, + { + "id": "terminate", + "type": "command", + "command": "workbench.action.tasks.terminate", + "args": "terminateAll" + } + ] +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..c728bd4 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,65 @@ +# How to Contribute to Eclipse Kuksa + +First of all, thanks for considering to contribute to Eclipse Kuksa. We really +appreciate the time and effort you want to spend helping to improve things around here. + +In order to get you started as fast as possible we need to go through some organizational issues first, though. + +## Eclipse Contributor Agreement + +Before your contribution can be accepted by the project team contributors must +electronically sign the Eclipse Contributor Agreement (ECA). + +* + +Commits that are provided by non-committers must have a Signed-off-by field in +the footer indicating that the author is aware of the terms by which the +contribution has been provided to the project. 
The non-committer must
+additionally have an Eclipse Foundation account and must have a signed Eclipse
+Contributor Agreement (ECA) on file.
+
+For more information, please see the Eclipse Committer Handbook:
+
+
+## Making Your Changes
+
+* Fork the repository on GitHub.
+* Create a new branch for your changes.
+* Make your changes following the code style guide (see Code Style Guide section above).
+* When you create new files make sure you include a proper license header at the top of the file (see License Header section below).
+* Make sure you include test cases for non-trivial features.
+* Make sure test cases provide sufficient code coverage (see GitHub actions for minimal accepted coverage).
+* Make sure the test suite passes after your changes.
+* Commit your changes into that branch.
+* Use descriptive and meaningful commit messages. Start the first line of the commit message with the issue number and title e.g. `[#9865] Add token based authentication`.
+* Squash multiple commits that are related to each other semantically into a single one.
+* Make sure you use the `-s` flag when committing as explained above.
+* Push your changes to your branch in your forked repository.
+
+## License Header
+
+Please make sure any file you newly create contains a proper license header like this:
+
+```python
+#********************************************************************************
+# Copyright (c) 2022 Contributors to the Eclipse Foundation
+#
+# See the NOTICE file(s) distributed with this work for additional
+# information regarding copyright ownership.
+#
+# This program and the accompanying materials are made available under the
+# terms of the Apache License 2.0 which is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# SPDX-License-Identifier: Apache-2.0
+#*******************************************************************************/
+```
+Please adjust the comment character to the specific file format.
+ +## Submitting the Changes + +Submit a pull request via the normal GitHub UI. + +## After Submitting + +* Do not use your branch for any other development, otherwise further changes that you make will be visible in the PR. diff --git a/LICENSE b/LICENSE index e48e096..f49a4e1 100644 --- a/LICENSE +++ b/LICENSE @@ -1,277 +1,201 @@ -Eclipse Public License - v 2.0 - - THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE - PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION - OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. - -1. DEFINITIONS - -"Contribution" means: - - a) in the case of the initial Contributor, the initial content - Distributed under this Agreement, and - - b) in the case of each subsequent Contributor: - i) changes to the Program, and - ii) additions to the Program; - where such changes and/or additions to the Program originate from - and are Distributed by that particular Contributor. A Contribution - "originates" from a Contributor if it was added to the Program by - such Contributor itself or anyone acting on such Contributor's behalf. - Contributions do not include changes or additions to the Program that - are not Modified Works. - -"Contributor" means any person or entity that Distributes the Program. - -"Licensed Patents" mean patent claims licensable by a Contributor which -are necessarily infringed by the use or sale of its Contribution alone -or when combined with the Program. - -"Program" means the Contributions Distributed in accordance with this -Agreement. - -"Recipient" means anyone who receives the Program under this Agreement -or any Secondary License (as applicable), including Contributors. - -"Derivative Works" shall mean any work, whether in Source Code or other -form, that is based on (or derived from) the Program and for which the -editorial revisions, annotations, elaborations, or other modifications -represent, as a whole, an original work of authorship. 
- -"Modified Works" shall mean any work in Source Code or other form that -results from an addition to, deletion from, or modification of the -contents of the Program, including, for purposes of clarity any new file -in Source Code form that contains any contents of the Program. Modified -Works shall not include works that contain only declarations, -interfaces, types, classes, structures, or files of the Program solely -in each case in order to link to, bind by name, or subclass the Program -or Modified Works thereof. - -"Distribute" means the acts of a) distributing or b) making available -in any manner that enables the transfer of a copy. - -"Source Code" means the form of a Program preferred for making -modifications, including but not limited to software source code, -documentation source, and configuration files. - -"Secondary License" means either the GNU General Public License, -Version 2.0, or any later versions of that license, including any -exceptions or additional permissions as identified by the initial -Contributor. - -2. GRANT OF RIGHTS - - a) Subject to the terms of this Agreement, each Contributor hereby - grants Recipient a non-exclusive, worldwide, royalty-free copyright - license to reproduce, prepare Derivative Works of, publicly display, - publicly perform, Distribute and sublicense the Contribution of such - Contributor, if any, and such Derivative Works. - - b) Subject to the terms of this Agreement, each Contributor hereby - grants Recipient a non-exclusive, worldwide, royalty-free patent - license under Licensed Patents to make, use, sell, offer to sell, - import and otherwise transfer the Contribution of such Contributor, - if any, in Source Code or other form. This patent license shall - apply to the combination of the Contribution and the Program if, at - the time the Contribution is added by the Contributor, such addition - of the Contribution causes such combination to be covered by the - Licensed Patents. 
The patent license shall not apply to any other - combinations which include the Contribution. No hardware per se is - licensed hereunder. - - c) Recipient understands that although each Contributor grants the - licenses to its Contributions set forth herein, no assurances are - provided by any Contributor that the Program does not infringe the - patent or other intellectual property rights of any other entity. - Each Contributor disclaims any liability to Recipient for claims - brought by any other entity based on infringement of intellectual - property rights or otherwise. As a condition to exercising the - rights and licenses granted hereunder, each Recipient hereby - assumes sole responsibility to secure any other intellectual - property rights needed, if any. For example, if a third party - patent license is required to allow Recipient to Distribute the - Program, it is Recipient's responsibility to acquire that license - before distributing the Program. - - d) Each Contributor represents that to its knowledge it has - sufficient copyright rights in its Contribution, if any, to grant - the copyright license set forth in this Agreement. - - e) Notwithstanding the terms of any Secondary License, no - Contributor makes additional grants to any Recipient (other than - those set forth in this Agreement) as a result of such Recipient's - receipt of the Program under the terms of a Secondary License - (if permitted under the terms of Section 3). - -3. 
REQUIREMENTS - -3.1 If a Contributor Distributes the Program in any form, then: - - a) the Program must also be made available as Source Code, in - accordance with section 3.2, and the Contributor must accompany - the Program with a statement that the Source Code for the Program - is available under this Agreement, and informs Recipients how to - obtain it in a reasonable manner on or through a medium customarily - used for software exchange; and - - b) the Contributor may Distribute the Program under a license - different than this Agreement, provided that such license: - i) effectively disclaims on behalf of all other Contributors all - warranties and conditions, express and implied, including - warranties or conditions of title and non-infringement, and - implied warranties or conditions of merchantability and fitness - for a particular purpose; - - ii) effectively excludes on behalf of all other Contributors all - liability for damages, including direct, indirect, special, - incidental and consequential damages, such as lost profits; - - iii) does not attempt to limit or alter the recipients' rights - in the Source Code under section 3.2; and - - iv) requires any subsequent distribution of the Program by any - party to be under a license that satisfies the requirements - of this section 3. - -3.2 When the Program is Distributed as Source Code: - - a) it must be made available under this Agreement, or if the - Program (i) is combined with other material in a separate file or - files made available under a Secondary License, and (ii) the initial - Contributor attached to the Source Code the notice described in - Exhibit A of this Agreement, then the Program may be made available - under the terms of such Secondary Licenses, and - - b) a copy of this Agreement must be included with each copy of - the Program. 
- -3.3 Contributors may not remove or alter any copyright, patent, -trademark, attribution notices, disclaimers of warranty, or limitations -of liability ("notices") contained within the Program from any copy of -the Program which they Distribute, provided that Contributors may add -their own appropriate notices. - -4. COMMERCIAL DISTRIBUTION - -Commercial distributors of software may accept certain responsibilities -with respect to end users, business partners and the like. While this -license is intended to facilitate the commercial use of the Program, -the Contributor who includes the Program in a commercial product -offering should do so in a manner which does not create potential -liability for other Contributors. Therefore, if a Contributor includes -the Program in a commercial product offering, such Contributor -("Commercial Contributor") hereby agrees to defend and indemnify every -other Contributor ("Indemnified Contributor") against any losses, -damages and costs (collectively "Losses") arising from claims, lawsuits -and other legal actions brought by a third party against the Indemnified -Contributor to the extent caused by the acts or omissions of such -Commercial Contributor in connection with its distribution of the Program -in a commercial product offering. The obligations in this section do not -apply to any claims or Losses relating to any actual or alleged -intellectual property infringement. In order to qualify, an Indemnified -Contributor must: a) promptly notify the Commercial Contributor in -writing of such claim, and b) allow the Commercial Contributor to control, -and cooperate with the Commercial Contributor in, the defense and any -related settlement negotiations. The Indemnified Contributor may -participate in any such claim at its own expense. - -For example, a Contributor might include the Program in a commercial -product offering, Product X. That Contributor is then a Commercial -Contributor. 
If that Commercial Contributor then makes performance -claims, or offers warranties related to Product X, those performance -claims and warranties are such Commercial Contributor's responsibility -alone. Under this section, the Commercial Contributor would have to -defend claims against the other Contributors related to those performance -claims and warranties, and if a court requires any other Contributor to -pay any damages as a result, the Commercial Contributor must pay -those damages. - -5. NO WARRANTY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT -PERMITTED BY APPLICABLE LAW, THE PROGRAM IS PROVIDED ON AN "AS IS" -BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR -IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF -TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR -PURPOSE. Each Recipient is solely responsible for determining the -appropriateness of using and distributing the Program and assumes all -risks associated with its exercise of rights under this Agreement, -including but not limited to the risks and costs of program errors, -compliance with applicable laws, damage to or loss of data, programs -or equipment, and unavailability or interruption of operations. - -6. DISCLAIMER OF LIABILITY - -EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, AND TO THE EXTENT -PERMITTED BY APPLICABLE LAW, NEITHER RECIPIENT NOR ANY CONTRIBUTORS -SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST -PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE -EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGES. - -7. 
GENERAL - -If any provision of this Agreement is invalid or unenforceable under -applicable law, it shall not affect the validity or enforceability of -the remainder of the terms of this Agreement, and without further -action by the parties hereto, such provision shall be reformed to the -minimum extent necessary to make such provision valid and enforceable. - -If Recipient institutes patent litigation against any entity -(including a cross-claim or counterclaim in a lawsuit) alleging that the -Program itself (excluding combinations of the Program with other software -or hardware) infringes such Recipient's patent(s), then such Recipient's -rights granted under Section 2(b) shall terminate as of the date such -litigation is filed. - -All Recipient's rights under this Agreement shall terminate if it -fails to comply with any of the material terms or conditions of this -Agreement and does not cure such failure in a reasonable period of -time after becoming aware of such noncompliance. If all Recipient's -rights under this Agreement terminate, Recipient agrees to cease use -and distribution of the Program as soon as reasonably practicable. -However, Recipient's obligations under this Agreement and any licenses -granted by Recipient relating to the Program shall continue and survive. - -Everyone is permitted to copy and distribute copies of this Agreement, -but in order to avoid inconsistency the Agreement is copyrighted and -may only be modified in the following manner. The Agreement Steward -reserves the right to publish new versions (including revisions) of -this Agreement from time to time. No one other than the Agreement -Steward has the right to modify this Agreement. The Eclipse Foundation -is the initial Agreement Steward. The Eclipse Foundation may assign the -responsibility to serve as the Agreement Steward to a suitable separate -entity. Each new version of the Agreement will be given a distinguishing -version number. 
The Program (including Contributions) may always be -Distributed subject to the version of the Agreement under which it was -received. In addition, after a new version of the Agreement is published, -Contributor may elect to Distribute the Program (including its -Contributions) under the new version. - -Except as expressly stated in Sections 2(a) and 2(b) above, Recipient -receives no rights or licenses to the intellectual property of any -Contributor under this Agreement, whether expressly, by implication, -estoppel or otherwise. All rights in the Program not expressly granted -under this Agreement are reserved. Nothing in this Agreement is intended -to be enforceable by any entity that is not a Contributor or Recipient. -No third-party beneficiary rights are created under this Agreement. - -Exhibit A - Form of Secondary Licenses Notice - -"This Source Code may also be made available under the following -Secondary Licenses when the conditions for such availability set forth -in the Eclipse Public License, v. 2.0 are satisfied: {name license(s), -version(s), and exceptions or additional permissions here}." - - Simply including a copy of this Agreement, including this Exhibit A - is not sufficient to license the Source Code under Secondary Licenses. - - If it is not possible or desirable to put the notice in a particular - file, then You may include the notice in a location (such as a LICENSE - file in a relevant directory) where a recipient would be likely to - look for such a notice. - - You may add additional accurate notices of copyright ownership. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/NOTICE-3RD-PARTY-CONTENT.md b/NOTICE-3RD-PARTY-CONTENT.md new file mode 100644 index 0000000..1b8b3f3 --- /dev/null +++ b/NOTICE-3RD-PARTY-CONTENT.md @@ -0,0 +1,52 @@ +# Licenses Notice +## Python +| Dependency | Version | License | +|:-----------|:-------:|--------:| +|attrs|21.4.0|MIT| +|grpcio|1.46.3|Apache 2.0| +|iniconfig|1.1.1|MIT| +|packaging|21.3|Apache 2.0
Simplified BSD| +|pluggy|1.0.0|MIT| +|protobuf|3.20.1|Google License| +|py|1.11.0|MIT| +|pyparsing|3.0.9|MIT| +|pytest|7.1.2|MIT| +|pytest-asyncio|0.18.3|Apache 2.0| +|pytest-ordering|0.6|MIT| +|six|1.16.0|MIT| +|tomli|2.0.1|MIT| +|types-protobuf|3.19.21|Apache 2.0| +## c++ +| Dependency | Version | License | +|:-----------|:-------:|--------:| +|abseil|20211102.0|Apache 2.0| +|c-ares|1.18.1|unknown| +|grpc|1.37.1|Apache 2.0| +|gtest|1.10.0|New BSD| +|openssl|1.1.1n|unknown| +|protobuf|3.20.0|Google License| +|re2|20220201|unknown| +|zlib|1.2.12|zlib/libpng license| +## Workflows +| Dependency | Version | License | +|:-----------|:-------:|--------:| +|5monkeys/cobertura-action|v12|MIT License| +|actions-rs/install|v0.1|MIT License| +|actions/cache|v3.0.2|MIT License| +|actions/checkout|v3|MIT License| +|actions/checkout|v2|MIT License| +|actions/download-artifact|v3|MIT License| +|actions/upload-artifact|v3|MIT License| +|ASzc/change-string-case-action|v2|ISC License| +|battila7/get-version-action|v2|MIT License| +|docker/build-push-action|v2|Apache License 2.0| +|docker/login-action|v1|Apache License 2.0| +|docker/setup-buildx-action|v1|Apache License 2.0| +|docker/setup-qemu-action|v1|Apache License 2.0| +|EnricoMi/publish-unit-test-result-action|v1|Apache License 2.0| +|github/codeql-action|v2|MIT License| +|megalinter/megalinter|v5|GNU Affero General Public License v3.0| +|peter-evans/create-pull-request|v4.0.1|MIT License| +|peter-evans/create-pull-request|v4.0.2|MIT License| +|softprops/action-gh-release|v1|MIT License| +|stefanzweifel/git-auto-commit-action|v4|MIT License| diff --git a/NOTICE.md b/NOTICE.md new file mode 100644 index 0000000..08378a5 --- /dev/null +++ b/NOTICE.md @@ -0,0 +1,33 @@ +# License Notice + +## Declared Project Licenses + +This program and the accompanying materials are made available under the terms +of the Apache License 2.0 which is available at + + +SPDX-License-Identifier: Apache-2.0 + +## Contributors +* Robert Bosch GmbH - 
initial API and implementation +* Microsoft Corporation - initial API and implementation + +## Third-party Content + +### Development +| Software | License | +|:--------------------:|:--------------------------------------------------:| +| GRPC | | +| */CodeCoverage.cmake | Copyright (c) 2012 - 2017, Lars Bilke | +| */generated\CAN.xxx | MIT License | + +### Tools +| Software | License | +|:------------------------------------:|:-----------------------------------------------------------------------------------------:| +| gTest | | +| Doxygen Awesome | MIT License | +| ./seat_service/docs/doxygen/doxyfile | [GNU General Public License v2.0](https://github.com/doxygen/doxygen/blob/master/LICENSE) | +| ./seat_service/toolchains/* | [MIT License](https://github.com/conan-io/docs/blob/develop/LICENSE) | + +### Further 3rd party licenses used in project +[3rd party licenses](./NOTICE-3RD-PARTY-CONTENT.md) diff --git a/README.md b/README.md new file mode 100644 index 0000000..fa6c8c5 --- /dev/null +++ b/README.md @@ -0,0 +1,148 @@ +# Kuksa.VAL.services + +- [Kuksa.VAL.services](#kuksavalservices) + - [Overview](#overview) + - [Contribution](#contribution) + - [Build Seat Service Containers](#build-seat-service-containers) + - [Running Seat Service / Databroker Containers](#running-seat-service--databroker-containers) + - [Privacy Customer Information](#privacy-customer-information) + - [Your Role](#your-role) + - [Where may the processing of personal related data be relevant?](#where-may-the-processing-of-personal-related-data-be-relevant) + - [What have we done to make the software data protection friendly?](#what-have-we-done-to-make-the-software-data-protection-friendly) + +## Overview + +The Kuksa.VAL.services repository is part of the overall Eclipse Kuksa Vehicle Abstraction Layer (VAL) set of repositories. +The VAL is offering a *Vehicle API*, which is an abstraction of vehicle data and functions to be used by *Vehicle Apps*. 
+Vehicle data is provided in form of a data model, which is accessible via the Vehicle Data Broker - see [Kuksa.VAL repository](https://github.com/eclipse/kuksa.val). +Vehicle functions are made available by a set of so-called *vehicle services* (short: *vservice*). +This repository contains examples of vservices and their implementations to show, how a Vehicle API and the underlying abstraction layer could be realized. + +It currently consists of +* a simple example [HVAC service (Python)](./hvac_service) and +* a more complex example [seat control service (C++)](./seat_service). + +More elaborate or completely differing implementations are target of "real world grown" projects. + + +## Contribution + +For contribution guidelines see [CONTRIBUTING.md](CONTRIBUTING.md) + + +## Build Seat Service Containers + +From the terminal, make the seat_service as your working directory: + +``` bash +cd seat_service +``` + +When you are inside the seat_service directory, create binaries: + +``` bash +./build-release.sh x86_64 + +#Use following commands for aarch64 +./build-release.sh aarch64 +``` +Build a tar file of all binaries. +``` bash +#Replace x86_64 with aarch64 for arm64 architecture +tar -czvf bin_vservice-seat_x86_64_release.tar.gz \ + target/x86_64/release/install/ \ + target/x86_64/release/licenses/ \ + proto/ +``` +To build the image execute following commands from root directory as context. +``` bash +docker build -f seat_service/Dockerfile -t seat_service: . + +#Use following command if buildplatform is required +DOCKER_BUILDKIT=1 docker build -f seat_service/Dockerfile -t seat_service: . +``` +The image creation may take around 2 minutes. + +## Running Seat Service / Databroker Containers + +To directly run the containers following commands can be used: + +1. Seat Service container + + By default the container will execute the `./val_start.sh` script, that sets default environment variables for seat service. 
+ It needs `CAN` environment variable with special value `cansim` (to use simulated socketcan calls) or any valid can device within the container. + + ``` bash + # executes ./va_start.sh + docker run --rm -it -p 50051:50051/tcp seat-service + ``` + + To run any specific command in the container, just append you command (e.g. bash) at the end. + + ``` bash + docker run --rm -it -p 50051:50051/tcp seat-service + ``` + + +For accessing databroker from seat service container there are two ways of running the containers. + +1. The simplest way to run the containers is to sacrifice the isolation a bit and run all the containers in the host's network namespace with docker run --network host + + ``` bash + #By default the container will execute the ./vehicle-data-broker command as entrypoint. + docker run --rm -it --network host -e 'RUST_LOG=info,vehicle_data_broker=debug' databroker + ``` + + ``` bash + #By default the container will execute the ./val_start.sh command as entrypoint + docker run --rm -it --network host seat-service + ``` + +1. There is a more subtle way to share a single network namespace between multiple containers. + So, we can start a sandbox container that will do nothing but sleep and reusing a network namespace of an this existing container: + + ``` bash + #Run sandbox container + docker run -d --rm --name sandbox -p 55555:55555 alpine sleep infinity + ``` + + ``` bash + #Run databroker container + docker run --rm -it --network container:sandbox -e HOST=0.0.0.0 -e PORT=55555 databroker + ``` + + ``` bash + #Run seat-service container + docker run --rm -it --network container:sandbox -e HOST=0.0.0.0 -e PORT=55555 -e PORT=50051 seat-service + ``` + +1. Another option is to use `:` and bind to `0.0.0.0` inside containers + + +## Privacy Customer Information + +Your privacy is important to us. 
+The following Information is to provide you with all information relevant to data protection in order to be able to use the software, in a data protection compliant manner. +It is provided as an information source for your solution-specific data protection and data privacy topics. +This is not intended to provide and should not be relied on for legal advice. + +### Your Role + +First things first: when you choose and use our software, you are most likely acting in the role of data controller, if personal related data is being processed. +Therefore, you must ensure that the processing of personal data complies with the respective local legal requirements, +e.g. when processing data within the scope of General Data Protection Regulation (GDPR) the legal requirements for a controller from the GDPR. + +### Where may the processing of personal related data be relevant? + +When using our software in combination with other software components, personal data or data categories may be collected for the purpose of developing, testing and running in-vehicle applications (Vehicle Apps). +Possible examples are the vehicle identification number (VIN), the number plate, GPS data, video data, audio data, or other measurement data. +You can determine which data or data categories are collected when configuring the software. +These data are stored in volatile memory and are deleted by shutting down the system. +You are responsible for the compliant handling of the data in accordance with the applicable local law. + +### What have we done to make the software data protection friendly? + +This section describes the measures taken to integrate the requirements of the data protection directly into the software development. +The technical measures described below follow a "privacy by design" approach. + +- Deletion possibility: The software does not save data permanently since it uses only volatile memory. All collected or processed data can be deleted by rebooting the host hardware. 
diff --git a/hvac_service/.gitignore b/hvac_service/.gitignore
new file mode 100644
index 0000000..b742f4d
--- /dev/null
+++ b/hvac_service/.gitignore
@@ -0,0 +1,4 @@
+__pycache__/
+.mypy_cache/
+.venv/
+*.pyo
diff --git a/hvac_service/Dockerfile b/hvac_service/Dockerfile
new file mode 100644
index 0000000..c33f9ad
--- /dev/null
+++ b/hvac_service/Dockerfile
@@ -0,0 +1,71 @@
+# /********************************************************************************
+# * Copyright (c) 2022 Contributors to the Eclipse Foundation
+# *
+# * See the NOTICE file(s) distributed with this work for additional
+# * information regarding copyright ownership.
+# *
+# * This program and the accompanying materials are made available under the
+# * terms of the Apache License 2.0 which is available at
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * SPDX-License-Identifier: Apache-2.0
+# ********************************************************************************/
+
+# Build stage, to create a Virtual Environment
+FROM --platform=$TARGETPLATFORM python:3.9-slim-bullseye as builder
+
+ARG TARGETPLATFORM
+ARG BUILDPLATFORM
+
+RUN echo "-- Running on $BUILDPLATFORM, building for $TARGETPLATFORM"
+
+RUN apt-get update -qqy && apt-get upgrade -qqy && apt-get install -qqy binutils g++
+
+COPY .
/ + +RUN python3 -m venv /opt/venv + +ENV PATH="/opt/venv/bin:$PATH" + +RUN /opt/venv/bin/python3 -m pip install --upgrade pip \ + && pip3 install --no-cache-dir -r requirements.txt + +RUN pip3 install wheel && pip3 install scons && pip3 install pyinstaller && pip3 install patchelf && pip3 install staticx + +RUN pyinstaller --clean -F -s hvacservice.py +# --debug=imports + +WORKDIR /dist +RUN staticx hvacservice hvacservice-exe + +# Runner stage, to copy in the virtual environment and the app +FROM scratch +#FROM alpine:latest +# FROM arm64v8/python:3.8.12-alpine as Runtime + +LABEL org.opencontainers.image.source="https://github.com/eclipse/kuksa.val.services" + +# needed as /dist/binary unpacks and runs from /tmp +WORKDIR /tmp +# optional volume mapping +WORKDIR /conf + +WORKDIR /dist + +COPY --from=builder /dist/hvacservice-exe . + +ENV PATH="/dist:$PATH" + +# useful dumps about feeding values +ENV LOG_LEVEL="info,databroker=debug,hvac_service=debug" + +# Vehicle Data Broker host:port +#ENV VDB_ADDRESS="localhost:55555" +# Override VDB_ADDRESS port if set +#ENV DAPR_GRPC_PORT="55555" +# VDB DAPR APP ID +ENV VEHICLEDATABROKER_DAPR_APP_ID=vehicledatabroker + +ENV PYTHONUNBUFFERED=yes + +CMD ["./hvacservice-exe"] diff --git a/hvac_service/README.md b/hvac_service/README.md new file mode 100644 index 0000000..2896819 --- /dev/null +++ b/hvac_service/README.md @@ -0,0 +1,31 @@ +# HVAC service example + +The HVAC service is a service dummy allowing to control the state of the A/C and the desired cabin temperature. +"Dummy" means, that changes of those two states are just forwarded and reflected as two respective data points in the data broker. 
+ +```text + +----------------+ + | | + ----( O------| Data broker |-----O )------ + | Broker | | Collector | + | +----------------+ | + | | + | | ++-----------------+ +----------------+ +| | | | +| HVAC client |-----------( O-----------| HVAC service | +| | HVAC | | ++-----------------+ service +----------------+ +``` + +## Configuration + +| parameter | default value | Env var | description | +|----------------|-----------------------|----------------------------------------------------------------------------------|---------------------------------| +| listen_address | `"127.0.0.1:50052"` | `HVAC_ADDR` | Listen for rpc calls | +| broker_address | `"127.0.0.1:55555"` | `"127.0.0.1:$DAPR_GRPC_PORT"` (if DAPR_GRPC_PORT is set)
#!/bin/bash
#********************************************************************************
# Copyright (c) 2022 Contributors to the Eclipse Foundation
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License 2.0 which is available at
# http://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
#*******************************************************************************/
# shellcheck disable=SC2086

# Standalone build helper for the hvac service docker image.
# Builds a single-arch or multiarch image and either loads it into the local
# docker daemon (--local) or exports it as an OCI tar archive.

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONTEXT_DIR="$SCRIPT_DIR"
# name of docker image: ${DOCKER_ARCH}/${DOCKER_IMAGE}
DOCKER_IMAGE="oci_vservice-hvac"

print_usage() {
    echo "USAGE: $0 [OPTIONS] TARGETS"
    echo
    echo "Standalone build helper for $DOCKER_IMAGE docker image."
    echo
    echo "OPTIONS:"
    echo "  -l, --local      local docker import (does not export tar)"
    echo "  -v, --verbose    enable plain docker output and disable cache"
    echo "      --help       show help"
    echo
    echo "TARGETS:"
    # NOTE: original usage text said "aarch64|amd64" for the arm64 target
    echo "  x86_64|amd64, aarch64|arm64    Target arch to build for, if not set - defaults to multiarch"
    echo
}

# Parse arguments; the last non-option argument wins as TARGET.
LOCAL=0
VERBOSE=0
TARGET=""
while [ $# -gt 0 ]; do
    case "$1" in
        -l | --local)
            LOCAL=1
            ;;
        -v | --verbose)
            VERBOSE=1
            ;;
        --help)
            print_usage
            exit 0
            ;;
        *)
            TARGET="$1"
            ;;
    esac
    shift
done

# Map a target alias (x86_64, aarch64, ...) to the docker platform arch name.
# Prints the mapped name on stdout; returns 1 for unknown targets.
target_arch() {
    case "$1" in
        "x86_64" | "amd64")
            echo "amd64"
            ;;
        "arm64" | "aarch64")
            echo "arm64"
            ;;
        "armv6" | "arm")
            echo "arm/v6"
            ;;
        "multiarch" | "")
            echo "multiarch"
            ;;
        *)
            return 1
            ;;
    esac
    return 0
}

if [ -z "$TARGET" ] && [ $LOCAL -eq 1 ]; then
    echo "Multiarch archives are not supported for local builds, removing --local flag ..."
    LOCAL=0
fi

# Fail early on unsupported targets (original silently continued with an
# empty DOCKER_ARCH).
if ! DOCKER_ARCH=$(target_arch "$TARGET"); then
    echo "Unsupported target: $TARGET"
    print_usage
    exit 1
fi
DOCKER_EXPORT="./${DOCKER_ARCH//\//_}-${DOCKER_IMAGE}.tar"

if [ "$DOCKER_ARCH" = "multiarch" ]; then
    DOCKER_ARGS="--platform linux/amd64,linux/arm64 -t $DOCKER_ARCH/$DOCKER_IMAGE --output type=oci,dest=$DOCKER_EXPORT"
elif [ $LOCAL -eq 1 ]; then
    DOCKER_ARGS="--load -t $DOCKER_ARCH/$DOCKER_IMAGE"
    DOCKER_EXPORT="(local)"
else
    DOCKER_ARGS="--platform linux/$DOCKER_ARCH -t $DOCKER_ARCH/$DOCKER_IMAGE --output type=oci,dest=$DOCKER_EXPORT"
fi

if [ "$VERBOSE" = "1" ]; then
    DOCKER_ARGS="--no-cache --progress=plain $DOCKER_ARGS"
fi

cd "$CONTEXT_DIR" || exit 1
echo "# docker buildx build $DOCKER_ARGS -f Dockerfile $CONTEXT_DIR"
# NOTE: original appended an undefined $DOCKER_EXT to this command line.
DOCKER_BUILDKIT=1 docker buildx build $DOCKER_ARGS -f Dockerfile "$CONTEXT_DIR" || exit 1
if [ $LOCAL -eq 1 ]; then
    # Tag only after a successful --load build, so the image actually exists
    # (original ran "docker image tag" before building).
    docker image tag "$DOCKER_ARCH/$DOCKER_IMAGE" "ghcr.io/eclipse/kuksa.val.services/$DOCKER_IMAGE:prerelease"
fi
echo "# Exported $DOCKER_ARCH/$DOCKER_IMAGE in $DOCKER_EXPORT"
# /********************************************************************************
# * Copyright (c) 2022 Contributors to the Eclipse Foundation
# *
# * See the NOTICE file(s) distributed with this work for additional
# * information regarding copyright ownership.
# *
# * This program and the accompanying materials are made available under the
# * terms of the Apache License 2.0 which is available at
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * SPDX-License-Identifier: Apache-2.0
# ********************************************************************************/

"""Example HVAC service.

Serves the ``sdv.edge.comfort.hvac.v1.Hvac`` gRPC API (SetAcStatus /
SetTemperature) and forwards the requested states as datapoint updates to
the Vehicle Data Broker collector API.
"""

import logging
import os
import signal
import sys
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from threading import Thread

import grpc
from sdv.databroker.v1.collector_pb2 import (
    RegisterDatapointsRequest,
    RegistrationMetadata,
    UpdateDatapointsRequest,
)
from sdv.databroker.v1.collector_pb2_grpc import CollectorStub
from sdv.databroker.v1.types_pb2 import ChangeType, DataType
from sdv.edge.comfort.hvac.v1.hvac_pb2 import (
    SetAcStatusReply,
    SetAcStatusRequest,
    SetTemperatureReply,
    SetTemperatureRequest,
)
from sdv.edge.comfort.hvac.v1.hvac_pb2_grpc import (
    HvacServicer,
    add_HvacServicer_to_server,
)

log = logging.getLogger("hvac_service")
event = threading.Event()

# HVAC Service bind "host:port"
HVAC_ADDRESS = os.getenv("HVAC_ADDR", "0.0.0.0:50052")
# VehicleDataBroker address, overridden if "DAPR_GRPC_PORT" is set in environment
VDB_ADDRESS = os.getenv("VDB_ADDRESS", "127.0.0.1:55555")

# Status codes after which the broker connection is considered broken and
# datapoints must be re-registered once reconnected.
_FATAL_GRPC_CODES = (
    grpc.StatusCode.UNAVAILABLE,
    grpc.StatusCode.UNKNOWN,
    grpc.StatusCode.UNAUTHENTICATED,
    grpc.StatusCode.INTERNAL,
)


def is_grpc_fatal_error(e: grpc.RpcError) -> bool:
    """Return True if *e* indicates the data broker connection is unusable."""
    fatal = e.code() in _FATAL_GRPC_CODES
    if fatal:
        log.error("Feeding aborted due to RpcError(%s, '%s')", e.code(), e.details())
    else:
        log.warning("Unhandled RpcError(%s, '%s')", e.code(), e.details())
    return fatal


class HvacService:
    """API to access signals.

    Connects to the Vehicle Data Broker in a background daemon thread,
    registers the HVAC datapoints there and serves the Hvac gRPC API.
    """

    def __init__(self, hvac_address: str):
        """Create the service and start the broker connector thread.

        Args:
            hvac_address: "host:port" this service listens on.
        """
        # DAPR_GRPC_PORT (if set) overrides VDB_ADDRESS: talk to the local
        # dapr sidecar proxy instead of the broker directly.
        dapr_port = os.getenv("DAPR_GRPC_PORT")
        if dapr_port is not None:
            self._vdb_address = f"127.0.0.1:{int(dapr_port)}"
        else:
            self._vdb_address = VDB_ADDRESS
        self._address = hvac_address
        self._ids: dict = {}  # datapoint name -> broker-assigned id
        self._connected = False
        self._registered = False
        self._shutdown = False
        self._databroker_thread = Thread(
            target=self.connect_to_databroker, daemon=True, name="databroker-connector"
        )
        self._databroker_thread.start()

    def connect_to_databroker(self) -> None:
        """Open the broker channel and run the (re)registration loop."""
        log.info("Connecting to Data Broker [%s]", self._vdb_address)
        dapr_app_id = os.getenv("VEHICLEDATABROKER_DAPR_APP_ID")
        if dapr_app_id is not None:
            self._metadata = (("dapr-app-id", dapr_app_id),)
            # give some time for dapr sidecar startup...
            time.sleep(2)
        else:
            self._metadata = None
        self._channel: grpc.Channel = grpc.insecure_channel(self._vdb_address)
        self._stub = CollectorStub(self._channel)
        log.info("Using gRPC metadata: %s", self._metadata)
        self._channel.subscribe(
            self.on_broker_connectivity_change,
            try_to_connect=False,
        )
        self._run()

    def on_broker_connectivity_change(self, connectivity):
        """Track channel state; (re)register datapoints on reconnect."""
        log.info("[%s] Connectivity changed to: %s", self._vdb_address, connectivity)
        if connectivity in (
            grpc.ChannelConnectivity.READY,
            grpc.ChannelConnectivity.IDLE,
        ):
            # Can change between READY and IDLE. Only act if coming from
            # unconnected state
            if not self._connected:
                log.info("Connected to data broker")
                try:
                    self.register_datapoints()
                    log.info("datapoints are registered.")
                    self._registered = True
                except grpc.RpcError as err:
                    log.error("Failed to register datapoints")
                    is_grpc_fatal_error(err)
                except Exception:
                    log.error("Failed to register datapoints", exc_info=True)
            self._connected = True
        else:
            if self._connected:
                log.info("Disconnected from data broker")
            elif connectivity == grpc.ChannelConnectivity.CONNECTING:
                log.info("Trying to connect to data broker")
            self._connected = False
            self._registered = False

    def _run(self):
        """Poll until shutdown; retry datapoint registration when needed."""
        while not self._shutdown:
            if not self._connected:
                time.sleep(0.2)
            elif not self._registered:
                try:
                    log.debug("Try to register datapoints")
                    self.register_datapoints()
                    self._registered = True
                except grpc.RpcError as err:
                    is_grpc_fatal_error(err)
                    log.debug("Failed to register datapoints", exc_info=True)
                    time.sleep(3)
                except Exception:
                    log.error("Failed to register datapoints", exc_info=True)
                    time.sleep(1)
            else:
                # TODO: check if dapr grpc proxy has active connection
                # (e.g. send last temp value)
                time.sleep(1)

    def serve(self):
        """Start the Hvac gRPC server and block until termination."""
        log.info("Starting HVAC Service on %s", self._address)
        server = grpc.server(ThreadPoolExecutor(max_workers=10))
        add_HvacServicer_to_server(self._HvacService(self), server)
        server.add_insecure_port(self._address)
        server.start()
        server.wait_for_termination()

    def close(self):
        """Close the broker gRPC channel and stop the connector loop.

        NOTE(review): grpc.insecure_channel() returns a *synchronous*
        channel whose close() is a plain method returning None - the
        original ``await self._channel.close()`` would raise TypeError.
        """
        self._shutdown = True
        # The connector thread creates _channel asynchronously; it may not
        # exist yet if close() is called right after construction.
        if getattr(self, "_channel", None) is not None:
            self._channel.close()

    def __enter__(self) -> "HvacService":
        return self

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        # Original scheduled the (broken) close coroutine on an event loop
        # from a foreign thread; a direct synchronous close is sufficient.
        self.close()

    def register_datapoints(self):
        """Register the two HVAC datapoints at the data broker.

        Raises:
            grpc.RpcError: if the broker is not reachable.
        """
        # Provided via CAN feeder:
        log.info("Try register datapoints")
        self.register(
            "Vehicle.Cabin.IsAirConditioningActive",
            DataType.BOOL,
            ChangeType.ON_CHANGE,
        )
        self.register(
            "Vehicle.Cabin.DesiredAmbientAirTemperature",
            DataType.FLOAT,
            ChangeType.ON_CHANGE,
        )

    def register(self, name, data_type, change_type):
        """Register a single datapoint (public wrapper around _register)."""
        self._register(name, data_type, change_type)

    def _register(self, name, data_type, change_type):
        """Issue the RegisterDatapoints RPC and remember the assigned id."""
        metadata = RegistrationMetadata()
        metadata.name = name
        metadata.data_type = data_type
        metadata.description = ""
        metadata.change_type = change_type
        request = RegisterDatapointsRequest()
        request.list.append(metadata)
        response = self._stub.RegisterDatapoints(request, metadata=self._metadata)
        self._ids[name] = response.results[name]

    def set_float_datapoint(self, name: str, value: float):
        """Feed a float value for a registered datapoint to the broker."""
        request = UpdateDatapointsRequest()
        request.datapoints[self._ids[name]].float_value = value
        try:
            log.info(" Feeding '%s' with value %s", name, value)
            self._stub.UpdateDatapoints(request, metadata=self._metadata)
        except grpc.RpcError as err:
            log.warning("Feeding %s failed", name, exc_info=True)
            self._connected = is_grpc_fatal_error(err)
            raise err

    def set_bool_datapoint(self, name: str, value: bool):
        """Feed a bool value for a registered datapoint to the broker."""
        request = UpdateDatapointsRequest()
        request.datapoints[self._ids[name]].bool_value = value
        log.info(" Feeding '%s' with value %s", name, value)
        try:
            self._stub.UpdateDatapoints(request, metadata=self._metadata)
        except grpc.RpcError as err:
            log.warning("Feeding %s failed", name, exc_info=True)
            self._connected = is_grpc_fatal_error(err)
            raise err

    class _HvacService(HvacServicer):
        """gRPC servicer delegating Hvac RPCs to the outer HvacService."""

        def __init__(self, servicer):
            self.servicer: HvacService = servicer

        def SetTemperature(self, request: SetTemperatureRequest, context):
            log.info("* Request to set temperature %s", str(request).replace("\n", " "))
            self.servicer.set_float_datapoint(
                "Vehicle.Cabin.DesiredAmbientAirTemperature", request.temperature
            )
            log.info("  Temp updated.\n")
            return SetTemperatureReply()

        def SetAcStatus(self, request: SetAcStatusRequest, context):
            log.info("* Request to set AC %s", str(request).replace("\n", " "))
            self.servicer.set_bool_datapoint(
                "Vehicle.Cabin.IsAirConditioningActive", request.status
            )
            log.info("  AC Status updated.\n")
            return SetAcStatusReply()


def main():
    """Start the HVAC service; blocks until the gRPC server terminates."""
    hvac_service = HvacService(HVAC_ADDRESS)
    hvac_service.serve()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    log.setLevel(logging.DEBUG)
    # Exit gracefully on SIGTERM (e.g. container stop). The original used an
    # asyncio loop whose SIGTERM handler could never run, because the loop
    # was blocked inside the synchronous serve() call.
    signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0))
    main()
regarding copyright ownership. +* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +syntax = "proto3"; + +package sdv.edge.comfort.hvac.v1; + +/** + * @brief Example HVAC service for controlling the heating, ventilation, and air + conditioning elements of the vehicle cabin. + * This definition is designed here according to the draft of the comfort seats + * service definition of the COVESA Vehicle Service Catalog (VSC) (see + * https://github.com/COVESA/vehicle_service_catalog) as a definition of an + * HVAC service is currently missing in VSC. + */ +service Hvac { + /** Set the desired ac status + * + * Returns gRPC status codes: + * * OK - AcStatus set + * * INVALID_ARGUMENT - The requested AcStatus is not supported by the service instance + * * INTERNAL - A HVAC service internal error happened - see error message for details + */ + rpc SetAcStatus(SetAcStatusRequest) returns (SetAcStatusReply); + + /** Set the desired cabin temparature + * + * Returns gRPC status codes: + * * OK - Desired temperature set + * * OUT_OF_RANGE - The specified temperature is not supported in this vehicle + * * INTERNAL - A HVAC service internal error happened - see error message for details + */ + rpc SetTemperature(SetTemperatureRequest) returns (SetTemperatureReply); + +} + +/** + * @brief + * + */ +message SetAcStatusRequest { + AcStatus status = 1; // The desired status of A/C +} + +/** + * @brief + * + */ +message SetAcStatusReply {} + +/** + * @brief + * + */ +message SetTemperatureRequest { + float temperature = 1; // The desired cabin temperature in degree Celsius +} + +/** + * @brief + * + */ +message SetTemperatureReply {} + +enum AcStatus { + OFF = 0; + ON = 1; +} \ No newline at end of file diff --git 
a/hvac_service/requirements-dev.txt b/hvac_service/requirements-dev.txt new file mode 100644 index 0000000..20252f6 --- /dev/null +++ b/hvac_service/requirements-dev.txt @@ -0,0 +1,4 @@ +grpcio>=1.26.0 +protobuf==3.19.3 +types-protobuf >= 0.1.14 +grpcio-tools>=1.26.0 diff --git a/hvac_service/requirements.txt b/hvac_service/requirements.txt new file mode 100644 index 0000000..7c9f19b --- /dev/null +++ b/hvac_service/requirements.txt @@ -0,0 +1,3 @@ +grpcio +protobuf +types-protobuf diff --git a/hvac_service/sdv/__init__.py b/hvac_service/sdv/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hvac_service/sdv/databroker/__init__.py b/hvac_service/sdv/databroker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hvac_service/sdv/databroker/v1/__init__.py b/hvac_service/sdv/databroker/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hvac_service/sdv/databroker/v1/broker_pb2.py b/hvac_service/sdv/databroker/v1/broker_pb2.py new file mode 100644 index 0000000..463b2ef --- /dev/null +++ b/hvac_service/sdv/databroker/v1/broker_pb2.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: sdv/databroker/v1/broker.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1esdv/databroker/v1/broker.proto\x12\x11sdv.databroker.v1\x1a\x1dsdv/databroker/v1/types.proto"*\n\x14GetDatapointsRequest\x12\x12\n\ndatapoints\x18\x01 \x03(\t"\xb0\x01\n\x12GetDatapointsReply\x12I\n\ndatapoints\x18\x01 \x03(\x0b\x32\x35.sdv.databroker.v1.GetDatapointsReply.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01"!\n\x10SubscribeRequest\x12\r\n\x05query\x18\x02 \x01(\t"\x9c\x01\n\x0eSubscribeReply\x12=\n\x06\x66ields\x18\x01 \x03(\x0b\x32-.sdv.databroker.v1.SubscribeReply.FieldsEntry\x1aK\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01"#\n\x12GetMetadataRequest\x12\r\n\x05names\x18\x01 \x03(\t"=\n\x10GetMetadataReply\x12)\n\x04list\x18\x01 \x03(\x0b\x32\x1b.sdv.databroker.v1.Metadata2\x9b\x02\n\x06\x42roker\x12_\n\rGetDatapoints\x12\'.sdv.databroker.v1.GetDatapointsRequest\x1a%.sdv.databroker.v1.GetDatapointsReply\x12U\n\tSubscribe\x12#.sdv.databroker.v1.SubscribeRequest\x1a!.sdv.databroker.v1.SubscribeReply0\x01\x12Y\n\x0bGetMetadata\x12%.sdv.databroker.v1.GetMetadataRequest\x1a#.sdv.databroker.v1.GetMetadataReplyb\x06proto3' +) + + +_GETDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name["GetDatapointsRequest"] +_GETDATAPOINTSREPLY = 
DESCRIPTOR.message_types_by_name["GetDatapointsReply"] +_GETDATAPOINTSREPLY_DATAPOINTSENTRY = _GETDATAPOINTSREPLY.nested_types_by_name[ + "DatapointsEntry" +] +_SUBSCRIBEREQUEST = DESCRIPTOR.message_types_by_name["SubscribeRequest"] +_SUBSCRIBEREPLY = DESCRIPTOR.message_types_by_name["SubscribeReply"] +_SUBSCRIBEREPLY_FIELDSENTRY = _SUBSCRIBEREPLY.nested_types_by_name["FieldsEntry"] +_GETMETADATAREQUEST = DESCRIPTOR.message_types_by_name["GetMetadataRequest"] +_GETMETADATAREPLY = DESCRIPTOR.message_types_by_name["GetMetadataReply"] +GetDatapointsRequest = _reflection.GeneratedProtocolMessageType( + "GetDatapointsRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETDATAPOINTSREQUEST, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetDatapointsRequest) + }, +) +_sym_db.RegisterMessage(GetDatapointsRequest) + +GetDatapointsReply = _reflection.GeneratedProtocolMessageType( + "GetDatapointsReply", + (_message.Message,), + { + "DatapointsEntry": _reflection.GeneratedProtocolMessageType( + "DatapointsEntry", + (_message.Message,), + { + "DESCRIPTOR": _GETDATAPOINTSREPLY_DATAPOINTSENTRY, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetDatapointsReply.DatapointsEntry) + }, + ), + "DESCRIPTOR": _GETDATAPOINTSREPLY, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetDatapointsReply) + }, +) +_sym_db.RegisterMessage(GetDatapointsReply) +_sym_db.RegisterMessage(GetDatapointsReply.DatapointsEntry) + +SubscribeRequest = _reflection.GeneratedProtocolMessageType( + "SubscribeRequest", + (_message.Message,), + { + "DESCRIPTOR": _SUBSCRIBEREQUEST, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.SubscribeRequest) + }, +) +_sym_db.RegisterMessage(SubscribeRequest) + +SubscribeReply = _reflection.GeneratedProtocolMessageType( + "SubscribeReply", + 
(_message.Message,), + { + "FieldsEntry": _reflection.GeneratedProtocolMessageType( + "FieldsEntry", + (_message.Message,), + { + "DESCRIPTOR": _SUBSCRIBEREPLY_FIELDSENTRY, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.SubscribeReply.FieldsEntry) + }, + ), + "DESCRIPTOR": _SUBSCRIBEREPLY, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.SubscribeReply) + }, +) +_sym_db.RegisterMessage(SubscribeReply) +_sym_db.RegisterMessage(SubscribeReply.FieldsEntry) + +GetMetadataRequest = _reflection.GeneratedProtocolMessageType( + "GetMetadataRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETMETADATAREQUEST, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetMetadataRequest) + }, +) +_sym_db.RegisterMessage(GetMetadataRequest) + +GetMetadataReply = _reflection.GeneratedProtocolMessageType( + "GetMetadataReply", + (_message.Message,), + { + "DESCRIPTOR": _GETMETADATAREPLY, + "__module__": "sdv.databroker.v1.broker_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetMetadataReply) + }, +) +_sym_db.RegisterMessage(GetMetadataReply) + +_BROKER = DESCRIPTOR.services_by_name["Broker"] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._options = None + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._serialized_options = b"8\001" + _SUBSCRIBEREPLY_FIELDSENTRY._options = None + _SUBSCRIBEREPLY_FIELDSENTRY._serialized_options = b"8\001" + _GETDATAPOINTSREQUEST._serialized_start = 84 + _GETDATAPOINTSREQUEST._serialized_end = 126 + _GETDATAPOINTSREPLY._serialized_start = 129 + _GETDATAPOINTSREPLY._serialized_end = 305 + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._serialized_start = 226 + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._serialized_end = 305 + _SUBSCRIBEREQUEST._serialized_start = 307 + _SUBSCRIBEREQUEST._serialized_end = 340 + 
_SUBSCRIBEREPLY._serialized_start = 343 + _SUBSCRIBEREPLY._serialized_end = 499 + _SUBSCRIBEREPLY_FIELDSENTRY._serialized_start = 424 + _SUBSCRIBEREPLY_FIELDSENTRY._serialized_end = 499 + _GETMETADATAREQUEST._serialized_start = 501 + _GETMETADATAREQUEST._serialized_end = 536 + _GETMETADATAREPLY._serialized_start = 538 + _GETMETADATAREPLY._serialized_end = 599 + _BROKER._serialized_start = 602 + _BROKER._serialized_end = 885 +# @@protoc_insertion_point(module_scope) diff --git a/hvac_service/sdv/databroker/v1/broker_pb2_grpc.py b/hvac_service/sdv/databroker/v1/broker_pb2_grpc.py new file mode 100644 index 0000000..7456201 --- /dev/null +++ b/hvac_service/sdv/databroker/v1/broker_pb2_grpc.py @@ -0,0 +1,182 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +from sdv.databroker.v1 import broker_pb2 as sdv_dot_databroker_dot_v1_dot_broker__pb2 + + +class BrokerStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.GetDatapoints = channel.unary_unary( + "/sdv.databroker.v1.Broker/GetDatapoints", + request_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsReply.FromString, + ) + self.Subscribe = channel.unary_stream( + "/sdv.databroker.v1.Broker/Subscribe", + request_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeReply.FromString, + ) + self.GetMetadata = channel.unary_unary( + "/sdv.databroker.v1.Broker/GetMetadata", + request_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataReply.FromString, + ) + + +class BrokerServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetDatapoints(self, request, context): + """Request a set of datapoints (values) + + Returns a list of requested data points. + + InvalidArgument is returned if the request is malformed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def Subscribe(self, request, context): + """Subscribe to a set of data points or conditional expressions + using the Data Broker Query Syntax (described in QUERY.md) + + Returns a stream of replies. + + InvalidArgument is returned if the request is malformed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetMetadata(self, request, context): + """Request the metadata of a set of datapoints + + Returns metadata of the requested data points that exist. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_BrokerServicer_to_server(servicer, server): + rpc_method_handlers = { + "GetDatapoints": grpc.unary_unary_rpc_method_handler( + servicer.GetDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsReply.SerializeToString, + ), + "Subscribe": grpc.unary_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeReply.SerializeToString, + ), + "GetMetadata": grpc.unary_unary_rpc_method_handler( + servicer.GetMetadata, + request_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "sdv.databroker.v1.Broker", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. 
+class Broker(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetDatapoints( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/sdv.databroker.v1.Broker/GetDatapoints", + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def Subscribe( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_stream( + request, + target, + "/sdv.databroker.v1.Broker/Subscribe", + sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetMetadata( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/sdv.databroker.v1.Broker/GetMetadata", + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/hvac_service/sdv/databroker/v1/collector_pb2.py 
b/hvac_service/sdv/databroker/v1/collector_pb2.py new file mode 100644 index 0000000..ae826f1 --- /dev/null +++ b/hvac_service/sdv/databroker/v1/collector_pb2.py @@ -0,0 +1,214 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: sdv/databroker/v1/collector.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n!sdv/databroker/v1/collector.proto\x12\x11sdv.databroker.v1\x1a\x1dsdv/databroker/v1/types.proto"\xba\x01\n\x17UpdateDatapointsRequest\x12N\n\ndatapoints\x18\x01 \x03(\x0b\x32:.sdv.databroker.v1.UpdateDatapointsRequest.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01"\xaf\x01\n\x15UpdateDatapointsReply\x12\x44\n\x06\x65rrors\x18\x01 \x03(\x0b\x32\x34.sdv.databroker.v1.UpdateDatapointsReply.ErrorsEntry\x1aP\n\x0b\x45rrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0e\x32!.sdv.databroker.v1.DatapointError:\x02\x38\x01"\xba\x01\n\x17StreamDatapointsRequest\x12N\n\ndatapoints\x18\x01 \x03(\x0b\x32:.sdv.databroker.v1.StreamDatapointsRequest.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01"\xaf\x01\n\x15StreamDatapointsReply\x12\x44\n\x06\x65rrors\x18\x01 
\x03(\x0b\x32\x34.sdv.databroker.v1.StreamDatapointsReply.ErrorsEntry\x1aP\n\x0b\x45rrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0e\x32!.sdv.databroker.v1.DatapointError:\x02\x38\x01"R\n\x19RegisterDatapointsRequest\x12\x35\n\x04list\x18\x01 \x03(\x0b\x32\'.sdv.databroker.v1.RegistrationMetadata"\x9d\x01\n\x14RegistrationMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\tdata_type\x18\x02 \x01(\x0e\x32\x1b.sdv.databroker.v1.DataType\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x32\n\x0b\x63hange_type\x18\x04 \x01(\x0e\x32\x1d.sdv.databroker.v1.ChangeType"\x93\x01\n\x17RegisterDatapointsReply\x12H\n\x07results\x18\x01 \x03(\x0b\x32\x37.sdv.databroker.v1.RegisterDatapointsReply.ResultsEntry\x1a.\n\x0cResultsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x32\xd3\x02\n\tCollector\x12n\n\x12RegisterDatapoints\x12,.sdv.databroker.v1.RegisterDatapointsRequest\x1a*.sdv.databroker.v1.RegisterDatapointsReply\x12h\n\x10UpdateDatapoints\x12*.sdv.databroker.v1.UpdateDatapointsRequest\x1a(.sdv.databroker.v1.UpdateDatapointsReply\x12l\n\x10StreamDatapoints\x12*.sdv.databroker.v1.StreamDatapointsRequest\x1a(.sdv.databroker.v1.StreamDatapointsReply(\x01\x30\x01\x62\x06proto3' +) + + +_UPDATEDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name["UpdateDatapointsRequest"] +_UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY = ( + _UPDATEDATAPOINTSREQUEST.nested_types_by_name["DatapointsEntry"] +) +_UPDATEDATAPOINTSREPLY = DESCRIPTOR.message_types_by_name["UpdateDatapointsReply"] +_UPDATEDATAPOINTSREPLY_ERRORSENTRY = _UPDATEDATAPOINTSREPLY.nested_types_by_name[ + "ErrorsEntry" +] +_STREAMDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name["StreamDatapointsRequest"] +_STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY = ( + _STREAMDATAPOINTSREQUEST.nested_types_by_name["DatapointsEntry"] +) +_STREAMDATAPOINTSREPLY = DESCRIPTOR.message_types_by_name["StreamDatapointsReply"] +_STREAMDATAPOINTSREPLY_ERRORSENTRY = 
_STREAMDATAPOINTSREPLY.nested_types_by_name[ + "ErrorsEntry" +] +_REGISTERDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name[ + "RegisterDatapointsRequest" +] +_REGISTRATIONMETADATA = DESCRIPTOR.message_types_by_name["RegistrationMetadata"] +_REGISTERDATAPOINTSREPLY = DESCRIPTOR.message_types_by_name["RegisterDatapointsReply"] +_REGISTERDATAPOINTSREPLY_RESULTSENTRY = _REGISTERDATAPOINTSREPLY.nested_types_by_name[ + "ResultsEntry" +] +UpdateDatapointsRequest = _reflection.GeneratedProtocolMessageType( + "UpdateDatapointsRequest", + (_message.Message,), + { + "DatapointsEntry": _reflection.GeneratedProtocolMessageType( + "DatapointsEntry", + (_message.Message,), + { + "DESCRIPTOR": _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsRequest.DatapointsEntry) + }, + ), + "DESCRIPTOR": _UPDATEDATAPOINTSREQUEST, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsRequest) + }, +) +_sym_db.RegisterMessage(UpdateDatapointsRequest) +_sym_db.RegisterMessage(UpdateDatapointsRequest.DatapointsEntry) + +UpdateDatapointsReply = _reflection.GeneratedProtocolMessageType( + "UpdateDatapointsReply", + (_message.Message,), + { + "ErrorsEntry": _reflection.GeneratedProtocolMessageType( + "ErrorsEntry", + (_message.Message,), + { + "DESCRIPTOR": _UPDATEDATAPOINTSREPLY_ERRORSENTRY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsReply.ErrorsEntry) + }, + ), + "DESCRIPTOR": _UPDATEDATAPOINTSREPLY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsReply) + }, +) +_sym_db.RegisterMessage(UpdateDatapointsReply) +_sym_db.RegisterMessage(UpdateDatapointsReply.ErrorsEntry) + +StreamDatapointsRequest = _reflection.GeneratedProtocolMessageType( + 
"StreamDatapointsRequest", + (_message.Message,), + { + "DatapointsEntry": _reflection.GeneratedProtocolMessageType( + "DatapointsEntry", + (_message.Message,), + { + "DESCRIPTOR": _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsRequest.DatapointsEntry) + }, + ), + "DESCRIPTOR": _STREAMDATAPOINTSREQUEST, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsRequest) + }, +) +_sym_db.RegisterMessage(StreamDatapointsRequest) +_sym_db.RegisterMessage(StreamDatapointsRequest.DatapointsEntry) + +StreamDatapointsReply = _reflection.GeneratedProtocolMessageType( + "StreamDatapointsReply", + (_message.Message,), + { + "ErrorsEntry": _reflection.GeneratedProtocolMessageType( + "ErrorsEntry", + (_message.Message,), + { + "DESCRIPTOR": _STREAMDATAPOINTSREPLY_ERRORSENTRY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsReply.ErrorsEntry) + }, + ), + "DESCRIPTOR": _STREAMDATAPOINTSREPLY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsReply) + }, +) +_sym_db.RegisterMessage(StreamDatapointsReply) +_sym_db.RegisterMessage(StreamDatapointsReply.ErrorsEntry) + +RegisterDatapointsRequest = _reflection.GeneratedProtocolMessageType( + "RegisterDatapointsRequest", + (_message.Message,), + { + "DESCRIPTOR": _REGISTERDATAPOINTSREQUEST, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.RegisterDatapointsRequest) + }, +) +_sym_db.RegisterMessage(RegisterDatapointsRequest) + +RegistrationMetadata = _reflection.GeneratedProtocolMessageType( + "RegistrationMetadata", + (_message.Message,), + { + "DESCRIPTOR": _REGISTRATIONMETADATA, + "__module__": "sdv.databroker.v1.collector_pb2" + # 
@@protoc_insertion_point(class_scope:sdv.databroker.v1.RegistrationMetadata) + }, +) +_sym_db.RegisterMessage(RegistrationMetadata) + +RegisterDatapointsReply = _reflection.GeneratedProtocolMessageType( + "RegisterDatapointsReply", + (_message.Message,), + { + "ResultsEntry": _reflection.GeneratedProtocolMessageType( + "ResultsEntry", + (_message.Message,), + { + "DESCRIPTOR": _REGISTERDATAPOINTSREPLY_RESULTSENTRY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.RegisterDatapointsReply.ResultsEntry) + }, + ), + "DESCRIPTOR": _REGISTERDATAPOINTSREPLY, + "__module__": "sdv.databroker.v1.collector_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.RegisterDatapointsReply) + }, +) +_sym_db.RegisterMessage(RegisterDatapointsReply) +_sym_db.RegisterMessage(RegisterDatapointsReply.ResultsEntry) + +_COLLECTOR = DESCRIPTOR.services_by_name["Collector"] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._options = None + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_options = b"8\001" + _UPDATEDATAPOINTSREPLY_ERRORSENTRY._options = None + _UPDATEDATAPOINTSREPLY_ERRORSENTRY._serialized_options = b"8\001" + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._options = None + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_options = b"8\001" + _STREAMDATAPOINTSREPLY_ERRORSENTRY._options = None + _STREAMDATAPOINTSREPLY_ERRORSENTRY._serialized_options = b"8\001" + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._options = None + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._serialized_options = b"8\001" + _UPDATEDATAPOINTSREQUEST._serialized_start = 88 + _UPDATEDATAPOINTSREQUEST._serialized_end = 274 + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_start = 195 + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_end = 274 + _UPDATEDATAPOINTSREPLY._serialized_start = 277 + _UPDATEDATAPOINTSREPLY._serialized_end = 452 + 
_UPDATEDATAPOINTSREPLY_ERRORSENTRY._serialized_start = 372 + _UPDATEDATAPOINTSREPLY_ERRORSENTRY._serialized_end = 452 + _STREAMDATAPOINTSREQUEST._serialized_start = 455 + _STREAMDATAPOINTSREQUEST._serialized_end = 641 + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_start = 195 + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_end = 274 + _STREAMDATAPOINTSREPLY._serialized_start = 644 + _STREAMDATAPOINTSREPLY._serialized_end = 819 + _STREAMDATAPOINTSREPLY_ERRORSENTRY._serialized_start = 372 + _STREAMDATAPOINTSREPLY_ERRORSENTRY._serialized_end = 452 + _REGISTERDATAPOINTSREQUEST._serialized_start = 821 + _REGISTERDATAPOINTSREQUEST._serialized_end = 903 + _REGISTRATIONMETADATA._serialized_start = 906 + _REGISTRATIONMETADATA._serialized_end = 1063 + _REGISTERDATAPOINTSREPLY._serialized_start = 1066 + _REGISTERDATAPOINTSREPLY._serialized_end = 1213 + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._serialized_start = 1167 + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._serialized_end = 1213 + _COLLECTOR._serialized_start = 1216 + _COLLECTOR._serialized_end = 1555 +# @@protoc_insertion_point(module_scope) diff --git a/hvac_service/sdv/databroker/v1/collector_pb2_grpc.py b/hvac_service/sdv/databroker/v1/collector_pb2_grpc.py new file mode 100644 index 0000000..a0b90ab --- /dev/null +++ b/hvac_service/sdv/databroker/v1/collector_pb2_grpc.py @@ -0,0 +1,214 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +from sdv.databroker.v1 import ( + collector_pb2 as sdv_dot_databroker_dot_v1_dot_collector__pb2, +) + + +class CollectorStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.RegisterDatapoints = channel.unary_unary( + "/sdv.databroker.v1.Collector/RegisterDatapoints", + request_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsReply.FromString, + ) + self.UpdateDatapoints = channel.unary_unary( + "/sdv.databroker.v1.Collector/UpdateDatapoints", + request_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsReply.FromString, + ) + self.StreamDatapoints = channel.stream_stream( + "/sdv.databroker.v1.Collector/StreamDatapoints", + request_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsReply.FromString, + ) + + +class CollectorServicer(object): + """Missing associated documentation comment in .proto file.""" + + def RegisterDatapoints(self, request, context): + """A feeder (provider) shall call this as a first step to announce its "owned" data points + to the Data Broker. + If the registration of at least one of the passed data point fails, the overall registration + is rejected and the gRPC status code ABORTED is returned (to indicate the "aborted" registration). + The details, which data point(s) caused the failure and the reason, is passed in back in human- + readable form in the status message. 
Possible failure resaons are: + * PERMISSION_DENIED - Not allowed to register this name + * ALREADY_REGISTERED - The data point is already registered by some other feeder + * RE_REGISTRATION_MISMATCH - Already registered by this feeder but with differing metadata + * INVALID_NAME - The passed name of the datapoint has an invalid structure + * INVALID_VALUE_TYPE - The passed ValueType is not supported + * INVALID_CHANGE_TYPE - The passed ChangeType is not supported + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateDatapoints(self, request, context): + """TODO: Convert RegisterDatapointsReply into a stream in order to be able to communicate + subscription state (i.e. if there are subscribing clients) + or + Use a separate function (typically immediately) called after successful + registration of datapoints, e.g.: + + rpc GetSubscriptionStates() returns (stream SubscriptionStatesReply); + or + rpc ProvideDatapoints(ProvideDatapointsRequest) returns (stream ProvideDatapointsReply); + + Provide a set of updated datapoint values to the broker. + This is the unary equivalent of `StreamDatapoints` below and is better suited for cases + where the frequency of updates is rather low. + + NOTE: The values provided in a single request are handled as a single update in the + data broker. This ensures that any clients requesting (or subscribing to) a set of + datapoints will get a consistent update, i.e. that either all values are updated or + none are. + + Returns: any errors encountered updating the datapoints + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def StreamDatapoints(self, request_iterator, context): + """Provide a stream with updated datapoint values to the broker. 
+ This is the streaming equivalent of `UpdateDatapoints` above and is better suited for + cases where the frequency of updates is high. + + NOTE: The values provided in a single request are handled as a single update in the + data broker. This ensures that any clients requesting (or subscribing to) a set of + datapoints will get a consistent update, i.e. that either all values are updated or + none are. + + Returns: any errors encountered updating the datapoints + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_CollectorServicer_to_server(servicer, server): + rpc_method_handlers = { + "RegisterDatapoints": grpc.unary_unary_rpc_method_handler( + servicer.RegisterDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsReply.SerializeToString, + ), + "UpdateDatapoints": grpc.unary_unary_rpc_method_handler( + servicer.UpdateDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsReply.SerializeToString, + ), + "StreamDatapoints": grpc.stream_stream_rpc_method_handler( + servicer.StreamDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "sdv.databroker.v1.Collector", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. 
+class Collector(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def RegisterDatapoints( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/sdv.databroker.v1.Collector/RegisterDatapoints", + sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def UpdateDatapoints( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/sdv.databroker.v1.Collector/UpdateDatapoints", + sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def StreamDatapoints( + request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.stream_stream( + request_iterator, + target, + "/sdv.databroker.v1.Collector/StreamDatapoints", + sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + 
wait_for_ready, + timeout, + metadata, + ) diff --git a/hvac_service/sdv/databroker/v1/types_pb2.py b/hvac_service/sdv/databroker/v1/types_pb2.py new file mode 100644 index 0000000..b9cf1ad --- /dev/null +++ b/hvac_service/sdv/databroker/v1/types_pb2.py @@ -0,0 +1,217 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: sdv/databroker/v1/types.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import enum_type_wrapper + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1dsdv/databroker/v1/types.proto\x12\x11sdv.databroker.v1\x1a\x1fgoogle/protobuf/timestamp.proto"\x1d\n\x0bStringArray\x12\x0e\n\x06values\x18\x01 \x03(\t"\x1b\n\tBoolArray\x12\x0e\n\x06values\x18\x01 \x03(\x08"\x1c\n\nInt32Array\x12\x0e\n\x06values\x18\x01 \x03(\x11"\x1c\n\nInt64Array\x12\x0e\n\x06values\x18\x01 \x03(\x12"\x1d\n\x0bUint32Array\x12\x0e\n\x06values\x18\x01 \x03(\r"\x1d\n\x0bUint64Array\x12\x0e\n\x06values\x18\x01 \x03(\x04"\x1c\n\nFloatArray\x12\x0e\n\x06values\x18\x01 \x03(\x02"\x1d\n\x0b\x44oubleArray\x12\x0e\n\x06values\x18\x01 \x03(\x01"\xe2\x06\n\tDatapoint\x12-\n\ttimestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\rfailure_value\x18\n \x01(\x0e\x32$.sdv.databroker.v1.Datapoint.FailureH\x00\x12\x16\n\x0cstring_value\x18\x0b \x01(\tH\x00\x12\x14\n\nbool_value\x18\x0c \x01(\x08H\x00\x12\x15\n\x0bint32_value\x18\r \x01(\x11H\x00\x12\x15\n\x0bint64_value\x18\x0e \x01(\x12H\x00\x12\x16\n\x0cuint32_value\x18\x0f 
\x01(\rH\x00\x12\x16\n\x0cuint64_value\x18\x10 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x11 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x12 \x01(\x01H\x00\x12\x36\n\x0cstring_array\x18\x15 \x01(\x0b\x32\x1e.sdv.databroker.v1.StringArrayH\x00\x12\x32\n\nbool_array\x18\x16 \x01(\x0b\x32\x1c.sdv.databroker.v1.BoolArrayH\x00\x12\x34\n\x0bint32_array\x18\x17 \x01(\x0b\x32\x1d.sdv.databroker.v1.Int32ArrayH\x00\x12\x34\n\x0bint64_array\x18\x18 \x01(\x0b\x32\x1d.sdv.databroker.v1.Int64ArrayH\x00\x12\x36\n\x0cuint32_array\x18\x19 \x01(\x0b\x32\x1e.sdv.databroker.v1.Uint32ArrayH\x00\x12\x36\n\x0cuint64_array\x18\x1a \x01(\x0b\x32\x1e.sdv.databroker.v1.Uint64ArrayH\x00\x12\x34\n\x0b\x66loat_array\x18\x1b \x01(\x0b\x32\x1d.sdv.databroker.v1.FloatArrayH\x00\x12\x36\n\x0c\x64ouble_array\x18\x1c \x01(\x0b\x32\x1e.sdv.databroker.v1.DoubleArrayH\x00"m\n\x07\x46\x61ilure\x12\x11\n\rINVALID_VALUE\x10\x00\x12\x11\n\rNOT_AVAILABLE\x10\x01\x12\x15\n\x11UNKNOWN_DATAPOINT\x10\x02\x12\x11\n\rACCESS_DENIED\x10\x03\x12\x12\n\x0eINTERNAL_ERROR\x10\x04\x42\x07\n\x05value"\x9d\x01\n\x08Metadata\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12.\n\tdata_type\x18\x05 \x01(\x0e\x32\x1b.sdv.databroker.v1.DataType\x12\x32\n\x0b\x63hange_type\x18\x06 \x01(\x0e\x32\x1d.sdv.databroker.v1.ChangeType\x12\x13\n\x0b\x64\x65scription\x18\x07 
\x01(\t*\x84\x03\n\x08\x44\x61taType\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\x08\n\x04INT8\x10\x02\x12\t\n\x05INT16\x10\x03\x12\t\n\x05INT32\x10\x04\x12\t\n\x05INT64\x10\x05\x12\t\n\x05UINT8\x10\x06\x12\n\n\x06UINT16\x10\x07\x12\n\n\x06UINT32\x10\x08\x12\n\n\x06UINT64\x10\t\x12\t\n\x05\x46LOAT\x10\n\x12\n\n\x06\x44OUBLE\x10\x0b\x12\r\n\tTIMESTAMP\x10\x0c\x12\x10\n\x0cSTRING_ARRAY\x10\x14\x12\x0e\n\nBOOL_ARRAY\x10\x15\x12\x0e\n\nINT8_ARRAY\x10\x16\x12\x0f\n\x0bINT16_ARRAY\x10\x17\x12\x0f\n\x0bINT32_ARRAY\x10\x18\x12\x0f\n\x0bINT64_ARRAY\x10\x19\x12\x0f\n\x0bUINT8_ARRAY\x10\x1a\x12\x10\n\x0cUINT16_ARRAY\x10\x1b\x12\x10\n\x0cUINT32_ARRAY\x10\x1c\x12\x10\n\x0cUINT64_ARRAY\x10\x1d\x12\x0f\n\x0b\x46LOAT_ARRAY\x10\x1e\x12\x10\n\x0c\x44OUBLE_ARRAY\x10\x1f\x12\x13\n\x0fTIMESTAMP_ARRAY\x10 *s\n\x0e\x44\x61tapointError\x12\x15\n\x11UNKNOWN_DATAPOINT\x10\x00\x12\x10\n\x0cINVALID_TYPE\x10\x01\x12\x11\n\rACCESS_DENIED\x10\x02\x12\x12\n\x0eINTERNAL_ERROR\x10\x03\x12\x11\n\rOUT_OF_BOUNDS\x10\x04*7\n\nChangeType\x12\n\n\x06STATIC\x10\x00\x12\r\n\tON_CHANGE\x10\x01\x12\x0e\n\nCONTINUOUS\x10\x02\x62\x06proto3' +) + +_DATATYPE = DESCRIPTOR.enum_types_by_name["DataType"] +DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) +_DATAPOINTERROR = DESCRIPTOR.enum_types_by_name["DatapointError"] +DatapointError = enum_type_wrapper.EnumTypeWrapper(_DATAPOINTERROR) +_CHANGETYPE = DESCRIPTOR.enum_types_by_name["ChangeType"] +ChangeType = enum_type_wrapper.EnumTypeWrapper(_CHANGETYPE) +STRING = 0 +BOOL = 1 +INT8 = 2 +INT16 = 3 +INT32 = 4 +INT64 = 5 +UINT8 = 6 +UINT16 = 7 +UINT32 = 8 +UINT64 = 9 +FLOAT = 10 +DOUBLE = 11 +TIMESTAMP = 12 +STRING_ARRAY = 20 +BOOL_ARRAY = 21 +INT8_ARRAY = 22 +INT16_ARRAY = 23 +INT32_ARRAY = 24 +INT64_ARRAY = 25 +UINT8_ARRAY = 26 +UINT16_ARRAY = 27 +UINT32_ARRAY = 28 +UINT64_ARRAY = 29 +FLOAT_ARRAY = 30 +DOUBLE_ARRAY = 31 +TIMESTAMP_ARRAY = 32 +UNKNOWN_DATAPOINT = 0 +INVALID_TYPE = 1 +ACCESS_DENIED = 2 +INTERNAL_ERROR = 3 +OUT_OF_BOUNDS = 4 
+STATIC = 0 +ON_CHANGE = 1 +CONTINUOUS = 2 + + +_STRINGARRAY = DESCRIPTOR.message_types_by_name["StringArray"] +_BOOLARRAY = DESCRIPTOR.message_types_by_name["BoolArray"] +_INT32ARRAY = DESCRIPTOR.message_types_by_name["Int32Array"] +_INT64ARRAY = DESCRIPTOR.message_types_by_name["Int64Array"] +_UINT32ARRAY = DESCRIPTOR.message_types_by_name["Uint32Array"] +_UINT64ARRAY = DESCRIPTOR.message_types_by_name["Uint64Array"] +_FLOATARRAY = DESCRIPTOR.message_types_by_name["FloatArray"] +_DOUBLEARRAY = DESCRIPTOR.message_types_by_name["DoubleArray"] +_DATAPOINT = DESCRIPTOR.message_types_by_name["Datapoint"] +_METADATA = DESCRIPTOR.message_types_by_name["Metadata"] +_DATAPOINT_FAILURE = _DATAPOINT.enum_types_by_name["Failure"] +StringArray = _reflection.GeneratedProtocolMessageType( + "StringArray", + (_message.Message,), + { + "DESCRIPTOR": _STRINGARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StringArray) + }, +) +_sym_db.RegisterMessage(StringArray) + +BoolArray = _reflection.GeneratedProtocolMessageType( + "BoolArray", + (_message.Message,), + { + "DESCRIPTOR": _BOOLARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.BoolArray) + }, +) +_sym_db.RegisterMessage(BoolArray) + +Int32Array = _reflection.GeneratedProtocolMessageType( + "Int32Array", + (_message.Message,), + { + "DESCRIPTOR": _INT32ARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Int32Array) + }, +) +_sym_db.RegisterMessage(Int32Array) + +Int64Array = _reflection.GeneratedProtocolMessageType( + "Int64Array", + (_message.Message,), + { + "DESCRIPTOR": _INT64ARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Int64Array) + }, +) +_sym_db.RegisterMessage(Int64Array) + +Uint32Array = _reflection.GeneratedProtocolMessageType( + "Uint32Array", + (_message.Message,), + { 
+ "DESCRIPTOR": _UINT32ARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Uint32Array) + }, +) +_sym_db.RegisterMessage(Uint32Array) + +Uint64Array = _reflection.GeneratedProtocolMessageType( + "Uint64Array", + (_message.Message,), + { + "DESCRIPTOR": _UINT64ARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Uint64Array) + }, +) +_sym_db.RegisterMessage(Uint64Array) + +FloatArray = _reflection.GeneratedProtocolMessageType( + "FloatArray", + (_message.Message,), + { + "DESCRIPTOR": _FLOATARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.FloatArray) + }, +) +_sym_db.RegisterMessage(FloatArray) + +DoubleArray = _reflection.GeneratedProtocolMessageType( + "DoubleArray", + (_message.Message,), + { + "DESCRIPTOR": _DOUBLEARRAY, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.DoubleArray) + }, +) +_sym_db.RegisterMessage(DoubleArray) + +Datapoint = _reflection.GeneratedProtocolMessageType( + "Datapoint", + (_message.Message,), + { + "DESCRIPTOR": _DATAPOINT, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Datapoint) + }, +) +_sym_db.RegisterMessage(Datapoint) + +Metadata = _reflection.GeneratedProtocolMessageType( + "Metadata", + (_message.Message,), + { + "DESCRIPTOR": _METADATA, + "__module__": "sdv.databroker.v1.types_pb2" + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Metadata) + }, +) +_sym_db.RegisterMessage(Metadata) + +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _DATATYPE._serialized_start = 1358 + _DATATYPE._serialized_end = 1746 + _DATAPOINTERROR._serialized_start = 1748 + _DATAPOINTERROR._serialized_end = 1863 + _CHANGETYPE._serialized_start = 1865 + _CHANGETYPE._serialized_end = 1920 + _STRINGARRAY._serialized_start = 85 + 
_STRINGARRAY._serialized_end = 114 + _BOOLARRAY._serialized_start = 116 + _BOOLARRAY._serialized_end = 143 + _INT32ARRAY._serialized_start = 145 + _INT32ARRAY._serialized_end = 173 + _INT64ARRAY._serialized_start = 175 + _INT64ARRAY._serialized_end = 203 + _UINT32ARRAY._serialized_start = 205 + _UINT32ARRAY._serialized_end = 234 + _UINT64ARRAY._serialized_start = 236 + _UINT64ARRAY._serialized_end = 265 + _FLOATARRAY._serialized_start = 267 + _FLOATARRAY._serialized_end = 295 + _DOUBLEARRAY._serialized_start = 297 + _DOUBLEARRAY._serialized_end = 326 + _DATAPOINT._serialized_start = 329 + _DATAPOINT._serialized_end = 1195 + _DATAPOINT_FAILURE._serialized_start = 1077 + _DATAPOINT_FAILURE._serialized_end = 1186 + _METADATA._serialized_start = 1198 + _METADATA._serialized_end = 1355 +# @@protoc_insertion_point(module_scope) diff --git a/hvac_service/sdv/databroker/v1/types_pb2_grpc.py b/hvac_service/sdv/databroker/v1/types_pb2_grpc.py new file mode 100644 index 0000000..8a93939 --- /dev/null +++ b/hvac_service/sdv/databroker/v1/types_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc diff --git a/hvac_service/sdv/edge/__init__.py b/hvac_service/sdv/edge/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hvac_service/sdv/edge/comfort/__init__.py b/hvac_service/sdv/edge/comfort/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hvac_service/sdv/edge/comfort/hvac/__init__.py b/hvac_service/sdv/edge/comfort/hvac/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hvac_service/sdv/edge/comfort/hvac/v1/__init__.py b/hvac_service/sdv/edge/comfort/hvac/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2.py b/hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2.py new file mode 100644 index 0000000..cb311c9 --- /dev/null +++ b/hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: sdv/edge/comfort/hvac/v1/hvac.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import enum_type_wrapper + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n#sdv/edge/comfort/hvac/v1/hvac.proto\x12\x18sdv.edge.comfort.hvac.v1"H\n\x12SetAcStatusRequest\x12\x32\n\x06status\x18\x01 \x01(\x0e\x32".sdv.edge.comfort.hvac.v1.AcStatus"\x12\n\x10SetAcStatusReply",\n\x15SetTemperatureRequest\x12\x13\n\x0btemperature\x18\x01 \x01(\x02"\x15\n\x13SetTemperatureReply*\x1b\n\x08\x41\x63Status\x12\x07\n\x03OFF\x10\x00\x12\x06\n\x02ON\x10\x01\x32\xe1\x01\n\x04Hvac\x12g\n\x0bSetAcStatus\x12,.sdv.edge.comfort.hvac.v1.SetAcStatusRequest\x1a*.sdv.edge.comfort.hvac.v1.SetAcStatusReply\x12p\n\x0eSetTemperature\x12/.sdv.edge.comfort.hvac.v1.SetTemperatureRequest\x1a-.sdv.edge.comfort.hvac.v1.SetTemperatureReplyb\x06proto3' +) + +_ACSTATUS = DESCRIPTOR.enum_types_by_name["AcStatus"] +AcStatus = enum_type_wrapper.EnumTypeWrapper(_ACSTATUS) +OFF = 0 +ON = 1 + + +_SETACSTATUSREQUEST = DESCRIPTOR.message_types_by_name["SetAcStatusRequest"] +_SETACSTATUSREPLY = DESCRIPTOR.message_types_by_name["SetAcStatusReply"] +_SETTEMPERATUREREQUEST = DESCRIPTOR.message_types_by_name["SetTemperatureRequest"] +_SETTEMPERATUREREPLY = DESCRIPTOR.message_types_by_name["SetTemperatureReply"] +SetAcStatusRequest = _reflection.GeneratedProtocolMessageType( + "SetAcStatusRequest", + (_message.Message,), + { + "DESCRIPTOR": _SETACSTATUSREQUEST, + "__module__": "sdv.edge.comfort.hvac.v1.hvac_pb2" + # @@protoc_insertion_point(class_scope:sdv.edge.comfort.hvac.v1.SetAcStatusRequest) + }, +) 
+_sym_db.RegisterMessage(SetAcStatusRequest) + +SetAcStatusReply = _reflection.GeneratedProtocolMessageType( + "SetAcStatusReply", + (_message.Message,), + { + "DESCRIPTOR": _SETACSTATUSREPLY, + "__module__": "sdv.edge.comfort.hvac.v1.hvac_pb2" + # @@protoc_insertion_point(class_scope:sdv.edge.comfort.hvac.v1.SetAcStatusReply) + }, +) +_sym_db.RegisterMessage(SetAcStatusReply) + +SetTemperatureRequest = _reflection.GeneratedProtocolMessageType( + "SetTemperatureRequest", + (_message.Message,), + { + "DESCRIPTOR": _SETTEMPERATUREREQUEST, + "__module__": "sdv.edge.comfort.hvac.v1.hvac_pb2" + # @@protoc_insertion_point(class_scope:sdv.edge.comfort.hvac.v1.SetTemperatureRequest) + }, +) +_sym_db.RegisterMessage(SetTemperatureRequest) + +SetTemperatureReply = _reflection.GeneratedProtocolMessageType( + "SetTemperatureReply", + (_message.Message,), + { + "DESCRIPTOR": _SETTEMPERATUREREPLY, + "__module__": "sdv.edge.comfort.hvac.v1.hvac_pb2" + # @@protoc_insertion_point(class_scope:sdv.edge.comfort.hvac.v1.SetTemperatureReply) + }, +) +_sym_db.RegisterMessage(SetTemperatureReply) + +_HVAC = DESCRIPTOR.services_by_name["Hvac"] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _ACSTATUS._serialized_start = 228 + _ACSTATUS._serialized_end = 255 + _SETACSTATUSREQUEST._serialized_start = 65 + _SETACSTATUSREQUEST._serialized_end = 137 + _SETACSTATUSREPLY._serialized_start = 139 + _SETACSTATUSREPLY._serialized_end = 157 + _SETTEMPERATUREREQUEST._serialized_start = 159 + _SETTEMPERATUREREQUEST._serialized_end = 203 + _SETTEMPERATUREREPLY._serialized_start = 205 + _SETTEMPERATUREREPLY._serialized_end = 226 + _HVAC._serialized_start = 258 + _HVAC._serialized_end = 483 +# @@protoc_insertion_point(module_scope) diff --git a/hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2_grpc.py b/hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2_grpc.py new file mode 100644 index 0000000..f23f80e --- /dev/null +++ b/hvac_service/sdv/edge/comfort/hvac/v1/hvac_pb2_grpc.py @@ 
-0,0 +1,158 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc +from sdv.edge.comfort.hvac.v1 import ( + hvac_pb2 as sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2, +) + + +class HvacStub(object): + """* + @brief Example HVAC service for controlling the heating, ventilation, and air + conditioning elements of the vehicle cabin. + This definition is designed here according to the draft of the comfort seats + service definition of the COVESA Vehicle Service Catalog (VSC) (see + https://github.com/COVESA/vehicle_service_catalog) as a definition of an + HVAC service is currently missing in VSC. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.SetAcStatus = channel.unary_unary( + "/sdv.edge.comfort.hvac.v1.Hvac/SetAcStatus", + request_serializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetAcStatusRequest.SerializeToString, + response_deserializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetAcStatusReply.FromString, + ) + self.SetTemperature = channel.unary_unary( + "/sdv.edge.comfort.hvac.v1.Hvac/SetTemperature", + request_serializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetTemperatureRequest.SerializeToString, + response_deserializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetTemperatureReply.FromString, + ) + + +class HvacServicer(object): + """* + @brief Example HVAC service for controlling the heating, ventilation, and air + conditioning elements of the vehicle cabin. + This definition is designed here according to the draft of the comfort seats + service definition of the COVESA Vehicle Service Catalog (VSC) (see + https://github.com/COVESA/vehicle_service_catalog) as a definition of an + HVAC service is currently missing in VSC. 
+ """ + + def SetAcStatus(self, request, context): + """* Set the desired ac status + + Returns gRPC status codes: + * OK - AcStatus set + * INVALID_ARGUMENT - The requested AcStatus is not supported by the service instance + * INTERNAL - A HVAC service internal error happened - see error message for details + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def SetTemperature(self, request, context): + """* Set the desired cabin temperature + + Returns gRPC status codes: + * OK - Desired temperature set + * OUT_OF_RANGE - The specified temperature is not supported in this vehicle + * INTERNAL - A HVAC service internal error happened - see error message for details + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_HvacServicer_to_server(servicer, server): + rpc_method_handlers = { + "SetAcStatus": grpc.unary_unary_rpc_method_handler( + servicer.SetAcStatus, + request_deserializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetAcStatusRequest.FromString, + response_serializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetAcStatusReply.SerializeToString, + ), + "SetTemperature": grpc.unary_unary_rpc_method_handler( + servicer.SetTemperature, + request_deserializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetTemperatureRequest.FromString, + response_serializer=sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetTemperatureReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "sdv.edge.comfort.hvac.v1.Hvac", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. 
+class Hvac(object): + """* + @brief Example HVAC service for controlling the heating, ventilation, and air + conditioning elements of the vehicle cabin. + This definition is designed here according to the draft of the comfort seats + service definition of the COVESA Vehicle Service Catalog (VSC) (see + https://github.com/COVESA/vehicle_service_catalog) as a definition of an + HVAC service is currently missing in VSC. + """ + + @staticmethod + def SetAcStatus( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/sdv.edge.comfort.hvac.v1.Hvac/SetAcStatus", + sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetAcStatusRequest.SerializeToString, + sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetAcStatusReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def SetTemperature( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/sdv.edge.comfort.hvac.v1.Hvac/SetTemperature", + sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetTemperatureRequest.SerializeToString, + sdv_dot_edge_dot_comfort_dot_hvac_dot_v1_dot_hvac__pb2.SetTemperatureReply.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/hvac_service/testclient.py b/hvac_service/testclient.py new file mode 100755 index 0000000..92b2d7e --- /dev/null +++ b/hvac_service/testclient.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +# 
/******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +import getopt +import logging +import os +import sys + +import grpc +import sdv.edge.comfort.hvac.v1.hvac_pb2 as pb2 +import sdv.edge.comfort.hvac.v1.hvac_pb2_grpc as pb2_grpc +from sdv.edge.comfort.hvac.v1.hvac_pb2 import AcStatus + +logger = logging.getLogger(__name__) + + +class HVACTestClient(object): + """ + Client for gRPC functionality + """ + + def __init__(self, hvac_addr: str): + self._hvac_addr = hvac_addr + logger.info("Connecting to HVAC service %s", self._hvac_addr) + + # instantiate a channel + self.channel = grpc.insecure_channel(self._hvac_addr) + + # bind the client and the server + self.stub = pb2_grpc.HvacStub(self.channel) + + def execute_methods(self, ac_status: AcStatus, ac_temp: float) -> None: + """ + Client function to call the rpc for HVACService methods + """ + logger.info("Setting AC Status: %s", self.get_hvac_str(ac_status)) + request = pb2.SetAcStatusRequest(status=ac_status) + self.stub.SetAcStatus(request) + + logger.info("Setting Temperature: %s", ac_temp) + request = pb2.SetTemperatureRequest(temperature=ac_temp) + self.stub.SetTemperature(request) + + logger.info("Done.") + + def get_hvac_str(self, hvac_value) -> str: + if hvac_value == 0: + return "AcStatus.OFF" + elif hvac_value == 1: + return "AcStatus.ON" + else: + return "Invalid value: {}".format(hvac_value) + + +def main(argv): + """Main function""" + + default_addr = "127.0.0.1:50052" + 
default_temp = "42.0" + default_status = "1" + + _usage = ( + "Usage: ./testclient.py --addr " # shorten line + " --temp=AC_TEMP --status=AC_STATUS\n\n" + "Environment:\n" + " 'VDB_ADDR' Databroker address (host:port). Default: {}\n" + " 'AC_TEMP' Desired AC Temperature. Default: {}\n" + " 'AC_STATUS' AC Status (0=OFF, 1=ON). Default: {}\n".format( + default_addr, default_temp, default_status + ) + ) + + # environment values (overridden by cmdargs) + hvac_addr = os.getenv("HVAC_ADDR", default_addr) + ac_temp = float(os.environ.get("AC_TEMP", default_temp)) + ac_status = AcStatus.ON if os.getenv("AC_STATUS") != "0" else AcStatus.OFF + + # parse cmdline args + try: + opts, args = getopt.getopt(argv, "ha:t:s:", ["addr=", "temp=", "status="]) + for opt, arg in opts: + if opt == "-h": + print(_usage) + sys.exit(0) + elif opt in ("-a", "--addr"): + hvac_addr = arg + elif opt in ("-t", "--temp"): + ac_temp = float(arg) + elif opt in ("-s", "--status"): + ac_status = AcStatus.ON if arg != "0" else AcStatus.OFF + else: + print("Unknown arg: {}".format(opt)) + print(_usage) + sys.exit(1) + except getopt.GetoptError: + print(_usage) + sys.exit(1) + + client = HVACTestClient(hvac_addr) + client.execute_methods(ac_status, ac_temp) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + main(sys.argv[1:]) diff --git a/hvac_service/update-protobuf.sh b/hvac_service/update-protobuf.sh new file mode 100755 index 0000000..a775f4a --- /dev/null +++ b/hvac_service/update-protobuf.sh @@ -0,0 +1,56 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2086 + +PROTO_DIRS=( + "../vehicle_data_broker/proto" + "./proto" +) + +for src_dir in "${PROTO_DIRS[@]}"; do + if [ ! -d "$src_dir" ]; then + echo "Error! Proto dir does not exist: $src_dir" + exit 1 + fi +done + +# make sure deps are installed +echo "# Installing requirements.txt ..." +pip3 install -q -r requirements-dev.txt + +set -xe +PROTO_FILES=$(find "${PROTO_DIRS[@]}" -name '*.proto') + +printf -v PROTO_PATH "%s:" "${PROTO_DIRS[@]}" +PROTO_PATH="${PROTO_PATH%:}" + +echo "# Generating grpc stubs from: ${PROTO_PATH} ..." +python3 -m grpc_tools.protoc \ + --python_out=. \ + --grpc_python_out=. \ + --proto_path="${PROTO_PATH}" \ + $PROTO_FILES +set +xe + +echo "# Ensure each generated folder contains an __init__.py ..." 
+# Get root package names +# shellcheck disable=SC2068 # Double quotes don't work with grep +ROOT_PACKAGES=$(grep -Poshr "^package[[:space:]]+\K[_0-9A-Za-z]+" ${PROTO_FILES[@]}) +# Remove duplicates +IFS=" " read -r -a ROOT_PACKAGES <<<"$(tr ' ' '\n' <<<"${ROOT_PACKAGES[@]}" | sort -u | tr '\n' ' ')" +# Recursively add __init__.py files +find "${ROOT_PACKAGES[@]}" -type d -exec touch {}/"__init__.py" \; + +echo "# Generated files:" +find "${ROOT_PACKAGES[@]}" -type f -name '*.py' diff --git a/integration_test/.gitignore b/integration_test/.gitignore new file mode 100644 index 0000000..6595022 --- /dev/null +++ b/integration_test/.gitignore @@ -0,0 +1 @@ +val_integration.egg-info/ diff --git a/integration_test/README.md b/integration_test/README.md new file mode 100644 index 0000000..be811b0 --- /dev/null +++ b/integration_test/README.md @@ -0,0 +1,95 @@ +# Integration Tests + +Integration tests can be run in develop (local) mode or in CI environment for validating VAL components in Dapr/Kubernetes environment.\ +SeatService is running in simulated CAN mode `CAN="cansim"` and SeatService client is used to start seat movement. + +## Integration Test overview + +- `integration_test/test_feeder.py`:\ + This test covers feedercan datapoints. It checks if they are registered and also that some datapoints have changing values.\ + **NOTE:** If feedercan default config is changed, make sure those changes are reflected in feeder test. + +- `integration_test/test_val_seat.py`:\ + This test covers seat service metadata and several seat move scenarios.\ + It uses an external script for asking the seat to move to desired position (using `seat_svc_client` as grpc client) + +- `integration_test/broker_subscriber.py`:\ +Databroker grpc subscription handler (useful for scripting) + + ```text + Usage: ./broker_subscriber.py --addr [ --get-meta=META | --query --timeout --count ] + + Environment: + 'BROKER_ADDR' Default: localhost:55555 + 'QUERY' SQL datapoint query. 
('*' = subscribe for all meta). Default: SELECT Vehicle.Cabin.Seat.Row1.Pos1.Position + 'COUNT' Receive specified count of events and exit (0=inf) + 'TIMEOUT' Abort receiving if no data comes for specified timeout in seconds (0=inf) + 'META' Comma separated list of datapoint names to query. ('*' = all meta) + ``` + +## Usage of VS Code Tasks + +Integration test are using VS Code tasks defined in `.vscode/tasks.json`, see [README there](../.vscode/README.md). + +## Local Testing with "dapr run" + +Dapr mode is executing VAL binaries with `dapr run` (using similar setup as in `vehicle-app-python-template`). +Integration tests check for `USE_DAPR=1` environment variable to support standalone dapr mode (e.g. use custom dapr proxy ports and add dapr metadata to grpc calls) + +### Local setup + +Either use `integration-test` vs code task, or execute the follwing commands in a terminal to install python dependencies: + +```shell +cd integration_test/ +pip install -r requirements.txt +pip install -r requirements-dev.txt +pip install -e . +``` + +And then launch pytest manually: + +```shell +pytest -v . --asyncio-mode=auto +``` + +**NOTE:** In `USE_DAPR=1` mode, tests are using `task-seat-move.sh` script for wrapping calls through `run-seatservice-cli.sh` vs task script. + +### Required VS Code tasks for local testing (dapr) + +Python Integration tests depend on the following VS Code tasks: + +- `ensure-dapr` +- `run-databroker` +- `run-seatservice` +- `run-feedercan` + +It is possible to use VS Code `Testing` panel for debugging failed test cases and also directly running the python file in debug mode. + +## Local / CI testing using Docker images + +This mode is a placeholder for proper Kubernetes cluster testing. +For the moment it uses **released** or at least **tagged** images from `ghcr.io`. +To force using this mode (e.g. 
in CI) export `USE_DAPR=0` environment variable for pytest process + +Relevant scripts: + +- `integration_test/it-config` : This config defines the used images, tags, and docker specific options per val container. +- `integration_test/it-seat-move.sh`: This script is used to execute `seat_svc_client` from seat_service container to initiate seat movement for integration tests. +- `integration_test/it-setup.sh`: This is the main script handling val containers lifecycle: + + ```text + Usage: ./it-setup.sh {Options} [ init | start | stop | status | cleanup ] + + Options: + --force for 'init' and 'cleanup' commands, forces rebuilding/pulling/removing VAL images + --logs for 'status' command, shows docker logs per var container + --help Prints this message and exit. + + Commands: + init Pulls VAL images from a repository or builds them if missing (use --force to force init) + start Starts VAL Containers (also implies init) + stop Stops VAL Containers + status Shows status of VAL Containers. Use --log to see last logs from VAL containers + cleanup Removes VAL Containers. Use --force to also remove configured VAL images + ``` diff --git a/integration_test/__init__.py b/integration_test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/integration_test/broker_subscribe.py b/integration_test/broker_subscribe.py new file mode 100755 index 0000000..e1f96a3 --- /dev/null +++ b/integration_test/broker_subscribe.py @@ -0,0 +1,483 @@ +#!/usr/bin/env python3 +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# *
+# * This program and the accompanying materials are made available under the
+# * terms of the Apache License 2.0 which is available at
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * SPDX-License-Identifier: Apache-2.0
+# ********************************************************************************/
+
+import asyncio
+import getopt
+import json
+import logging
+import os
+import signal
+import sys
+import time
+
+import grpc
+from gen_proto.sdv.databroker.v1 import broker_pb2
+from gen_proto.sdv.databroker.v1.broker_pb2_grpc import BrokerStub
+from gen_proto.sdv.databroker.v1.types_pb2 import DataType
+
+SEAT_POS = "Vehicle.Cabin.Seat.Row1.Pos1.Position"
+
+# allow log level change via 'LOG_LEVEL' env. var
+LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO")
+logging.basicConfig(format="<%(levelname)s>\t%(message)s", level=LOG_LEVEL)
+logger = logging.getLogger(__name__)
+
+# tracks whether on_change_event() already printed its table header
+__HEADER_PRINTED = False
+
+
+def on_change_event(key, value, value_type, timestamp):
+    """Print one subscription event as an aligned '#SUB#' table row.
+
+    Prints a '#####' header row once, before the first event.
+    """
+    global __HEADER_PRINTED
+    if not __HEADER_PRINTED:
+        print(
+            "##### | {:40s} | {:8s} | {:20s} | {:16} |".format(
+                "[Name]", "[Value]", "[ValueType]", "[Timestamp]"
+            ),
+            flush=True,
+        )
+        __HEADER_PRINTED = True
+    print(
+        "#SUB# | {:40s} | {:8s} | {:20s} | {:<16.3f} |".format(
+            str(key), str(value), value_type, timestamp
+        ),
+        flush=True,
+    )
+
+
+def on_change_event_json(key, value, value_type, timestamp):
+    """Print one subscription event as a single-line '#SUB-JSON#' record."""
+    sub_event = {
+        "name": str(key),
+        "value": value,
+        "valueType": value_type,
+        "ts": timestamp,
+    }
+    event_json = json.dumps(sub_event)  # , sort_keys=True)
+    print("#SUB-JSON# {}".format(event_json), flush=True)
+
+
+def print_json_metadata(metadata) -> str:
+    """Print databroker metadata entries as one '#META-JSON#' line.
+
+    Returns the rendered JSON string as well (despite the 'print_' name).
+    """
+    meta_list = []
+    for meta in metadata:
+        meta_list.append(
+            {
+                "id": meta.id,
+                "name": meta.name,
+                "dataType": DataType.Name(meta.data_type),
+                "desc": meta.description,
+            }
+        )
+
+    meta_json = json.dumps(meta_list, sort_keys=True)
+    print("#META-JSON# {}".format(meta_json), flush=True)
+    return meta_json
+
+
+class 
BrokerSubscribe(object): + + VERBOSE = False + + def __init__( + self, databroker_address="localhost:55555", max_events=0, timeout=0 + ) -> None: + # logger.setLevel(logging.DEBUG) + + self.configDir = os.path.dirname(os.path.realpath(__file__)) + self.databroker_address = databroker_address + self.max_events = max_events + self.timeout = timeout + self._running = False + self._events = 0 + self._ts = None + + # GRPC: Connect to the collector service + logger.info("Connecting to databroker: {}".format(self.databroker_address)) + self._channel = grpc.insecure_channel(self.databroker_address) + self.broker_stub = BrokerStub(self._channel) + + def _get_grpc_error(self, err): + status_code = err.code() + return "\n GrpcError[Status:{} {}]\n GRPC Details:'{}']".format( + status_code.name, status_code.value, err.details() + ) + + async def close(self): + """Close runtime gRPC channel.""" + if self._channel: + await self._channel.close() + + def __enter__(self) -> "BrokerSubscribe": + return self + + def __exit__(self, exc_type, exc_value, traceback) -> None: + asyncio.run_coroutine_threadsafe(self.close(), asyncio.get_event_loop()) + + def _parse_datapoint(self, dp): + """ + * + * Parse protobuf definitions. 
+ * + * @returns: ( , <'oneof value'> ) + + message Datapoint { + // Timestamp of the value + google.protobuf.Timestamp timestamp = 1; + + // values + oneof value { + Failure failure_value = 10; // from enum Failure + string string_value = 11; + bool bool_value = 12; + sint32 int32_value = 13; + sint64 int64_value = 14; + uint32 uint32_value = 15; + uint64 uint64_value = 16; + float float_value = 17; + double double_value = 18; + StringArray string_array = 21; + BoolArray bool_array = 22; + Int32Array int32_array = 23; + Int64Array int64_array = 24; + Uint32Array uint32_array = 25; + Uint64Array uint64_array = 26; + FloatArray float_array = 27; + DoubleArray double_array = 28; + } + + message StringArray { + repeated string values = 1; + } + + enum Failure { + // The data point is known, but doesn't have a valid value + INVALID_VALUE = 0; + // The data point is known, but no value is available + NOT_AVAILABLE = 1; + // Unknown datapoint + UNKNOWN_DATAPOINT = 2; + // Access denied + ACCESS_DENIED = 3; + // Unexpected internal error + INTERNAL_ERROR = 4; + } + """ + + one_value = dp.WhichOneof("value") + logger.debug(" dp.value: {}".format(one_value)) + # _inspect_object(" --> one_value", one_value, public_only=True, extended=True) + if dp.HasField("timestamp"): + ts = ( + dp.timestamp.seconds + int(dp.timestamp.nanos / 10**6) / 1000 + ) # round to msec + + logger.debug(" dp.timestamp: {}".format(ts)) + + value = None + if one_value is None: + raise Exception('"oneof value" is missing in: {}'.format(dp)) + elif one_value == "failure_value": + value = dp.failure_value + elif one_value == "string_value": + value = dp.string_value + elif one_value == "bool_value": + value = dp.bool_value + elif one_value == "int32_value": + value = dp.int32_value + elif one_value == "int64_value": + value = dp.int64_value + elif one_value == "uint32_value": + value = dp.uint32_value + elif one_value == "uint64_value": + value = dp.uint64_value + elif one_value == "float_value": + value 
= dp.float_value + elif one_value == "double_value": + value = dp.double_value + elif one_value == "string_array": + value = dp.string_array.values + elif one_value == "bool_array": + value = dp.bool_array.values + elif one_value == "int32_array": + value = dp.int32_array.values + elif one_value == "int64_array": + value = dp.int64_array.values + elif one_value == "uint32_array": + value = dp.uint32_array.values + elif one_value == "uint64_array": + value = dp.uint64_array.values + elif one_value == "float_array": + value = dp.float_array.values + elif one_value == "double_array": + value = dp.double_array.values + else: + raise Exception("Unknown value {} in Datapoint:{}".format(one_value, dp)) + + result = {} + result["value"] = value + result["ts"] = ts + result["type"] = one_value + + logger.debug("_parse_datapoint() -> {}".format(result)) + return result + + def get_metadata(self, names=[]): + """ + Wraps broker.GetMetadata(names) and returns list of MetaData + @param names: list of names or [] for returning full MetaData + @return List of registered MetaData entries + """ + try: + request = broker_pb2.GetMetadataRequest() + for n in names: + request.names.append(n) + response = self.broker_stub.GetMetadata(request) + assert response.list is not None # nosec + logger.debug("broker.GetMetadata({}) -> {}".format(names, response.list)) + + # message GetMetadataReply { + # // Contains metadata of the requested data points. If a data point + # // doesn't exist (i.e. not known to the Data Broker) the corresponding + # // Metadata isn't part of the returned list. + # repeated databroker.v1.Metadata list = 1; + # } + # message Metadata { + # // Id to be used in "get" and "subscribe" requests. Ids stay valid during + # // one power cycle, only. 
+ # int32 id = 1; + # string name = 4; + # DataType data_type = 5; + # ChangeType change_type = 6; // CONTINUOUS or STATIC or ON_CHANGE + # string description = 7; + # } + for metadata in response.list: + logger.debug( + " * Metadata[id:{}, name:{}, type:{}, descr:{}]".format( + metadata.id, + metadata.name, + metadata.data_type, + metadata.description, + ) + ) + + return response.list + except grpc.RpcError as e: + logging.error( + "broker.GetMetadata({}) failed! {}".format( + names, self._get_grpc_error(e) + ) + ) + raise e + + def print_meta_data(self, meta): + print( + " | {:2} | {:40s} | {:12s} | {}".format( + "ID", "[Name]", "[DataType]", "[Description]" + ), + flush=True, + ) + for metadata in meta: + print( + "#META# | {:2} | {:40s} | {:12s} | {}".format( + metadata.id, + metadata.name, + DataType.Name(metadata.data_type), + metadata.description, + ), + flush=True, + ) + print_json_metadata(meta) + # print('#META# {{ "id":{}, "name":"{}", "data_type":"{}", "descr":"{}" }}'.format( + # metadata.id, metadata.name, DataType.Name(metadata.data_type), metadata.description), flush=True) + # print("#META# {}".format(str(x).replace("\n", " ")), flush=True) + + def get_wildcard_query(self): + """Gets All Metadata entries from Broker and generates + SELECT query including all of available names + + Returns: + str: Query (for Broker.Subscribe()) including all available Metadata names + """ + meta = self.get_metadata([]) + self.print_meta_data(meta) + + # for x in meta: + # print("#META# {}".format(str(x).replace("\n", " "))) + # id = x.id if hasattr(x, 'id') else None + # print("#META# { id:{}, name:{}, type:{}, desc:{} }".format(id , + # x.name, x.data_type, x.description), flush=True) + + all = ",\n ".join([(x.name) for x in meta]) + query = "SELECT {}".format(all) + return query + + def get_registered_metadata(self, names=[]): + meta = self.get_metadata(names) + self.print_meta_data(meta) + return meta + + async def subscribe_datapoints(self, query, 
sub_callback=None): + try: + request = broker_pb2.SubscribeRequest() + request.query = query + logger.info("broker.Subscribe('{}')".format(query)) + if self.timeout > 0: + response = self.broker_stub.Subscribe(request, timeout=self.timeout) + else: + response = self.broker_stub.Subscribe(request) + + for subscribe_reply in response: + """ + message SubscribeReply { + // Contains the fields specified by the query. + // If a requested data point value is not available, the corresponding + // Datapoint will have it's respective failure value set. + map fields = 1; + }""" + + # SubscribeReply.fields: + # map + if not hasattr(subscribe_reply, "fields"): + logger.warning("Missing 'fields' in {}".format(subscribe_reply)) + continue + + logger.debug("SubscribeReply.{}".format(subscribe_reply)) + map = subscribe_reply.fields + for key in map: + dp = map[key] + parsed = self._parse_datapoint(dp) + logger.info( + "EVENT: {}={} ({}) TS:{}".format( + key, parsed["value"], parsed["type"], parsed["ts"] + ) + ) + if sub_callback: + logger.debug("calling cb:{}".format(sub_callback)) + try: + sub_callback( + key, parsed["value"], parsed["type"], parsed["ts"] + ) + except Exception: + logging.exception("sub_callback() error", exc_info=True) + pass + # apply limits per reply, not for each field in reply... + if self.timeout > 0: + now = time.time() + if self._ts and now - self._ts >= self.timeout: + logger.info("Terminating after {} events.".format(self._events)) + self._running = False + return + self._ts = now + if self.max_events > 0: + self._events += 1 + if self._events >= self.max_events: + logger.info("Terminating after {} events.".format(self._events)) + self._running = False + return + + except grpc.RpcError as e: + if e.code() == grpc.StatusCode.DEADLINE_EXCEEDED: + # expected code if we used timeout, just stop subscription + logger.info("Exitting due to idle timeout: {}".format(self.timeout)) + self._running = False + else: + logging.error( + "broker.Subscribe() failed! 
{}".format(self._get_grpc_error(e)) + ) + raise e + except Exception: + logging.exception("broker.Subscribe() error", exc_info=True) + + +def main(argv): + """Main function""" + + default_addr = "localhost:55555" + default_query = "SELECT {}".format(SEAT_POS) + + _usage = ( + "Usage: ./broker_subscriber.py --addr " # shorten line + " [ --get-meta=META | --query --timeout --count ]\n\n" + "Environment:\n" + " 'BROKER_ADDR' Default: {}\n" + " 'QUERY' SQL datapoint query. ('*' = subscribe for all meta). Default: {}\n" + " 'COUNT' Receive specified count of events and exit (0=inf)\n" + " 'TIMEOUT' Abort receiving if no data comes for specified timeout in seconds (0=inf)\n" + " 'META' Comma separated list of datapoint names to query. ('*' = all meta)\n".format( + default_addr, default_query + ) + ) + + # environment values (overridden by cmdargs) + broker_addr = os.environ.get("BROKER_ADDR", default_addr) + query = os.environ.get("QUERY", default_query) + count = int(os.environ.get("COUNT", "0")) + timeout = float(os.environ.get("TIMEOUT", "0")) + meta = os.environ.get("META") + + # parse cmdline args + try: + opts, args = getopt.getopt( + argv, "ha:q:c:t:g", ["addr=", "query=", "count=", "timeout=", "get-meta="] + ) + for opt, arg in opts: + if opt == "-h": + print(_usage) + sys.exit(0) + elif opt in ("-a", "--addr"): + broker_addr = arg + elif opt in ("-q", "--query"): + query = arg + elif opt in ("-c", "--count"): + count = int(arg) + elif opt in ("-t", "--timeout"): + timeout = float(arg) + elif opt in ("-g", "--get-meta"): + if arg is not None: + meta = opt + else: + meta = "*" + else: + print("Unhandled arg: {}".format(opt)) + print(_usage) + sys.exit(1) + except getopt.GetoptError: + print(_usage) + sys.exit(1) + + listener = BrokerSubscribe(broker_addr, max_events=count, timeout=timeout) + if query == "*": + query = listener.get_wildcard_query() + # logger.info("Replaced '*' query with:\n{}", query); + + get_meta = None + # parse meta arg/env var and split it to 
list + if meta: + if meta == "*": + get_meta = [] + else: + get_meta = meta.split(",") + + LOOP = asyncio.get_event_loop() + LOOP.add_signal_handler(signal.SIGTERM, LOOP.stop) + if get_meta is not None: + meta = listener.get_registered_metadata(get_meta) + print_json_metadata(meta) + else: + LOOP.run_until_complete( + listener.subscribe_datapoints(query, on_change_event_json) + ) + # LOOP.call_at(time.time() + 500, print("#### CALL_AT called!")) + LOOP.close() + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/integration_test/gen_proto/sdv/databroker/v1/broker_pb2.py b/integration_test/gen_proto/sdv/databroker/v1/broker_pb2.py new file mode 100644 index 0000000..98dc94a --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/broker_pb2.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: sdv/databroker/v1/broker.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from gen_proto.sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1esdv/databroker/v1/broker.proto\x12\x11sdv.databroker.v1\x1a\x1dsdv/databroker/v1/types.proto\"*\n\x14GetDatapointsRequest\x12\x12\n\ndatapoints\x18\x01 \x03(\t\"\xb0\x01\n\x12GetDatapointsReply\x12I\n\ndatapoints\x18\x01 \x03(\x0b\x32\x35.sdv.databroker.v1.GetDatapointsReply.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"!\n\x10SubscribeRequest\x12\r\n\x05query\x18\x02 
\x01(\t\"\x9c\x01\n\x0eSubscribeReply\x12=\n\x06\x66ields\x18\x01 \x03(\x0b\x32-.sdv.databroker.v1.SubscribeReply.FieldsEntry\x1aK\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"#\n\x12GetMetadataRequest\x12\r\n\x05names\x18\x01 \x03(\t\"=\n\x10GetMetadataReply\x12)\n\x04list\x18\x01 \x03(\x0b\x32\x1b.sdv.databroker.v1.Metadata2\x9b\x02\n\x06\x42roker\x12_\n\rGetDatapoints\x12\'.sdv.databroker.v1.GetDatapointsRequest\x1a%.sdv.databroker.v1.GetDatapointsReply\x12U\n\tSubscribe\x12#.sdv.databroker.v1.SubscribeRequest\x1a!.sdv.databroker.v1.SubscribeReply0\x01\x12Y\n\x0bGetMetadata\x12%.sdv.databroker.v1.GetMetadataRequest\x1a#.sdv.databroker.v1.GetMetadataReplyb\x06proto3') + + + +_GETDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name['GetDatapointsRequest'] +_GETDATAPOINTSREPLY = DESCRIPTOR.message_types_by_name['GetDatapointsReply'] +_GETDATAPOINTSREPLY_DATAPOINTSENTRY = _GETDATAPOINTSREPLY.nested_types_by_name['DatapointsEntry'] +_SUBSCRIBEREQUEST = DESCRIPTOR.message_types_by_name['SubscribeRequest'] +_SUBSCRIBEREPLY = DESCRIPTOR.message_types_by_name['SubscribeReply'] +_SUBSCRIBEREPLY_FIELDSENTRY = _SUBSCRIBEREPLY.nested_types_by_name['FieldsEntry'] +_GETMETADATAREQUEST = DESCRIPTOR.message_types_by_name['GetMetadataRequest'] +_GETMETADATAREPLY = DESCRIPTOR.message_types_by_name['GetMetadataReply'] +GetDatapointsRequest = _reflection.GeneratedProtocolMessageType('GetDatapointsRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETDATAPOINTSREQUEST, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetDatapointsRequest) + }) +_sym_db.RegisterMessage(GetDatapointsRequest) + +GetDatapointsReply = _reflection.GeneratedProtocolMessageType('GetDatapointsReply', (_message.Message,), { + + 'DatapointsEntry' : _reflection.GeneratedProtocolMessageType('DatapointsEntry', (_message.Message,), { + 'DESCRIPTOR' : 
_GETDATAPOINTSREPLY_DATAPOINTSENTRY, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetDatapointsReply.DatapointsEntry) + }) + , + 'DESCRIPTOR' : _GETDATAPOINTSREPLY, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetDatapointsReply) + }) +_sym_db.RegisterMessage(GetDatapointsReply) +_sym_db.RegisterMessage(GetDatapointsReply.DatapointsEntry) + +SubscribeRequest = _reflection.GeneratedProtocolMessageType('SubscribeRequest', (_message.Message,), { + 'DESCRIPTOR' : _SUBSCRIBEREQUEST, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.SubscribeRequest) + }) +_sym_db.RegisterMessage(SubscribeRequest) + +SubscribeReply = _reflection.GeneratedProtocolMessageType('SubscribeReply', (_message.Message,), { + + 'FieldsEntry' : _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), { + 'DESCRIPTOR' : _SUBSCRIBEREPLY_FIELDSENTRY, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.SubscribeReply.FieldsEntry) + }) + , + 'DESCRIPTOR' : _SUBSCRIBEREPLY, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.SubscribeReply) + }) +_sym_db.RegisterMessage(SubscribeReply) +_sym_db.RegisterMessage(SubscribeReply.FieldsEntry) + +GetMetadataRequest = _reflection.GeneratedProtocolMessageType('GetMetadataRequest', (_message.Message,), { + 'DESCRIPTOR' : _GETMETADATAREQUEST, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.GetMetadataRequest) + }) +_sym_db.RegisterMessage(GetMetadataRequest) + +GetMetadataReply = _reflection.GeneratedProtocolMessageType('GetMetadataReply', (_message.Message,), { + 'DESCRIPTOR' : _GETMETADATAREPLY, + '__module__' : 'sdv.databroker.v1.broker_pb2' + # 
@@protoc_insertion_point(class_scope:sdv.databroker.v1.GetMetadataReply) + }) +_sym_db.RegisterMessage(GetMetadataReply) + +_BROKER = DESCRIPTOR.services_by_name['Broker'] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._options = None + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._serialized_options = b'8\001' + _SUBSCRIBEREPLY_FIELDSENTRY._options = None + _SUBSCRIBEREPLY_FIELDSENTRY._serialized_options = b'8\001' + _GETDATAPOINTSREQUEST._serialized_start=84 + _GETDATAPOINTSREQUEST._serialized_end=126 + _GETDATAPOINTSREPLY._serialized_start=129 + _GETDATAPOINTSREPLY._serialized_end=305 + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._serialized_start=226 + _GETDATAPOINTSREPLY_DATAPOINTSENTRY._serialized_end=305 + _SUBSCRIBEREQUEST._serialized_start=307 + _SUBSCRIBEREQUEST._serialized_end=340 + _SUBSCRIBEREPLY._serialized_start=343 + _SUBSCRIBEREPLY._serialized_end=499 + _SUBSCRIBEREPLY_FIELDSENTRY._serialized_start=424 + _SUBSCRIBEREPLY_FIELDSENTRY._serialized_end=499 + _GETMETADATAREQUEST._serialized_start=501 + _GETMETADATAREQUEST._serialized_end=536 + _GETMETADATAREPLY._serialized_start=538 + _GETMETADATAREPLY._serialized_end=599 + _BROKER._serialized_start=602 + _BROKER._serialized_end=885 +# @@protoc_insertion_point(module_scope) diff --git a/integration_test/gen_proto/sdv/databroker/v1/broker_pb2.pyi b/integration_test/gen_proto/sdv/databroker/v1/broker_pb2.pyi new file mode 100644 index 0000000..b69a9f1 --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/broker_pb2.pyi @@ -0,0 +1,145 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import gen_proto.sdv.databroker.v1.types_pb2 +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class GetDatapointsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + DATAPOINTS_FIELD_NUMBER: builtins.int + @property + def datapoints(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """A list of requested data points.""" + pass + def __init__(self, + *, + datapoints: typing.Optional[typing.Iterable[typing.Text]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ... +global___GetDatapointsRequest = GetDatapointsRequest + +class GetDatapointsReply(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class DatapointsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: typing.Text + @property + def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ... + def __init__(self, + *, + key: typing.Text = ..., + value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... + + DATAPOINTS_FIELD_NUMBER: builtins.int + @property + def datapoints(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]: + """Contains the values of the requested data points. + If a requested data point is not available, the corresponding Datapoint + will have the respective failure value set. 
+ """ + pass + def __init__(self, + *, + datapoints: typing.Optional[typing.Mapping[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ... +global___GetDatapointsReply = GetDatapointsReply + +class SubscribeRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + QUERY_FIELD_NUMBER: builtins.int + query: typing.Text + """Subscribe to a set of data points (or expressions) described + by the provided query. + The query syntax is a subset of SQL and is described in more + detail in the QUERY.md file. + """ + + def __init__(self, + *, + query: typing.Text = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["query",b"query"]) -> None: ... +global___SubscribeRequest = SubscribeRequest + +class SubscribeReply(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class FieldsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: typing.Text + @property + def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ... + def __init__(self, + *, + key: typing.Text = ..., + value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... + + FIELDS_FIELD_NUMBER: builtins.int + @property + def fields(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]: + """Contains the fields specified by the query. + If a requested data point value is not available, the corresponding + Datapoint will have it's respective failure value set. 
+ """ + pass + def __init__(self, + *, + fields: typing.Optional[typing.Mapping[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["fields",b"fields"]) -> None: ... +global___SubscribeReply = SubscribeReply + +class GetMetadataRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + NAMES_FIELD_NUMBER: builtins.int + @property + def names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """Request metadata for a list of data points referenced by their names. + The names are dot separated strings, e.g. + "Vehicle.Cabin.Seat.Row1.Pos1.Position" or "Vehicle.Speed". + + If no names are provided, metadata for all known data points will be + returned. + """ + pass + def __init__(self, + *, + names: typing.Optional[typing.Iterable[typing.Text]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["names",b"names"]) -> None: ... +global___GetMetadataRequest = GetMetadataRequest + +class GetMetadataReply(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + LIST_FIELD_NUMBER: builtins.int + @property + def list(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[sdv.databroker.v1.types_pb2.Metadata]: + """Contains metadata of the requested data points. If a data point + doesn't exist (i.e. not known to the Data Broker) the corresponding + Metadata isn't part of the returned list. + """ + pass + def __init__(self, + *, + list: typing.Optional[typing.Iterable[sdv.databroker.v1.types_pb2.Metadata]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["list",b"list"]) -> None: ... 
+global___GetMetadataReply = GetMetadataReply diff --git a/integration_test/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py b/integration_test/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py new file mode 100644 index 0000000..5c87db1 --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/broker_pb2_grpc.py @@ -0,0 +1,146 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from gen_proto.sdv.databroker.v1 import broker_pb2 as sdv_dot_databroker_dot_v1_dot_broker__pb2 + + +class BrokerStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetDatapoints = channel.unary_unary( + '/sdv.databroker.v1.Broker/GetDatapoints', + request_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsReply.FromString, + ) + self.Subscribe = channel.unary_stream( + '/sdv.databroker.v1.Broker/Subscribe', + request_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeReply.FromString, + ) + self.GetMetadata = channel.unary_unary( + '/sdv.databroker.v1.Broker/GetMetadata', + request_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataReply.FromString, + ) + + +class BrokerServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetDatapoints(self, request, context): + """Request a set of datapoints (values) + + Returns a list of requested data points. + + InvalidArgument is returned if the request is malformed. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Subscribe(self, request, context): + """Subscribe to a set of data points or conditional expressions + using the Data Broker Query Syntax (described in QUERY.md) + + Returns a stream of replies. + + InvalidArgument is returned if the request is malformed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetMetadata(self, request, context): + """Request the metadata of a set of datapoints + + Returns metadata of the requested data points that exist. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_BrokerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetDatapoints': grpc.unary_unary_rpc_method_handler( + servicer.GetDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsReply.SerializeToString, + ), + 'Subscribe': grpc.unary_stream_rpc_method_handler( + servicer.Subscribe, + request_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeReply.SerializeToString, + ), + 'GetMetadata': grpc.unary_unary_rpc_method_handler( + servicer.GetMetadata, + request_deserializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'sdv.databroker.v1.Broker', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # 
This class is part of an EXPERIMENTAL API. +class Broker(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetDatapoints(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/sdv.databroker.v1.Broker/GetDatapoints', + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetDatapointsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Subscribe(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream(request, target, '/sdv.databroker.v1.Broker/Subscribe', + sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_broker__pb2.SubscribeReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetMetadata(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/sdv.databroker.v1.Broker/GetMetadata', + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_broker__pb2.GetMetadataReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/integration_test/gen_proto/sdv/databroker/v1/collector_pb2.py 
b/integration_test/gen_proto/sdv/databroker/v1/collector_pb2.py new file mode 100644 index 0000000..25d69dd --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/collector_pb2.py @@ -0,0 +1,163 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: sdv/databroker/v1/collector.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from gen_proto.sdv.databroker.v1 import types_pb2 as sdv_dot_databroker_dot_v1_dot_types__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!sdv/databroker/v1/collector.proto\x12\x11sdv.databroker.v1\x1a\x1dsdv/databroker/v1/types.proto\"\xba\x01\n\x17UpdateDatapointsRequest\x12N\n\ndatapoints\x18\x01 \x03(\x0b\x32:.sdv.databroker.v1.UpdateDatapointsRequest.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"\xaf\x01\n\x15UpdateDatapointsReply\x12\x44\n\x06\x65rrors\x18\x01 \x03(\x0b\x32\x34.sdv.databroker.v1.UpdateDatapointsReply.ErrorsEntry\x1aP\n\x0b\x45rrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0e\x32!.sdv.databroker.v1.DatapointError:\x02\x38\x01\"\xba\x01\n\x17StreamDatapointsRequest\x12N\n\ndatapoints\x18\x01 \x03(\x0b\x32:.sdv.databroker.v1.StreamDatapointsRequest.DatapointsEntry\x1aO\n\x0f\x44\x61tapointsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.sdv.databroker.v1.Datapoint:\x02\x38\x01\"\xaf\x01\n\x15StreamDatapointsReply\x12\x44\n\x06\x65rrors\x18\x01 
\x03(\x0b\x32\x34.sdv.databroker.v1.StreamDatapointsReply.ErrorsEntry\x1aP\n\x0b\x45rrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\x30\n\x05value\x18\x02 \x01(\x0e\x32!.sdv.databroker.v1.DatapointError:\x02\x38\x01\"R\n\x19RegisterDatapointsRequest\x12\x35\n\x04list\x18\x01 \x03(\x0b\x32\'.sdv.databroker.v1.RegistrationMetadata\"\x9d\x01\n\x14RegistrationMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\tdata_type\x18\x02 \x01(\x0e\x32\x1b.sdv.databroker.v1.DataType\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x32\n\x0b\x63hange_type\x18\x04 \x01(\x0e\x32\x1d.sdv.databroker.v1.ChangeType\"\x93\x01\n\x17RegisterDatapointsReply\x12H\n\x07results\x18\x01 \x03(\x0b\x32\x37.sdv.databroker.v1.RegisterDatapointsReply.ResultsEntry\x1a.\n\x0cResultsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x32\xd3\x02\n\tCollector\x12n\n\x12RegisterDatapoints\x12,.sdv.databroker.v1.RegisterDatapointsRequest\x1a*.sdv.databroker.v1.RegisterDatapointsReply\x12h\n\x10UpdateDatapoints\x12*.sdv.databroker.v1.UpdateDatapointsRequest\x1a(.sdv.databroker.v1.UpdateDatapointsReply\x12l\n\x10StreamDatapoints\x12*.sdv.databroker.v1.StreamDatapointsRequest\x1a(.sdv.databroker.v1.StreamDatapointsReply(\x01\x30\x01\x62\x06proto3') + + + +_UPDATEDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name['UpdateDatapointsRequest'] +_UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY = _UPDATEDATAPOINTSREQUEST.nested_types_by_name['DatapointsEntry'] +_UPDATEDATAPOINTSREPLY = DESCRIPTOR.message_types_by_name['UpdateDatapointsReply'] +_UPDATEDATAPOINTSREPLY_ERRORSENTRY = _UPDATEDATAPOINTSREPLY.nested_types_by_name['ErrorsEntry'] +_STREAMDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name['StreamDatapointsRequest'] +_STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY = _STREAMDATAPOINTSREQUEST.nested_types_by_name['DatapointsEntry'] +_STREAMDATAPOINTSREPLY = DESCRIPTOR.message_types_by_name['StreamDatapointsReply'] +_STREAMDATAPOINTSREPLY_ERRORSENTRY = 
_STREAMDATAPOINTSREPLY.nested_types_by_name['ErrorsEntry'] +_REGISTERDATAPOINTSREQUEST = DESCRIPTOR.message_types_by_name['RegisterDatapointsRequest'] +_REGISTRATIONMETADATA = DESCRIPTOR.message_types_by_name['RegistrationMetadata'] +_REGISTERDATAPOINTSREPLY = DESCRIPTOR.message_types_by_name['RegisterDatapointsReply'] +_REGISTERDATAPOINTSREPLY_RESULTSENTRY = _REGISTERDATAPOINTSREPLY.nested_types_by_name['ResultsEntry'] +UpdateDatapointsRequest = _reflection.GeneratedProtocolMessageType('UpdateDatapointsRequest', (_message.Message,), { + + 'DatapointsEntry' : _reflection.GeneratedProtocolMessageType('DatapointsEntry', (_message.Message,), { + 'DESCRIPTOR' : _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsRequest.DatapointsEntry) + }) + , + 'DESCRIPTOR' : _UPDATEDATAPOINTSREQUEST, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsRequest) + }) +_sym_db.RegisterMessage(UpdateDatapointsRequest) +_sym_db.RegisterMessage(UpdateDatapointsRequest.DatapointsEntry) + +UpdateDatapointsReply = _reflection.GeneratedProtocolMessageType('UpdateDatapointsReply', (_message.Message,), { + + 'ErrorsEntry' : _reflection.GeneratedProtocolMessageType('ErrorsEntry', (_message.Message,), { + 'DESCRIPTOR' : _UPDATEDATAPOINTSREPLY_ERRORSENTRY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsReply.ErrorsEntry) + }) + , + 'DESCRIPTOR' : _UPDATEDATAPOINTSREPLY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.UpdateDatapointsReply) + }) +_sym_db.RegisterMessage(UpdateDatapointsReply) +_sym_db.RegisterMessage(UpdateDatapointsReply.ErrorsEntry) + +StreamDatapointsRequest = _reflection.GeneratedProtocolMessageType('StreamDatapointsRequest', (_message.Message,), { + + 
'DatapointsEntry' : _reflection.GeneratedProtocolMessageType('DatapointsEntry', (_message.Message,), { + 'DESCRIPTOR' : _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsRequest.DatapointsEntry) + }) + , + 'DESCRIPTOR' : _STREAMDATAPOINTSREQUEST, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsRequest) + }) +_sym_db.RegisterMessage(StreamDatapointsRequest) +_sym_db.RegisterMessage(StreamDatapointsRequest.DatapointsEntry) + +StreamDatapointsReply = _reflection.GeneratedProtocolMessageType('StreamDatapointsReply', (_message.Message,), { + + 'ErrorsEntry' : _reflection.GeneratedProtocolMessageType('ErrorsEntry', (_message.Message,), { + 'DESCRIPTOR' : _STREAMDATAPOINTSREPLY_ERRORSENTRY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsReply.ErrorsEntry) + }) + , + 'DESCRIPTOR' : _STREAMDATAPOINTSREPLY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StreamDatapointsReply) + }) +_sym_db.RegisterMessage(StreamDatapointsReply) +_sym_db.RegisterMessage(StreamDatapointsReply.ErrorsEntry) + +RegisterDatapointsRequest = _reflection.GeneratedProtocolMessageType('RegisterDatapointsRequest', (_message.Message,), { + 'DESCRIPTOR' : _REGISTERDATAPOINTSREQUEST, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.RegisterDatapointsRequest) + }) +_sym_db.RegisterMessage(RegisterDatapointsRequest) + +RegistrationMetadata = _reflection.GeneratedProtocolMessageType('RegistrationMetadata', (_message.Message,), { + 'DESCRIPTOR' : _REGISTRATIONMETADATA, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.RegistrationMetadata) + }) 
+_sym_db.RegisterMessage(RegistrationMetadata) + +RegisterDatapointsReply = _reflection.GeneratedProtocolMessageType('RegisterDatapointsReply', (_message.Message,), { + + 'ResultsEntry' : _reflection.GeneratedProtocolMessageType('ResultsEntry', (_message.Message,), { + 'DESCRIPTOR' : _REGISTERDATAPOINTSREPLY_RESULTSENTRY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.RegisterDatapointsReply.ResultsEntry) + }) + , + 'DESCRIPTOR' : _REGISTERDATAPOINTSREPLY, + '__module__' : 'sdv.databroker.v1.collector_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.RegisterDatapointsReply) + }) +_sym_db.RegisterMessage(RegisterDatapointsReply) +_sym_db.RegisterMessage(RegisterDatapointsReply.ResultsEntry) + +_COLLECTOR = DESCRIPTOR.services_by_name['Collector'] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._options = None + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_options = b'8\001' + _UPDATEDATAPOINTSREPLY_ERRORSENTRY._options = None + _UPDATEDATAPOINTSREPLY_ERRORSENTRY._serialized_options = b'8\001' + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._options = None + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_options = b'8\001' + _STREAMDATAPOINTSREPLY_ERRORSENTRY._options = None + _STREAMDATAPOINTSREPLY_ERRORSENTRY._serialized_options = b'8\001' + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._options = None + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._serialized_options = b'8\001' + _UPDATEDATAPOINTSREQUEST._serialized_start=88 + _UPDATEDATAPOINTSREQUEST._serialized_end=274 + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_start=195 + _UPDATEDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_end=274 + _UPDATEDATAPOINTSREPLY._serialized_start=277 + _UPDATEDATAPOINTSREPLY._serialized_end=452 + _UPDATEDATAPOINTSREPLY_ERRORSENTRY._serialized_start=372 + _UPDATEDATAPOINTSREPLY_ERRORSENTRY._serialized_end=452 + 
_STREAMDATAPOINTSREQUEST._serialized_start=455 + _STREAMDATAPOINTSREQUEST._serialized_end=641 + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_start=195 + _STREAMDATAPOINTSREQUEST_DATAPOINTSENTRY._serialized_end=274 + _STREAMDATAPOINTSREPLY._serialized_start=644 + _STREAMDATAPOINTSREPLY._serialized_end=819 + _STREAMDATAPOINTSREPLY_ERRORSENTRY._serialized_start=372 + _STREAMDATAPOINTSREPLY_ERRORSENTRY._serialized_end=452 + _REGISTERDATAPOINTSREQUEST._serialized_start=821 + _REGISTERDATAPOINTSREQUEST._serialized_end=903 + _REGISTRATIONMETADATA._serialized_start=906 + _REGISTRATIONMETADATA._serialized_end=1063 + _REGISTERDATAPOINTSREPLY._serialized_start=1066 + _REGISTERDATAPOINTSREPLY._serialized_end=1213 + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._serialized_start=1167 + _REGISTERDATAPOINTSREPLY_RESULTSENTRY._serialized_end=1213 + _COLLECTOR._serialized_start=1216 + _COLLECTOR._serialized_end=1555 +# @@protoc_insertion_point(module_scope) diff --git a/integration_test/gen_proto/sdv/databroker/v1/collector_pb2.pyi b/integration_test/gen_proto/sdv/databroker/v1/collector_pb2.pyi new file mode 100644 index 0000000..4e34c2a --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/collector_pb2.pyi @@ -0,0 +1,190 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import gen_proto.sdv.databroker.v1.types_pb2 +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class UpdateDatapointsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class DatapointsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.int + @property + def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ... 
+ def __init__(self, + *, + key: builtins.int = ..., + value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... + + DATAPOINTS_FIELD_NUMBER: builtins.int + @property + def datapoints(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]: ... + def __init__(self, + *, + datapoints: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ... +global___UpdateDatapointsRequest = UpdateDatapointsRequest + +class UpdateDatapointsReply(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class ErrorsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.int + value: sdv.databroker.v1.types_pb2.DatapointError.ValueType + def __init__(self, + *, + key: builtins.int = ..., + value: sdv.databroker.v1.types_pb2.DatapointError.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... + + ERRORS_FIELD_NUMBER: builtins.int + @property + def errors(self) -> google.protobuf.internal.containers.ScalarMap[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]: + """If empty, everything went well""" + pass + def __init__(self, + *, + errors: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["errors",b"errors"]) -> None: ... 
+global___UpdateDatapointsReply = UpdateDatapointsReply + +class StreamDatapointsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class DatapointsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.int + @property + def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ... + def __init__(self, + *, + key: builtins.int = ..., + value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... + + DATAPOINTS_FIELD_NUMBER: builtins.int + @property + def datapoints(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]: ... + def __init__(self, + *, + datapoints: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ... +global___StreamDatapointsRequest = StreamDatapointsRequest + +class StreamDatapointsReply(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class ErrorsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.int + value: sdv.databroker.v1.types_pb2.DatapointError.ValueType + def __init__(self, + *, + key: builtins.int = ..., + value: sdv.databroker.v1.types_pb2.DatapointError.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... 
+ + ERRORS_FIELD_NUMBER: builtins.int + @property + def errors(self) -> google.protobuf.internal.containers.ScalarMap[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]: + """If empty, everything went well""" + pass + def __init__(self, + *, + errors: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["errors",b"errors"]) -> None: ... +global___StreamDatapointsReply = StreamDatapointsReply + +class RegisterDatapointsRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + LIST_FIELD_NUMBER: builtins.int + @property + def list(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RegistrationMetadata]: ... + def __init__(self, + *, + list: typing.Optional[typing.Iterable[global___RegistrationMetadata]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["list",b"list"]) -> None: ... +global___RegisterDatapointsRequest = RegisterDatapointsRequest + +class RegistrationMetadata(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + NAME_FIELD_NUMBER: builtins.int + DATA_TYPE_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + CHANGE_TYPE_FIELD_NUMBER: builtins.int + name: typing.Text + """Name of the data point consisting of elements separated by a dot '.' + (e.g. "Vehicle.Cabin.Seat.Row1.Pos1.Position" or "Vehicle.Speed") + allowing to define datapoints as a tree structure like define by + COVESA VSS (https://covesa.github.io/vehicle_signal_specification/). + """ + + data_type: sdv.databroker.v1.types_pb2.DataType.ValueType + description: typing.Text + """"Free text" description allowing to give additional details targeted + to an app developer. 
+ """ + + change_type: sdv.databroker.v1.types_pb2.ChangeType.ValueType + def __init__(self, + *, + name: typing.Text = ..., + data_type: sdv.databroker.v1.types_pb2.DataType.ValueType = ..., + description: typing.Text = ..., + change_type: sdv.databroker.v1.types_pb2.ChangeType.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["change_type",b"change_type","data_type",b"data_type","description",b"description","name",b"name"]) -> None: ... +global___RegistrationMetadata = RegistrationMetadata + +class RegisterDatapointsReply(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class ResultsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: typing.Text + value: builtins.int + def __init__(self, + *, + key: typing.Text = ..., + value: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... + + RESULTS_FIELD_NUMBER: builtins.int + @property + def results(self) -> google.protobuf.internal.containers.ScalarMap[typing.Text, builtins.int]: + """Maps each data point name passed in RegisterDatapointsRequest to a data point id""" + pass + def __init__(self, + *, + results: typing.Optional[typing.Mapping[typing.Text, builtins.int]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["results",b"results"]) -> None: ... +global___RegisterDatapointsReply = RegisterDatapointsReply diff --git a/integration_test/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py b/integration_test/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py new file mode 100644 index 0000000..419bfde --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/collector_pb2_grpc.py @@ -0,0 +1,176 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from gen_proto.sdv.databroker.v1 import collector_pb2 as sdv_dot_databroker_dot_v1_dot_collector__pb2 + + +class CollectorStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.RegisterDatapoints = channel.unary_unary( + '/sdv.databroker.v1.Collector/RegisterDatapoints', + request_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsReply.FromString, + ) + self.UpdateDatapoints = channel.unary_unary( + '/sdv.databroker.v1.Collector/UpdateDatapoints', + request_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsReply.FromString, + ) + self.StreamDatapoints = channel.stream_stream( + '/sdv.databroker.v1.Collector/StreamDatapoints', + request_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsRequest.SerializeToString, + response_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsReply.FromString, + ) + + +class CollectorServicer(object): + """Missing associated documentation comment in .proto file.""" + + def RegisterDatapoints(self, request, context): + """A feeder (provider) shall call this as a first step to announce its "owned" data points + to the Data Broker. + If the registration of at least one of the passed data point fails, the overall registration + is rejected and the gRPC status code ABORTED is returned (to indicate the "aborted" registration). + The details, which data point(s) caused the failure and the reason, is passed in back in human- + readable form in the status message. 
Possible failure resaons are: + * PERMISSION_DENIED - Not allowed to register this name + * ALREADY_REGISTERED - The data point is already registered by some other feeder + * RE_REGISTRATION_MISMATCH - Already registered by this feeder but with differing metadata + * INVALID_NAME - The passed name of the datapoint has an invalid structure + * INVALID_VALUE_TYPE - The passed ValueType is not supported + * INVALID_CHANGE_TYPE - The passed ChangeType is not supported + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateDatapoints(self, request, context): + """TODO: Convert RegisterDatapointsReply into a stream in order to be able to communicate + subscription state (i.e. if there are subscribing clients) + or + Use a separate function (typically immediately) called after successful + registration of datapoints, e.g.: + + rpc GetSubscriptionStates() returns (stream SubscriptionStatesReply); + or + rpc ProvideDatapoints(ProvideDatapointsRequest) returns (stream ProvideDatapointsReply); + + Provide a set of updated datapoint values to the broker. + This is the unary equivalent of `StreamDatapoints` below and is better suited for cases + where the frequency of updates is rather low. + + NOTE: The values provided in a single request are handled as a single update in the + data broker. This ensures that any clients requesting (or subscribing to) a set of + datapoints will get a consistent update, i.e. that either all values are updated or + none are. + + Returns: any errors encountered updating the datapoints + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamDatapoints(self, request_iterator, context): + """Provide a stream with updated datapoint values to the broker. 
+ This is the streaming equivalent of `UpdateDatapoints` above and is better suited for + cases where the frequency of updates is high. + + NOTE: The values provided in a single request are handled as a single update in the + data broker. This ensures that any clients requesting (or subscribing to) a set of + datapoints will get a consistent update, i.e. that either all values are updated or + none are. + + Returns: any errors encountered updating the datapoints + + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_CollectorServicer_to_server(servicer, server): + rpc_method_handlers = { + 'RegisterDatapoints': grpc.unary_unary_rpc_method_handler( + servicer.RegisterDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsReply.SerializeToString, + ), + 'UpdateDatapoints': grpc.unary_unary_rpc_method_handler( + servicer.UpdateDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsReply.SerializeToString, + ), + 'StreamDatapoints': grpc.stream_stream_rpc_method_handler( + servicer.StreamDatapoints, + request_deserializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsRequest.FromString, + response_serializer=sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'sdv.databroker.v1.Collector', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class Collector(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def RegisterDatapoints(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/sdv.databroker.v1.Collector/RegisterDatapoints', + sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_collector__pb2.RegisterDatapointsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def UpdateDatapoints(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/sdv.databroker.v1.Collector/UpdateDatapoints', + sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_collector__pb2.UpdateDatapointsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def StreamDatapoints(request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.stream_stream(request_iterator, target, '/sdv.databroker.v1.Collector/StreamDatapoints', + sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsRequest.SerializeToString, + sdv_dot_databroker_dot_v1_dot_collector__pb2.StreamDatapointsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git 
a/integration_test/gen_proto/sdv/databroker/v1/types_pb2.py b/integration_test/gen_proto/sdv/databroker/v1/types_pb2.py new file mode 100644 index 0000000..7cf9bc4 --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/types_pb2.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: sdv/databroker/v1/types.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dsdv/databroker/v1/types.proto\x12\x11sdv.databroker.v1\x1a\x1fgoogle/protobuf/timestamp.proto\"\x1d\n\x0bStringArray\x12\x0e\n\x06values\x18\x01 \x03(\t\"\x1b\n\tBoolArray\x12\x0e\n\x06values\x18\x01 \x03(\x08\"\x1c\n\nInt32Array\x12\x0e\n\x06values\x18\x01 \x03(\x11\"\x1c\n\nInt64Array\x12\x0e\n\x06values\x18\x01 \x03(\x12\"\x1d\n\x0bUint32Array\x12\x0e\n\x06values\x18\x01 \x03(\r\"\x1d\n\x0bUint64Array\x12\x0e\n\x06values\x18\x01 \x03(\x04\"\x1c\n\nFloatArray\x12\x0e\n\x06values\x18\x01 \x03(\x02\"\x1d\n\x0b\x44oubleArray\x12\x0e\n\x06values\x18\x01 \x03(\x01\"\xe2\x06\n\tDatapoint\x12-\n\ttimestamp\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12=\n\rfailure_value\x18\n \x01(\x0e\x32$.sdv.databroker.v1.Datapoint.FailureH\x00\x12\x16\n\x0cstring_value\x18\x0b \x01(\tH\x00\x12\x14\n\nbool_value\x18\x0c \x01(\x08H\x00\x12\x15\n\x0bint32_value\x18\r \x01(\x11H\x00\x12\x15\n\x0bint64_value\x18\x0e \x01(\x12H\x00\x12\x16\n\x0cuint32_value\x18\x0f 
\x01(\rH\x00\x12\x16\n\x0cuint64_value\x18\x10 \x01(\x04H\x00\x12\x15\n\x0b\x66loat_value\x18\x11 \x01(\x02H\x00\x12\x16\n\x0c\x64ouble_value\x18\x12 \x01(\x01H\x00\x12\x36\n\x0cstring_array\x18\x15 \x01(\x0b\x32\x1e.sdv.databroker.v1.StringArrayH\x00\x12\x32\n\nbool_array\x18\x16 \x01(\x0b\x32\x1c.sdv.databroker.v1.BoolArrayH\x00\x12\x34\n\x0bint32_array\x18\x17 \x01(\x0b\x32\x1d.sdv.databroker.v1.Int32ArrayH\x00\x12\x34\n\x0bint64_array\x18\x18 \x01(\x0b\x32\x1d.sdv.databroker.v1.Int64ArrayH\x00\x12\x36\n\x0cuint32_array\x18\x19 \x01(\x0b\x32\x1e.sdv.databroker.v1.Uint32ArrayH\x00\x12\x36\n\x0cuint64_array\x18\x1a \x01(\x0b\x32\x1e.sdv.databroker.v1.Uint64ArrayH\x00\x12\x34\n\x0b\x66loat_array\x18\x1b \x01(\x0b\x32\x1d.sdv.databroker.v1.FloatArrayH\x00\x12\x36\n\x0c\x64ouble_array\x18\x1c \x01(\x0b\x32\x1e.sdv.databroker.v1.DoubleArrayH\x00\"m\n\x07\x46\x61ilure\x12\x11\n\rINVALID_VALUE\x10\x00\x12\x11\n\rNOT_AVAILABLE\x10\x01\x12\x15\n\x11UNKNOWN_DATAPOINT\x10\x02\x12\x11\n\rACCESS_DENIED\x10\x03\x12\x12\n\x0eINTERNAL_ERROR\x10\x04\x42\x07\n\x05value\"\x9d\x01\n\x08Metadata\x12\n\n\x02id\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12.\n\tdata_type\x18\x05 \x01(\x0e\x32\x1b.sdv.databroker.v1.DataType\x12\x32\n\x0b\x63hange_type\x18\x06 \x01(\x0e\x32\x1d.sdv.databroker.v1.ChangeType\x12\x13\n\x0b\x64\x65scription\x18\x07 
\x01(\t*\x84\x03\n\x08\x44\x61taType\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\x08\n\x04INT8\x10\x02\x12\t\n\x05INT16\x10\x03\x12\t\n\x05INT32\x10\x04\x12\t\n\x05INT64\x10\x05\x12\t\n\x05UINT8\x10\x06\x12\n\n\x06UINT16\x10\x07\x12\n\n\x06UINT32\x10\x08\x12\n\n\x06UINT64\x10\t\x12\t\n\x05\x46LOAT\x10\n\x12\n\n\x06\x44OUBLE\x10\x0b\x12\r\n\tTIMESTAMP\x10\x0c\x12\x10\n\x0cSTRING_ARRAY\x10\x14\x12\x0e\n\nBOOL_ARRAY\x10\x15\x12\x0e\n\nINT8_ARRAY\x10\x16\x12\x0f\n\x0bINT16_ARRAY\x10\x17\x12\x0f\n\x0bINT32_ARRAY\x10\x18\x12\x0f\n\x0bINT64_ARRAY\x10\x19\x12\x0f\n\x0bUINT8_ARRAY\x10\x1a\x12\x10\n\x0cUINT16_ARRAY\x10\x1b\x12\x10\n\x0cUINT32_ARRAY\x10\x1c\x12\x10\n\x0cUINT64_ARRAY\x10\x1d\x12\x0f\n\x0b\x46LOAT_ARRAY\x10\x1e\x12\x10\n\x0c\x44OUBLE_ARRAY\x10\x1f\x12\x13\n\x0fTIMESTAMP_ARRAY\x10 *s\n\x0e\x44\x61tapointError\x12\x15\n\x11UNKNOWN_DATAPOINT\x10\x00\x12\x10\n\x0cINVALID_TYPE\x10\x01\x12\x11\n\rACCESS_DENIED\x10\x02\x12\x12\n\x0eINTERNAL_ERROR\x10\x03\x12\x11\n\rOUT_OF_BOUNDS\x10\x04*7\n\nChangeType\x12\n\n\x06STATIC\x10\x00\x12\r\n\tON_CHANGE\x10\x01\x12\x0e\n\nCONTINUOUS\x10\x02\x62\x06proto3') + +_DATATYPE = DESCRIPTOR.enum_types_by_name['DataType'] +DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE) +_DATAPOINTERROR = DESCRIPTOR.enum_types_by_name['DatapointError'] +DatapointError = enum_type_wrapper.EnumTypeWrapper(_DATAPOINTERROR) +_CHANGETYPE = DESCRIPTOR.enum_types_by_name['ChangeType'] +ChangeType = enum_type_wrapper.EnumTypeWrapper(_CHANGETYPE) +STRING = 0 +BOOL = 1 +INT8 = 2 +INT16 = 3 +INT32 = 4 +INT64 = 5 +UINT8 = 6 +UINT16 = 7 +UINT32 = 8 +UINT64 = 9 +FLOAT = 10 +DOUBLE = 11 +TIMESTAMP = 12 +STRING_ARRAY = 20 +BOOL_ARRAY = 21 +INT8_ARRAY = 22 +INT16_ARRAY = 23 +INT32_ARRAY = 24 +INT64_ARRAY = 25 +UINT8_ARRAY = 26 +UINT16_ARRAY = 27 +UINT32_ARRAY = 28 +UINT64_ARRAY = 29 +FLOAT_ARRAY = 30 +DOUBLE_ARRAY = 31 +TIMESTAMP_ARRAY = 32 +UNKNOWN_DATAPOINT = 0 +INVALID_TYPE = 1 +ACCESS_DENIED = 2 +INTERNAL_ERROR = 3 +OUT_OF_BOUNDS = 4 +STATIC 
= 0 +ON_CHANGE = 1 +CONTINUOUS = 2 + + +_STRINGARRAY = DESCRIPTOR.message_types_by_name['StringArray'] +_BOOLARRAY = DESCRIPTOR.message_types_by_name['BoolArray'] +_INT32ARRAY = DESCRIPTOR.message_types_by_name['Int32Array'] +_INT64ARRAY = DESCRIPTOR.message_types_by_name['Int64Array'] +_UINT32ARRAY = DESCRIPTOR.message_types_by_name['Uint32Array'] +_UINT64ARRAY = DESCRIPTOR.message_types_by_name['Uint64Array'] +_FLOATARRAY = DESCRIPTOR.message_types_by_name['FloatArray'] +_DOUBLEARRAY = DESCRIPTOR.message_types_by_name['DoubleArray'] +_DATAPOINT = DESCRIPTOR.message_types_by_name['Datapoint'] +_METADATA = DESCRIPTOR.message_types_by_name['Metadata'] +_DATAPOINT_FAILURE = _DATAPOINT.enum_types_by_name['Failure'] +StringArray = _reflection.GeneratedProtocolMessageType('StringArray', (_message.Message,), { + 'DESCRIPTOR' : _STRINGARRAY, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.StringArray) + }) +_sym_db.RegisterMessage(StringArray) + +BoolArray = _reflection.GeneratedProtocolMessageType('BoolArray', (_message.Message,), { + 'DESCRIPTOR' : _BOOLARRAY, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.BoolArray) + }) +_sym_db.RegisterMessage(BoolArray) + +Int32Array = _reflection.GeneratedProtocolMessageType('Int32Array', (_message.Message,), { + 'DESCRIPTOR' : _INT32ARRAY, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Int32Array) + }) +_sym_db.RegisterMessage(Int32Array) + +Int64Array = _reflection.GeneratedProtocolMessageType('Int64Array', (_message.Message,), { + 'DESCRIPTOR' : _INT64ARRAY, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Int64Array) + }) +_sym_db.RegisterMessage(Int64Array) + +Uint32Array = _reflection.GeneratedProtocolMessageType('Uint32Array', (_message.Message,), { + 'DESCRIPTOR' : _UINT32ARRAY, + '__module__' : 
'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Uint32Array) + }) +_sym_db.RegisterMessage(Uint32Array) + +Uint64Array = _reflection.GeneratedProtocolMessageType('Uint64Array', (_message.Message,), { + 'DESCRIPTOR' : _UINT64ARRAY, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Uint64Array) + }) +_sym_db.RegisterMessage(Uint64Array) + +FloatArray = _reflection.GeneratedProtocolMessageType('FloatArray', (_message.Message,), { + 'DESCRIPTOR' : _FLOATARRAY, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.FloatArray) + }) +_sym_db.RegisterMessage(FloatArray) + +DoubleArray = _reflection.GeneratedProtocolMessageType('DoubleArray', (_message.Message,), { + 'DESCRIPTOR' : _DOUBLEARRAY, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.DoubleArray) + }) +_sym_db.RegisterMessage(DoubleArray) + +Datapoint = _reflection.GeneratedProtocolMessageType('Datapoint', (_message.Message,), { + 'DESCRIPTOR' : _DATAPOINT, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Datapoint) + }) +_sym_db.RegisterMessage(Datapoint) + +Metadata = _reflection.GeneratedProtocolMessageType('Metadata', (_message.Message,), { + 'DESCRIPTOR' : _METADATA, + '__module__' : 'sdv.databroker.v1.types_pb2' + # @@protoc_insertion_point(class_scope:sdv.databroker.v1.Metadata) + }) +_sym_db.RegisterMessage(Metadata) + +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _DATATYPE._serialized_start=1358 + _DATATYPE._serialized_end=1746 + _DATAPOINTERROR._serialized_start=1748 + _DATAPOINTERROR._serialized_end=1863 + _CHANGETYPE._serialized_start=1865 + _CHANGETYPE._serialized_end=1920 + _STRINGARRAY._serialized_start=85 + _STRINGARRAY._serialized_end=114 + _BOOLARRAY._serialized_start=116 + _BOOLARRAY._serialized_end=143 + 
_INT32ARRAY._serialized_start=145 + _INT32ARRAY._serialized_end=173 + _INT64ARRAY._serialized_start=175 + _INT64ARRAY._serialized_end=203 + _UINT32ARRAY._serialized_start=205 + _UINT32ARRAY._serialized_end=234 + _UINT64ARRAY._serialized_start=236 + _UINT64ARRAY._serialized_end=265 + _FLOATARRAY._serialized_start=267 + _FLOATARRAY._serialized_end=295 + _DOUBLEARRAY._serialized_start=297 + _DOUBLEARRAY._serialized_end=326 + _DATAPOINT._serialized_start=329 + _DATAPOINT._serialized_end=1195 + _DATAPOINT_FAILURE._serialized_start=1077 + _DATAPOINT_FAILURE._serialized_end=1186 + _METADATA._serialized_start=1198 + _METADATA._serialized_end=1355 +# @@protoc_insertion_point(module_scope) diff --git a/integration_test/gen_proto/sdv/databroker/v1/types_pb2.pyi b/integration_test/gen_proto/sdv/databroker/v1/types_pb2.pyi new file mode 100644 index 0000000..735a920 --- /dev/null +++ b/integration_test/gen_proto/sdv/databroker/v1/types_pb2.pyi @@ -0,0 +1,375 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +""" +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import typing +import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _DataType: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType +class _DataTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + STRING: _DataType.ValueType # 0 + BOOL: _DataType.ValueType # 1 + INT8: _DataType.ValueType # 2 + INT16: _DataType.ValueType # 3 + INT32: _DataType.ValueType # 4 + INT64: _DataType.ValueType # 5 + UINT8: _DataType.ValueType # 6 + UINT16: _DataType.ValueType # 7 + UINT32: _DataType.ValueType # 8 + UINT64: _DataType.ValueType # 9 + FLOAT: _DataType.ValueType # 10 + DOUBLE: _DataType.ValueType # 11 + TIMESTAMP: _DataType.ValueType # 12 + STRING_ARRAY: _DataType.ValueType # 20 + BOOL_ARRAY: _DataType.ValueType # 21 + INT8_ARRAY: _DataType.ValueType # 22 + INT16_ARRAY: _DataType.ValueType # 23 + INT32_ARRAY: _DataType.ValueType # 24 + INT64_ARRAY: _DataType.ValueType # 25 + UINT8_ARRAY: _DataType.ValueType # 26 + UINT16_ARRAY: _DataType.ValueType # 27 + UINT32_ARRAY: _DataType.ValueType # 28 + UINT64_ARRAY: _DataType.ValueType # 29 + FLOAT_ARRAY: _DataType.ValueType # 30 + DOUBLE_ARRAY: _DataType.ValueType # 31 + TIMESTAMP_ARRAY: _DataType.ValueType # 32 +class DataType(_DataType, metaclass=_DataTypeEnumTypeWrapper): + """Data type of a signal + + Protobuf doesn't support int8, int16, uint8 or uint16. + These are mapped to sint32 and uint32 respectively. 
+ """ + pass + +STRING: DataType.ValueType # 0 +BOOL: DataType.ValueType # 1 +INT8: DataType.ValueType # 2 +INT16: DataType.ValueType # 3 +INT32: DataType.ValueType # 4 +INT64: DataType.ValueType # 5 +UINT8: DataType.ValueType # 6 +UINT16: DataType.ValueType # 7 +UINT32: DataType.ValueType # 8 +UINT64: DataType.ValueType # 9 +FLOAT: DataType.ValueType # 10 +DOUBLE: DataType.ValueType # 11 +TIMESTAMP: DataType.ValueType # 12 +STRING_ARRAY: DataType.ValueType # 20 +BOOL_ARRAY: DataType.ValueType # 21 +INT8_ARRAY: DataType.ValueType # 22 +INT16_ARRAY: DataType.ValueType # 23 +INT32_ARRAY: DataType.ValueType # 24 +INT64_ARRAY: DataType.ValueType # 25 +UINT8_ARRAY: DataType.ValueType # 26 +UINT16_ARRAY: DataType.ValueType # 27 +UINT32_ARRAY: DataType.ValueType # 28 +UINT64_ARRAY: DataType.ValueType # 29 +FLOAT_ARRAY: DataType.ValueType # 30 +DOUBLE_ARRAY: DataType.ValueType # 31 +TIMESTAMP_ARRAY: DataType.ValueType # 32 +global___DataType = DataType + + +class _DatapointError: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType +class _DatapointErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DatapointError.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + UNKNOWN_DATAPOINT: _DatapointError.ValueType # 0 + INVALID_TYPE: _DatapointError.ValueType # 1 + ACCESS_DENIED: _DatapointError.ValueType # 2 + INTERNAL_ERROR: _DatapointError.ValueType # 3 + OUT_OF_BOUNDS: _DatapointError.ValueType # 4 +class DatapointError(_DatapointError, metaclass=_DatapointErrorEnumTypeWrapper): + pass + +UNKNOWN_DATAPOINT: DatapointError.ValueType # 0 +INVALID_TYPE: DatapointError.ValueType # 1 +ACCESS_DENIED: DatapointError.ValueType # 2 +INTERNAL_ERROR: DatapointError.ValueType # 3 +OUT_OF_BOUNDS: DatapointError.ValueType # 4 +global___DatapointError = DatapointError + + +class _ChangeType: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias 
= ValueType +class _ChangeTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ChangeType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + STATIC: _ChangeType.ValueType # 0 + """Value never changes""" + + ON_CHANGE: _ChangeType.ValueType # 1 + """Updates are provided every time the value changes (i.e.""" + + CONTINUOUS: _ChangeType.ValueType # 2 + """window is open / closed) + Value is updated continuously. Broker needs to tell + """ + +class ChangeType(_ChangeType, metaclass=_ChangeTypeEnumTypeWrapper): + pass + +STATIC: ChangeType.ValueType # 0 +"""Value never changes""" + +ON_CHANGE: ChangeType.ValueType # 1 +"""Updates are provided every time the value changes (i.e.""" + +CONTINUOUS: ChangeType.ValueType # 2 +"""window is open / closed) +Value is updated continuously. Broker needs to tell +""" + +global___ChangeType = ChangeType + + +class StringArray(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[typing.Text]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... +global___StringArray = StringArray + +class BoolArray(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[builtins.bool]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... 
+global___BoolArray = BoolArray + +class Int32Array(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[builtins.int]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... +global___Int32Array = Int32Array + +class Int64Array(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[builtins.int]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... +global___Int64Array = Int64Array + +class Uint32Array(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[builtins.int]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... +global___Uint32Array = Uint32Array + +class Uint64Array(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[builtins.int]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... 
+global___Uint64Array = Uint64Array + +class FloatArray(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[builtins.float]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... +global___FloatArray = FloatArray + +class DoubleArray(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ... + def __init__(self, + *, + values: typing.Optional[typing.Iterable[builtins.float]] = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... 
# ---------------------------------------------------------------------------
# integration_test/gen_proto/sdv/databroker/v1/types_pb2.pyi (tail)
# NOTE(review): machine-generated mypy-protobuf stub — do not edit by hand;
# regenerate via integration_test/update-protobuf.sh instead.
# ---------------------------------------------------------------------------
global___DoubleArray = DoubleArray

class Datapoint(google.protobuf.message.Message):
    # Stub for sdv.databroker.v1.Datapoint: a timestamped value where exactly
    # one member of the `value` oneof (incl. failure_value) is set at a time.
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    class _Failure:
        ValueType = typing.NewType('ValueType', builtins.int)
        V: typing_extensions.TypeAlias = ValueType
    class _FailureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Datapoint._Failure.ValueType], builtins.type):
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        INVALID_VALUE: Datapoint._Failure.ValueType  # 0
        """The data point is known, but doesn't have a valid value"""

        NOT_AVAILABLE: Datapoint._Failure.ValueType  # 1
        """The data point is known, but no value is available"""

        UNKNOWN_DATAPOINT: Datapoint._Failure.ValueType  # 2
        """Unknown datapoint"""

        ACCESS_DENIED: Datapoint._Failure.ValueType  # 3
        """Access denied"""

        INTERNAL_ERROR: Datapoint._Failure.ValueType  # 4
        """Unexpected internal error"""

    class Failure(_Failure, metaclass=_FailureEnumTypeWrapper):
        pass

    INVALID_VALUE: Datapoint.Failure.ValueType  # 0
    """The data point is known, but doesn't have a valid value"""

    NOT_AVAILABLE: Datapoint.Failure.ValueType  # 1
    """The data point is known, but no value is available"""

    UNKNOWN_DATAPOINT: Datapoint.Failure.ValueType  # 2
    """Unknown datapoint"""

    ACCESS_DENIED: Datapoint.Failure.ValueType  # 3
    """Access denied"""

    INTERNAL_ERROR: Datapoint.Failure.ValueType  # 4
    """Unexpected internal error"""


    TIMESTAMP_FIELD_NUMBER: builtins.int
    FAILURE_VALUE_FIELD_NUMBER: builtins.int
    STRING_VALUE_FIELD_NUMBER: builtins.int
    BOOL_VALUE_FIELD_NUMBER: builtins.int
    INT32_VALUE_FIELD_NUMBER: builtins.int
    INT64_VALUE_FIELD_NUMBER: builtins.int
    UINT32_VALUE_FIELD_NUMBER: builtins.int
    UINT64_VALUE_FIELD_NUMBER: builtins.int
    FLOAT_VALUE_FIELD_NUMBER: builtins.int
    DOUBLE_VALUE_FIELD_NUMBER: builtins.int
    STRING_ARRAY_FIELD_NUMBER: builtins.int
    BOOL_ARRAY_FIELD_NUMBER: builtins.int
    INT32_ARRAY_FIELD_NUMBER: builtins.int
    INT64_ARRAY_FIELD_NUMBER: builtins.int
    UINT32_ARRAY_FIELD_NUMBER: builtins.int
    UINT64_ARRAY_FIELD_NUMBER: builtins.int
    FLOAT_ARRAY_FIELD_NUMBER: builtins.int
    DOUBLE_ARRAY_FIELD_NUMBER: builtins.int
    @property
    def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp:
        """Timestamp of the value"""
        pass
    failure_value: global___Datapoint.Failure.ValueType
    string_value: typing.Text
    bool_value: builtins.bool
    int32_value: builtins.int
    int64_value: builtins.int
    uint32_value: builtins.int
    uint64_value: builtins.int
    float_value: builtins.float
    double_value: builtins.float
    @property
    def string_array(self) -> global___StringArray: ...
    @property
    def bool_array(self) -> global___BoolArray: ...
    @property
    def int32_array(self) -> global___Int32Array: ...
    @property
    def int64_array(self) -> global___Int64Array: ...
    @property
    def uint32_array(self) -> global___Uint32Array: ...
    @property
    def uint64_array(self) -> global___Uint64Array: ...
    @property
    def float_array(self) -> global___FloatArray: ...
    @property
    def double_array(self) -> global___DoubleArray: ...
    def __init__(self,
        *,
        timestamp: typing.Optional[google.protobuf.timestamp_pb2.Timestamp] = ...,
        failure_value: global___Datapoint.Failure.ValueType = ...,
        string_value: typing.Text = ...,
        bool_value: builtins.bool = ...,
        int32_value: builtins.int = ...,
        int64_value: builtins.int = ...,
        uint32_value: builtins.int = ...,
        uint64_value: builtins.int = ...,
        float_value: builtins.float = ...,
        double_value: builtins.float = ...,
        string_array: typing.Optional[global___StringArray] = ...,
        bool_array: typing.Optional[global___BoolArray] = ...,
        int32_array: typing.Optional[global___Int32Array] = ...,
        int64_array: typing.Optional[global___Int64Array] = ...,
        uint32_array: typing.Optional[global___Uint32Array] = ...,
        uint64_array: typing.Optional[global___Uint64Array] = ...,
        float_array: typing.Optional[global___FloatArray] = ...,
        double_array: typing.Optional[global___DoubleArray] = ...,
        ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["bool_array",b"bool_array","bool_value",b"bool_value","double_array",b"double_array","double_value",b"double_value","failure_value",b"failure_value","float_array",b"float_array","float_value",b"float_value","int32_array",b"int32_array","int32_value",b"int32_value","int64_array",b"int64_array","int64_value",b"int64_value","string_array",b"string_array","string_value",b"string_value","timestamp",b"timestamp","uint32_array",b"uint32_array","uint32_value",b"uint32_value","uint64_array",b"uint64_array","uint64_value",b"uint64_value","value",b"value"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["bool_array",b"bool_array","bool_value",b"bool_value","double_array",b"double_array","double_value",b"double_value","failure_value",b"failure_value","float_array",b"float_array","float_value",b"float_value","int32_array",b"int32_array","int32_value",b"int32_value","int64_array",b"int64_array","int64_value",b"int64_value","string_array",b"string_array","string_value",b"string_value","timestamp",b"timestamp","uint32_array",b"uint32_array","uint32_value",b"uint32_value","uint64_array",b"uint64_array","uint64_value",b"uint64_value","value",b"value"]) -> None: ...
    def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["failure_value","string_value","bool_value","int32_value","int64_value","uint32_value","uint64_value","float_value","double_value","string_array","bool_array","int32_array","int64_array","uint32_array","uint64_array","float_array","double_array"]]: ...
global___Datapoint = Datapoint

class Metadata(google.protobuf.message.Message):
    # Stub for sdv.databroker.v1.Metadata: registration info for one datapoint.
    DESCRIPTOR: google.protobuf.descriptor.Descriptor
    ID_FIELD_NUMBER: builtins.int
    NAME_FIELD_NUMBER: builtins.int
    DATA_TYPE_FIELD_NUMBER: builtins.int
    CHANGE_TYPE_FIELD_NUMBER: builtins.int
    DESCRIPTION_FIELD_NUMBER: builtins.int
    id: builtins.int
    """Id to be used in "get" and "subscribe" requests. Ids stay valid during
    one power cycle, only.
    """

    name: typing.Text
    data_type: global___DataType.ValueType
    change_type: global___ChangeType.ValueType
    """CONTINUOUS or STATIC or ON_CHANGE"""

    description: typing.Text
    def __init__(self,
        *,
        id: builtins.int = ...,
        name: typing.Text = ...,
        data_type: global___DataType.ValueType = ...,
        change_type: global___ChangeType.ValueType = ...,
        description: typing.Text = ...,
        ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["change_type",b"change_type","data_type",b"data_type","description",b"description","id",b"id","name",b"name"]) -> None: ...
global___Metadata = Metadata

# ---------------------------------------------------------------------------
# integration_test/gen_proto/sdv/databroker/v1/types_pb2_grpc.py (new file)
# NOTE(review): generated module is intentionally (almost) empty — types.proto
# declares only messages/enums, no services.
# ---------------------------------------------------------------------------
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

# ---------------------------------------------------------------------------
# integration_test/it-config (begins here; continues below)
# ---------------------------------------------------------------------------
#!/bin/bash
# /********************************************************************************
# * Copyright (c) 2022 Contributors to the Eclipse Foundation
# *
# * See the NOTICE file(s) distributed with this work for additional
# * information regarding copyright ownership.
# *
# * This program and the accompanying materials are made available under the
# * terms of the Apache License 2.0 which is available at
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * SPDX-License-Identifier: Apache-2.0
# ********************************************************************************/

# it-config — shared environment for the integration-test scripts
# (it-setup.sh, it-seat-move.sh). Meant to be sourced, not executed.

### Local docker image config (may force rebuild + tag local images)
# export VDB_TAG="latest"
# export SEAT_TAG="latest"
# export FEEDER_TAG="latest"

# Pinned release tags; any "latest" tag switches it-setup.sh to local builds.
export VDB_TAG="v0.16.0"
export SEAT_TAG="v0.16.0"
export FEEDER_TAG="v0.16.0"

### ghcr.io image config
export DOCKER_REPO="ghcr.io"
export VDB_IMAGE="${DOCKER_REPO}/eclipse/kuksa.val/oci-databroker:${VDB_TAG}"
export SEAT_IMAGE="${DOCKER_REPO}/eclipse/kuksa.val.services/oci_vservice-seat:${SEAT_TAG}"
export FEEDER_IMAGE="${DOCKER_REPO}/eclipse/kuksa.val.feeders/oci_feeder-can:${FEEDER_TAG}"

# Container names used by it-setup.sh / the tests ("it-" = integration test)
export VDB_CONTAINER="it-databroker"
export SEAT_CONTAINER="it-seat_service"
export FEEDER_CONTAINER="it-feeder_can"

# Host-side ports the container services are published on (container ports
# are 55555 for the databroker and 50051 for the seat service).
VDB_HOST_PORT="35555"
SEAT_HOST_PORT="30051"
# Address of the databroker as seen from the other containers on the
# user-defined docker network (container-name DNS).
DOCKER_VDB_ADDRESS="${VDB_CONTAINER}:55555"

# NOTE(review): "--expose ${VDB_HOST_PORT}" / "--expose ${SEAT_HOST_PORT}"
# exposes the *host* port number inside the container; presumably the
# container ports (55555 / 50051) were meant — confirm. "-p" already
# publishes the mapping either way.
export VDB_DOCKER_OPT="-p ${VDB_HOST_PORT}:55555/tcp --expose ${VDB_HOST_PORT} -e RUST_LOG=info,databroker=debug,vehicle_data_broker=debug -e VEHICLEDATABROKER_DAPR_APP_ID=vdb --name ${VDB_CONTAINER}"
export SEAT_DOCKER_OPT="-p ${SEAT_HOST_PORT}:50051/tcp --expose ${SEAT_HOST_PORT} -e BROKER_ADDR=${DOCKER_VDB_ADDRESS} -e SIM=cansim -e SC_RPM=100 -e VEHICLEDATABROKER_DAPR_APP_ID=vdb --name ${SEAT_CONTAINER}"
export FEEDER_DOCKER_OPT="-e VDB_ADDRESS=${DOCKER_VDB_ADDRESS} -e LOG_LEVEL=info,databroker=debug,dbcfeeder=debug,dbcfeeder.broker_client=debug --name ${FEEDER_CONTAINER}"

export DOCKER_NETWORK="val-int" # "host"

export DOCKER_OPT=""

# limit docker log output to 100 lines / 5 min
#export DOCKER_LOG="-n 100"
[ -z "${DOCKER_LOG}" ] && export DOCKER_LOG="--since 5m -n 1000"

# output dir with results and logs
export RESULTS="/tmp/val-it-results"
a/integration_test/it-seat-move.sh b/integration_test/it-seat-move.sh new file mode 100755 index 0000000..253b310 --- /dev/null +++ b/integration_test/it-seat-move.sh @@ -0,0 +1,66 @@ +#!/bin/bash +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +# shellcheck disable=SC2181 +# shellcheck disable=SC2086 +# shellcheck disable=SC2230 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +set -e +# shellcheck source=/dev/null +source "${SCRIPT_DIR}/it-config" + +# parse 1st arg (optional as pos) +pos="$1" +[ -z "$pos" ] && pos="500" + +# pass extra args, e.g. "--wait" to wait reaching desired position +shift +args="$*" + +exec_seat_client() { + local pos="$1" + shift + local extra_args="$*" + #echo "#[$$]# Moving Seat to ${pos} ..." + echo "$ docker exec ${SEAT_CONTAINER} /app/bin/seat_svc_client ${extra_args} ${pos}" + docker exec "${SEAT_CONTAINER}" /app/bin/seat_svc_client ${extra_args} "${pos}" + #echo "#[$$]# Moving Seat to ${pos}. Done!" +} + +### Checks if container (name) is running +__check_container_state() { + local name="$1" + local verbose="$2" + RESULT=$(docker ps -a --filter name=${name} --format='{{.Names}}\t{{.Image}}\tstate:{{.State}}\t{{.Status}}\tNetwork:{{.Networks}} ({{.Ports}})') + [ "$verbose" = "1" ] && echo "$RESULT" 1>&2 + if echo "$RESULT" | grep -q "state:running"; then + return 0 + else + return 1 + fi +} + +${SCRIPT_DIR}/it-setup.sh start + +if ! __check_container_state ${VDB_CONTAINER} || + ! 
__check_container_state "${SEAT_CONTAINER}"; then + echo "Containers for IntegrationTest test are not running!" + exit 10 +fi + +echo "### Moving SEAT to ${pos}" +exec_seat_client ${args} "$pos" +exit $? diff --git a/integration_test/it-setup.sh b/integration_test/it-setup.sh new file mode 100755 index 0000000..7b4e355 --- /dev/null +++ b/integration_test/it-setup.sh @@ -0,0 +1,328 @@ +#!/bin/bash +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +# shellcheck disable=SC2181 +# shellcheck disable=SC2086 +# shellcheck disable=SC2230 +# shellcheck disable=SC2034 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +set -e + +# shellcheck source=/dev/null +source ${SCRIPT_DIR}/it-config + +### cleanup of IT containers and optionally images +cleanup() { + local force="$1" + # ensure containers are not running + echo "# Cleanup iteration test containers..." + if docker ps -a | grep "${VDB_CONTAINER}"; then + docker container rm -f "${VDB_CONTAINER}" + fi + if docker ps -a | grep "${SEAT_CONTAINER}"; then + docker container rm -f "${SEAT_CONTAINER}" + fi + if docker ps -a | grep "${FEEDER_CONTAINER}"; then + docker container rm -f "${FEEDER_CONTAINER}" + fi + if [ "$force" = "1" ]; then + echo "# Cleanup VAL ghcr images..." 
        docker image rm -f "${VDB_IMAGE}"
        docker image rm -f "${SEAT_IMAGE}"
        docker image rm -f "${FEEDER_IMAGE}"
    fi
}

# abort CODE MSG — print an error and terminate the script with CODE.
abort() {
    local code=$1
    local msg="$2"

    printf "[ERR:%d] Aborting: %s\n" ${code} "${msg}"
    # cleanup 0
    exit ${code}
}

### logins to $DOCKER_REPO + pull of images
# NOTE(review): echo text "images form" should read "images from" (runtime
# string, left unchanged here).
pull_images() {
    local force="$1"
    if [ -z "${DOCKER_REPO}" ]; then
        return 0
    fi
    # pull when forced, or when any of the three images is missing locally
    if [ "${force}" = "1" ] ||
        ! __check_docker_image "${VDB_IMAGE}" ||
        ! __check_docker_image "${SEAT_IMAGE}" ||
        ! __check_docker_image "${FEEDER_IMAGE}"; then
        echo "- Pulling images form ${DOCKER_REPO} (May need manual login)..."
        docker login "${DOCKER_REPO}"

        docker pull "${VDB_IMAGE}"
        docker pull "${SEAT_IMAGE}"
        docker pull "${FEEDER_IMAGE}"
    fi
}

# build_images FORCE — build local images from the sibling source trees and
# tag them with the configured image names (used when a *_TAG is "latest").
build_images() {
    local force="$1"
    if [ "$force" = "1" ] || ! __check_docker_image "${SEAT_IMAGE}"; then
        echo "# Building amd64/seat-service:latest ..."
        if cd ${SCRIPT_DIR}/../seat_service && ./docker-build.sh -l x86_64; then
            docker tag amd64/seat-service:latest ${SEAT_IMAGE}
        fi
    fi
    if [ "$force" = "1" ] || ! __check_docker_image "${VDB_IMAGE}"; then
        echo "# Building amd64/databroker:latest ..."
        if cd ${SCRIPT_DIR}/../vehicle_data_broker && ./docker-build.sh -l x86_64; then
            docker tag amd64/databroker:latest ${VDB_IMAGE}
        fi
    fi
    if [ "$force" = "1" ] || ! __check_docker_image "${FEEDER_IMAGE}"; then
        echo "# Building amd64/feeder_can:latest ..."
        if cd ${SCRIPT_DIR}/../feeder_can && ./docker-build.sh -l x86_64; then
            docker tag amd64/feeder_can:latest ${FEEDER_IMAGE}
        fi
    fi
}

### Checks if container (name) is running
__check_container_state() {
    local name="$1"
    local verbose="$2"
    RESULT=$(docker ps -a --filter name=${name} --format='{{.Names}}\t{{.Image}}\tstate:{{.State}}\t{{.Status}}\tNetwork:{{.Networks}} ({{.Ports}})')
    [ "$verbose" = "1" ] && echo "$RESULT" 1>&2
    if echo "$RESULT" | grep -q "state:running"; then
        return 0
    else
        return 1
    fi
}

### Checks if docker image:tag is locally available
__check_docker_image() {
    local name="$1"
    docker_images=$(docker images --format '{{.Repository}}:{{.Tag}}')
    if echo "${docker_images}" | grep -q "${name}"; then
        return 0
    else
        return 1
    fi
}

### Checks if $VDB_CONTAINER and $SEAT_CONTAINER are both running
check_it_containers() {
    local verbose="$1"

    local seat_err=0
    local vdb_err=0
    local feed_err=0
    __check_container_state "${VDB_CONTAINER}" "${verbose}" || vdb_err=1
    __check_container_state "${SEAT_CONTAINER}" "${verbose}" || seat_err=1
    __check_container_state "${FEEDER_CONTAINER}" "${verbose}" || feed_err=1

    if [ ${vdb_err} -ne 0 ] || [ ${seat_err} -ne 0 ] || [ ${feed_err} -ne 0 ]; then
        return 1
    fi
    return 0
}

### Starts $VDB_CONTAINER and $SEAT_CONTAINER
# Returns a per-service rc (1=vdb, 2=seat, 3=feeder) if a start or state
# check failed; later failures overwrite earlier ones.
start_containers() {
    echo "- Running ${VDB_CONTAINER} ..."
    # DataBroker container options
    rc=0
    docker run -d ${DOCKER_OPT} ${VDB_DOCKER_OPT} "${VDB_IMAGE}" || rc=1

    echo "- Running ${SEAT_CONTAINER} ..."
    # SeatService container options. BROKER_ADDR is needed to reach it-databroker ports within val-test network
    docker run -d ${DOCKER_OPT} ${SEAT_DOCKER_OPT} "${SEAT_IMAGE}" || rc=2

    echo "- Running ${FEEDER_CONTAINER} ..."
    # Feeder container options. VDB_ADDRESS is needed to reach it-databroker ports within val-test network
    docker run -d ${DOCKER_OPT} ${FEEDER_DOCKER_OPT} "${FEEDER_IMAGE}" || rc=3

    echo
    __check_container_state "${VDB_CONTAINER}" 1 || rc=1
    __check_container_state "${SEAT_CONTAINER}" 1 || rc=2
    __check_container_state "${FEEDER_CONTAINER}" 1 || rc=3
    echo

    return ${rc}
}

# network_setup NETWORK — create the user-defined docker network once;
# "host" needs no setup.
network_setup() {
    local network="$1"

    if [ "$network" = "host" ]; then
        return 0
    fi
    if ! docker network ls | grep -q "${network}"; then
        echo "- Create ${network} docker network"
        docker network create ${network}
        return $?
    fi
}

# it_init FORCE — prepare results dir, ensure docker, get images, setup network.
it_init() {
    local force="$1"

    # RESULTS dir is defined in it-config
    # shellcheck disable=SC2153
    if [ -n "${RESULTS}" ]; then
        [ -d "${RESULTS}" ] && rm -rf "${RESULTS}"
        mkdir -p "${RESULTS}"
    fi

    if [ -z "$(which docker)" ]; then
        abort 1 "Please install docker!"
    fi

    # auto pull/build images (only if missing)
    if echo "${VDB_TAG}${SEAT_TAG}${FEEDER_TAG}" | grep -q "latest"; then
        build_images "${force}"
    else
        pull_images "${force}"
    fi

    network_setup "${DOCKER_NETWORK}"
    return 0
}

# it_start FORCE — full cycle: cleanup, init, start containers.
it_start() {
    local force="$1"
    set -e

    # initial cleanup for images/containers
    cleanup "$force"

    # ensure images are pulled, containers are started
    it_init "$force"

    start_containers
    return $?
}

# it_stop — remove the containers only (keeps images).
it_stop() {
    set -e

    # cleanup for containers only
    cleanup 0
    return $?
}

# it_status LOGS — print image/container overview; with LOGS=1 also dump
# recent container logs ($DOCKER_LOG limits the range).
# NOTE(review): the script runs under top-level `set -e`; the bare
# `check_it_containers 1` call and the `docker images | grep` pipeline
# return non-zero when containers/images are missing, which aborts the
# status output early — confirm whether that is intended.
it_status() {
    local logs="$1"
    echo
    echo "### Docker Images"
    docker images --format 'table {{.Repository}}:{{.Tag}}\t{{.Size}}\t{{.CreatedSince}}' | grep "${DOCKER_REPO}/eclipse/kuksa\.val"
    echo "-----------------------"
    echo "### Docker Containers"
    check_it_containers 1
    echo "-----------------------"
    echo
    if [ "${logs}" = "1" ]; then
        echo "### [${VDB_CONTAINER}] Logs:"
        docker logs $DOCKER_LOG "${VDB_CONTAINER}"
        echo "-----------------------"
        echo
        echo "### [${SEAT_CONTAINER}] Logs:"
        docker logs $DOCKER_LOG "${SEAT_CONTAINER}"
        echo "-----------------------"
        echo
        echo
        echo "### [${FEEDER_CONTAINER}] Logs:"
        docker logs $DOCKER_LOG "${FEEDER_CONTAINER}"
        echo "-----------------------"
        echo
    fi
}

# it_cleanup FORCE — remove containers; with FORCE=1 also remove images.
it_cleanup() {
    local force="$1"
    cleanup "$force"
}

# it_usage — print help text.
it_usage() {
    # {--force}

    echo "Usage: $0 {Options} [ init | start | stop | status | cleanup ]"
    echo
    echo "Options:"
    echo "  --force  for 'init' and 'cleanup' commands, forces rebuilding/pulling/removing VAL images"
    echo "  --logs   for 'status' command, shows docker logs per var container"
    echo "  --help   Prints this message and exit."
    echo
    echo "Commands:"
    echo "  init     Pulls VAL images from a repository or builds them if missing (use --force to force init)"
    echo "  start    Starts VAL Containers (also implies init)"
    echo "  stop     Stops VAL Containers"
    echo "  status   Shows status of VAL Containers. Use --log to see last logs from VAL containers"
    echo "  cleanup  Removes VAL Containers. Use --force to also remove configured VAL images"
}

# FIXME: BROKER_ADDR=${VDB_CONTAINER}:55555 should be changed in case of host network...
export DOCKER_OPT="--network ${DOCKER_NETWORK} ${DOCKER_OPT}"

# parse options in $FORCE and $CMD
CMD=""
FORCE=0
LOGS=0
while [ -n "$1" ]; do
    if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
        it_usage
        exit 0
    elif [ "$1" = "--force" ] || [ "$1" = "-f" ]; then
        FORCE=1
    elif [ "$1" = "--logs" ] || [ "$1" = "-l" ]; then
        LOGS=1
    else
        CMD="$1"
    fi
    shift
done

rc=0
case "${CMD}" in
init) # handle source
    it_init $FORCE
    rc=$?
    ;;
start)
    it_start $FORCE
    rc=$?
    ;;
stop)
    # NOTE(review): it_stop takes no parameters; $FORCE is ignored here.
    it_stop $FORCE
    rc=$?
    ;;
status)
    it_status $LOGS
    rc=$?
    ;;
cleanup)
    it_cleanup $FORCE
    rc=$?
    ;;
help)
    it_usage
    exit 0
    ;;
*)
    echo "Invalid argument: ${CMD}"
    it_usage
    exit 1
    ;;
esac

exit $rc

# ---------------------------------------------------------------------------
# integration_test/requirements-dev.txt (new file)
# NOTE(review): protobuf is hard-pinned (3.19.3) while grpcio-tools is a
# floor constraint — keep these compatible when bumping either.
# ---------------------------------------------------------------------------
grpcio>=1.26.0
grpcio-tools>=1.26.0
protobuf==3.19.3
pydocstyle >= 6.1.1
mypy >= 0.910
mypy-protobuf >= 3.0.0
types-protobuf >= 0.1.14
grpc-stubs >= 1.24.7
pylint
pytest
pytest-ordering
pytest-asyncio

# ---------------------------------------------------------------------------
# integration_test/requirements.txt (new file) — runtime deps, unpinned
# ---------------------------------------------------------------------------
grpcio
protobuf
types-protobuf
pytest
pytest-ordering
pytest-asyncio

# ---------------------------------------------------------------------------
# integration_test/setup.py (begins here; continues below)
# ---------------------------------------------------------------------------
# /********************************************************************************
# * Copyright (c) 2022 Contributors to the Eclipse Foundation
# *
# * See the NOTICE file(s) distributed with this work for additional
# * information regarding copyright ownership.
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +from setuptools import find_packages, setup + +setup(name="val-integration", packages=find_packages()) diff --git a/integration_test/task-seat-move.sh b/integration_test/task-seat-move.sh new file mode 100755 index 0000000..0c5fea0 --- /dev/null +++ b/integration_test/task-seat-move.sh @@ -0,0 +1,52 @@ +#!/bin/bash +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +# shellcheck disable=SC2181 +# shellcheck disable=SC2086 +# shellcheck disable=SC2230 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +DAPR_VDB="vehicledatabroker" +DAPR_SEATSVC="seatservice" + +set -e + +# parse 1st arg (optional as pos) +pos="$1" +[ -z "$pos" ] && pos="500" + +# pass extra args, e.g. "--wait" to wait reaching desired position +shift +args="$*" + +### Checks if dapr application (name) is running +__check_dapr_app() { + local name="$1" + RESULT=$(dapr list | grep "$name") + if [ -n "$RESULT" ]; then + return 0 + else + return 1 + fi +} + +if ! __check_dapr_app "$DAPR_VDB" || ! 
__check_dapr_app "$DAPR_SEATSVC"; then + echo "Please run vs-code tasks: [run-vehicledatabroker, run-seatservice]" + exit 10 +fi + +echo "### Moving SEAT to ${pos}" +$SCRIPT_DIR/../.vscode/scripts/run-seatservice-cli.sh --task "$pos" $args +exit $? diff --git a/integration_test/test_feeder.py b/integration_test/test_feeder.py new file mode 100644 index 0000000..652ae81 --- /dev/null +++ b/integration_test/test_feeder.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python3 +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +import json +import logging +import os + +import pytest +from gen_proto.sdv.databroker.v1.types_pb2 import Datapoint +from vdb_helper import VDBHelper + +logger = logging.getLogger(__name__) +logger.setLevel(os.getenv("LOG_LEVEL", "DEBUG")) + +# Env USE_DAPR forces usage of vscode tasks and scripts using 'dapr run' with predefined ports +USE_DAPR = os.getenv("USE_DAPR", "1") != "0" + +if USE_DAPR: + DEFAULT_VDB_ADDRESS = "localhost:55555" +else: + DEFAULT_VDB_ADDRESS = "localhost:35555" + +VDB_ADDRESS = os.environ.get("VDB_ADDRESS", DEFAULT_VDB_ADDRESS) + + +@pytest.fixture +async def setup_helper() -> VDBHelper: + logger.info("Using VDB_ADDR={}".format(VDB_ADDRESS)) + helper = VDBHelper(VDB_ADDRESS) + return helper + + +@pytest.mark.asyncio +async def test_feeder_vdb_connection() -> None: + logger.info("Connecting to VehicleDataBrocker {}".format(VDB_ADDRESS)) + helper = VDBHelper(VDB_ADDRESS) + 
logger.info("VDBHelper._address = {}".format(helper._address)) + await helper.close() + + +@pytest.mark.asyncio +async def test_feeder_metadata_registered(setup_helper: VDBHelper) -> None: + helper = setup_helper + feeder_names = [ + "Vehicle.OBD.Speed", + "Vehicle.Powertrain.Transmission.Gear", + "Vehicle.Chassis.ParkingBrake.IsEngaged", + "Vehicle.OBD.EngineLoad", + ] + + meta = await helper.get_vdb_metadata(feeder_names) + logger.debug( + "# get_vdb_metadata({}) -> \n{}".format( + feeder_names, str(meta).replace("\n", " ") + ) + ) + + assert len(meta) > 0, "VDB Metadata is empty" # nosec B101 + assert len(meta) == len( # nosec B101 + feeder_names + ), "Filtered meta with unexpected size: {}".format(meta) + meta_list = helper.vdb_metadata_to_json(meta) + logger.debug("get_vdb_metadata() --> \n{}".format(json.dumps(meta_list, indent=2))) + + meta_names = [d["name"] for d in meta_list] + + for name in feeder_names: + assert name in meta_names, "{} not registered!".format(name) # nosec B101 + + name_reg = meta_list[meta_names.index(name)] + + assert len(name_reg) == 4 and name_reg["name"] == name # nosec B101 + logger.info("[feeder] Found metadata: {}".format(name_reg)) + # TODO: check for expected types? 
+ # assert ( # nosec B101 + # name_reg["data_type"] == DataType.UINT32 + # ), "{} datatype is {}".format(name, name_reg["data_type"]) + + await helper.close() + + +@pytest.mark.asyncio +async def test_feeder_events(setup_helper: VDBHelper) -> None: + helper: VDBHelper = setup_helper + + timeout = 3 + name1 = "Vehicle.OBD.Speed" + name2 = "Vehicle.OBD.EngineLoad" + alias1 = "speed" + alias2 = "load" + + query = "SELECT {} as {}, {} as {}".format(name1, alias1, name2, alias2) + + events = [] + # inner function for collecting subscription events + + def inner_callback(name: str, dp: Datapoint): + dd = helper.datapoint_to_dict(name, dp) + events.append(dd) + + logger.info("# subscribing('{}', timeout={})".format(query, timeout)) + + await helper.subscribe_datapoints( + query, timeout=timeout, sub_callback=inner_callback + ) + logger.debug("Received events:{}".format(events)) + + assert len(events) > 0, "No events from feeder for {} sec.".format( # nosec B101 + timeout + ) + + # list of received names + event_names = set([e["name"] for e in events]) + # list of received values + alias_values1 = set([e["value"] for e in events if e["name"] == alias1]) + alias_values2 = set([e["value"] for e in events if e["name"] == alias2]) + + logger.debug(" --> names : {}".format(event_names)) + # event_values = [e['value'] for e in events] + # logger.debug(" --> values : {}".format(event_values)) + # logger.debug(" --> <{}> : {}".format(name, event_values_name)) + + assert set([alias1, alias2]) == set( # nosec B101 + event_names + ), "Unexpected event aliases received: {}".format(event_names) + + assert ( # nosec B101 + len(alias_values1) > 1 + ), "{} values not changing: {}. Is feeder running?".format(alias1, alias_values1) + + assert ( # nosec B101 + len(alias_values2) > 1 + ), "{} values not changing: {}. 
Is feeder running?".format(alias2, alias_values2) + + await helper.close() + + +if __name__ == "__main__": + pytest.main(["-vvs", "--log-cli-level=INFO", os.path.abspath(__file__)]) diff --git a/integration_test/test_val_seat.py b/integration_test/test_val_seat.py new file mode 100644 index 0000000..1000512 --- /dev/null +++ b/integration_test/test_val_seat.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python3 +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +import json +import logging +import os +import subprocess # nosec + +import grpc +import pytest +from gen_proto.sdv.databroker.v1.types_pb2 import Datapoint, DataType +from vdb_helper import VDBHelper + +logger = logging.getLogger(__name__) +logger.setLevel(os.getenv("LOG_LEVEL", "INFO")) + +# Env USE_DAPR forces usage of vscode tasks and scripts using 'dapr run' with predefined ports +USE_DAPR = os.getenv("USE_DAPR", "1") != "0" + +if USE_DAPR: + DEFAULT_VDB_ADDRESS = "localhost:55555" + DEFAULT_SCRIPT_SEAT_MOVE = "task-seat-move.sh" +else: + DEFAULT_VDB_ADDRESS = "localhost:35555" + DEFAULT_SCRIPT_SEAT_MOVE = "it-seat-move.sh" + +VDB_ADDRESS = os.environ.get("VDB_ADDRESS", DEFAULT_VDB_ADDRESS) + +SCRIPT_SEAT_MOVE = os.getenv( + "SCRIPT_SEAT_MOVE", + os.path.join(os.path.dirname(__file__), DEFAULT_SCRIPT_SEAT_MOVE), +) + + +def execute_script(args: list) -> None: + logger.info("$ {}".format(" ".join(args))) + try: + process = subprocess.run(args, check=True) # nosec + # , shell=True, 
capture_output=True, check=True) + logger.debug("rc:{}".format(process.returncode)) + # logger.debug("-->>\n[out] {}\n\[err]{}\n".format(process.stdout, process.stderr)) + except Exception as ex: + logging.exception(ex) + + +@pytest.fixture +async def setup_helper() -> VDBHelper: + logger.info("Using VDB_ADDR={}".format(VDB_ADDRESS)) + helper = VDBHelper(VDB_ADDRESS) + return helper + + +@pytest.mark.asyncio +async def test_vdb_metadata_get(setup_helper: VDBHelper) -> None: + helper = setup_helper + name = os.getenv("TEST_NAME", "Vehicle.Cabin.Seat.Row1.Pos1.Position") + + meta = await helper.get_vdb_metadata() + # logger.debug("# get_vdb_metadata() -> \n{}".format(str(meta).replace('\n', ' '))) + + assert len(meta) > 0, "VDB Metadata is empty" # nosec B101 + meta_list = helper.vdb_metadata_to_json(meta) + logger.debug("get_vdb_metadata()->\n{}".format(json.dumps(meta_list, indent=2))) + + meta_names = [d["name"] for d in meta_list] + + assert name in meta_names, "{} not registered!".format(name) # nosec B101 + name_reg = meta_list[meta_names.index(name)] + + assert len(name_reg) == 4 and name_reg["name"] == name # nosec B101 + logger.info("Found metadata: {}".format(name_reg)) + + assert ( # nosec B101 + name_reg["data_type"] == DataType.UINT32 + ), "{} datatype is {}".format(name, name_reg["data_type"]) + await helper.close() + + +@pytest.mark.asyncio +async def test_subscribe_seat_pos_0(setup_helper: VDBHelper) -> None: + helper: VDBHelper = setup_helper + name = os.getenv("TEST_NAME", "Vehicle.Cabin.Seat.Row1.Pos1.Position") + query = "SELECT {}".format(name) + + start_value = int(os.getenv("TEST_START_VALUE", "500")) + expected_value = int(os.getenv("TEST_VALUE", "0")) + timeout = int(os.getenv("TEST_TIMEOUT", "10")) + + # initiate seat move to 42 + logger.info(" -- moving seat to initial pos: {} (sync)".format(start_value)) + + # sync move to predefined pos + execute_script([SCRIPT_SEAT_MOVE, str(start_value), "-w"]) + + events = [] + # inner function for 
collecting subscription events + + def inner_callback(name: str, dp: Datapoint): + dd = helper.datapoint_to_dict(name, dp) + events.append(dd) + + logger.info(" -- moving seat to test position {} (async)...".format(expected_value)) + execute_script([SCRIPT_SEAT_MOVE, str(expected_value)]) + + logger.debug( + "\n# subscribing('{}', timeout={}), expecting:{}".format( + query, timeout, expected_value + ) + ) + await helper.subscribe_datapoints( + query, timeout=timeout, sub_callback=inner_callback + ) + + assert ( # nosec B101 + len(events) > 0 + ), "Not received events for '{}' in {} sec.".format(name, timeout) + # list of received names + event_names = set([e["name"] for e in events]) + # list of received values + event_values_name = [e["value"] for e in events if e["name"] == name] + + logger.debug(" --> names : {}".format(event_names)) + # event_values = [e['value'] for e in events] + # logger.debug(" --> values : {}".format(event_values)) + logger.debug(" --> <{}> : {}".format(name, event_values_name)) + + assert name in event_names, "{} event not received! {}".format( # nosec B101 + name, event_names + ) + + assert ( # nosec B101 + expected_value in event_values_name + ), "{} value {} missing! 
{}".format(name, expected_value, event_values_name) + + await helper.close() + + +@pytest.mark.asyncio +async def test_subscribe_seat_pos_where_eq(setup_helper: VDBHelper) -> None: + helper = setup_helper + + name = os.getenv("TEST_NAME", "Vehicle.Cabin.Seat.Row1.Pos1.Position") + expected_value = int(os.getenv("TEST_VALUE", "1000")) + timeout = int(os.getenv("TEST_TIMEOUT", "10")) + + query = "SELECT {} where {} = {}".format(name, name, expected_value) + + events = [] + # inner function for collecting subscription events + + def inner_callback(name: str, dp: Datapoint): + dd = helper.datapoint_to_dict(name, dp) + events.append(dd) + + logger.info(" -- moving seat to test position {} (async)...".format(expected_value)) + execute_script([SCRIPT_SEAT_MOVE, str(expected_value)]) + + logger.debug( + "\n# subscribing('{}', timeout={}), expecting:{}".format( + query, timeout, expected_value + ) + ) + await helper.subscribe_datapoints( + query, timeout=timeout, sub_callback=inner_callback + ) + + assert ( # nosec B101 + len(events) > 0 + ), "Not received subscription events for '{}' in {} sec.".format(name, timeout) + + # list of received names + event_names = set([e["name"] for e in events]) + # list of received values + event_values_name = [e["value"] for e in events if e["name"] == name] + + logger.debug(" --> names : {}".format(event_names)) + # event_values = [e['value'] for e in events] + # logger.debug(" --> values : {}".format(event_values)) + logger.debug(" --> <{}> : {}".format(name, event_values_name)) + + assert name in event_names, "{} event not received! {}".format( # nosec B101 + name, event_names + ) + + assert ( # nosec B101 + expected_value in event_values_name + ), "{} value {} missing! 
{}".format(name, expected_value, event_values_name) + + assert ( # nosec B101 + len(set(event_values_name)) == 1 + ), "Should get only 1 value for {}, got: {}".format(name, event_values_name) + await helper.close() + + +@pytest.mark.asyncio +async def test_subscribe_seat_pos_where_error(setup_helper: VDBHelper) -> None: + helper = setup_helper + + name = os.getenv("TEST_NAME", "Vehicle.Cabin.Seat.Row1.Pos1.Position") + expected_value = int(os.getenv("TEST_VALUE", "-42")) + timeout = int(os.getenv("TEST_TIMEOUT", "10")) + query = "SELECT {} where ".format(name) + + events = [] + + # inner function for collecting subscription events + + def inner_callback(name: str, dp: Datapoint): + dd = helper.datapoint_to_dict(name, dp) + events.append(dd) + + with pytest.raises(grpc.RpcError): + logger.debug( + "\n# subscribing('{}', timeout={}), expecting:{}".format( + query, timeout, expected_value + ) + ) + await helper.subscribe_datapoints( + query, timeout=timeout, sub_callback=inner_callback + ) + + assert ( # nosec B101 + len(events) == 0 + ), "Should not receive events for query:'{}'. Got {}".format(query, events) + + await helper.close() + + +async def main() -> None: + log_level = os.environ.get("LOG_LEVEL", "INFO") + logging.basicConfig(format="<%(levelname)s>\t%(message)s", level=log_level) + + +if __name__ == "__main__": + # execute_script([SCRIPT_SEAT_MOVE, "500", "-w"]) + pytest.main(["-vvs", "--log-cli-level=INFO", os.path.abspath(__file__)]) diff --git a/integration_test/update-protobuf.sh b/integration_test/update-protobuf.sh new file mode 100755 index 0000000..4172c35 --- /dev/null +++ b/integration_test/update-protobuf.sh @@ -0,0 +1,53 @@ +#!/bin/bash +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. 
+# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ +# shellcheck disable=SC2086 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +cd "$SCRIPT_DIR" || exit 1 + +GEN_DIR="./gen_proto" + +[ -d "$GEN_DIR" ] || mkdir -p "$GEN_DIR" + +DATABROKER_PROTO="../vehicle_data_broker/proto" + +if [ ! -d "$DATABROKER_PROTO" ]; then + echo "Warning! Can't find DataBroker proto dir in: $DATABROKER_PROTO" + exit 1 +fi + +# make sure deps are installed +echo "# Installing requirements-dev.txt ..." +pip3 install -q -r requirements-dev.txt +#pip3 install -q -r requirements.txt + +set -xe +#protoc-gen-mypy \ +PROTO_FILES=$(find "$DATABROKER_PROTO" -name '*.proto') + +echo "# Generating grpc stubs from: $DATABROKER_PROTO ..." +python3 -m grpc_tools.protoc \ + --python_out="$GEN_DIR" \ + --grpc_python_out="$GEN_DIR" \ + --proto_path="$DATABROKER_PROTO" \ + --mypy_out="$GEN_DIR" \ + $PROTO_FILES +set +x + +echo "# Generated files:" +find "$GEN_DIR" -type f -name '*.py' + +echo "# Replacing packages in $GEN_DIR" +find "$GEN_DIR" -type f -name '*.py' -print -exec sed -i 's/^from sdv.databroker.v1/from gen_proto.sdv.databroker.v1/g' {} ';' +find "$GEN_DIR" -type f -name '*.pyi' -print -exec sed -i 's/^import sdv.databroker.v1/import gen_proto.sdv.databroker.v1/g' {} ';' diff --git a/integration_test/vdb_helper.py b/integration_test/vdb_helper.py new file mode 100644 index 0000000..bb2b82d --- /dev/null +++ b/integration_test/vdb_helper.py @@ -0,0 +1,315 @@ +# /******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright 
ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + +# Disable name checks due to proto generated classes +# pylint: disable=C0103 + +import asyncio +import logging +import os +import signal +from typing import Callable, Dict, Mapping, Optional + +import grpc +from gen_proto.sdv.databroker.v1.broker_pb2 import ( + GetDatapointsRequest, + GetMetadataRequest, + SubscribeRequest, +) +from gen_proto.sdv.databroker.v1.broker_pb2_grpc import BrokerStub +from gen_proto.sdv.databroker.v1.collector_pb2 import ( + RegisterDatapointsRequest, + RegistrationMetadata, + UpdateDatapointsRequest, +) +from gen_proto.sdv.databroker.v1.collector_pb2_grpc import CollectorStub +from gen_proto.sdv.databroker.v1.types_pb2 import ChangeType, Datapoint, DataType + +logger = logging.getLogger(__name__) + + +class VDBHelper: + """ + VDBHelper wraps collector and broker APIs of the Vehicle Data Broker. + """ + + def __init__(self, address: str) -> None: + + if os.getenv("DAPR_GRPC_PORT") is not None: + self._address = "127.0.0.1:{}".format(os.getenv("DAPR_GRPC_PORT")) + else: + self._address = address + + logger.info("VDBHelper connecting to {}".format(self._address)) + # WARNING: always await grpc response! 
+ self._channel = grpc.aio.insecure_channel(self._address) # type: ignore + + self._collector_stub = CollectorStub(self._channel) + self._broker_stub = BrokerStub(self._channel) + self._grpc_metadata = self.default_metadata() + logger.debug("VDBHelper using metadata {}".format(self._grpc_metadata)) + self._ids: Dict[str, int] = None # type: ignore + self._vdb_metadata = None + + async def close(self) -> None: + """Closes runtime gRPC channel.""" + if self._channel: + await self._channel.close() + + def default_metadata(self): + if os.environ.get("VEHICLEDATABROKER_DAPR_APP_ID"): + return ("dapr-app-id", os.environ.get("VEHICLEDATABROKER_DAPR_APP_ID")) + return None + + def __enter__(self) -> "VDBHelper": + return self + + def __exit__(self, exc_type, exc_value, traceback) -> None: + asyncio.run_coroutine_threadsafe(self.close(), asyncio.get_event_loop()) + + async def __register_datapoints(self, datapoints: list): + response = await self._collector_stub.RegisterDatapoints( + RegisterDatapointsRequest(list=datapoints), metadata=self._grpc_metadata + ) + return response + + async def __update_datapoints(self, datapoints: Mapping[int, Datapoint]): + response = await self._collector_stub.UpdateDatapoints( + UpdateDatapointsRequest(datapoints=datapoints), metadata=self._grpc_metadata + ) + return response + + async def __get_datapoints(self, datapoints: list): + response = await self._broker_stub.GetDatapoints( + GetDatapointsRequest(datapoints=datapoints), metadata=self._grpc_metadata + ) + return response + + async def get_vdb_metadata(self, names=[]): + """Requests Metadata from VDB, allows for optional list of names + + Args: + names (list, optional): List of names to get. Defaults to []. 
+ + Returns: + _type_: list, can be converted to json using parse_vdb_metadata() + """ + response = await self._broker_stub.GetMetadata( + GetMetadataRequest(names=names), metadata=self._grpc_metadata + ) + return response.list + + def vdb_metadata_to_json(self, metadata) -> list: + """Parses metadata.list to json format + + Args: + metadata (_type_): MetaDataReply.list + + Returns: + list: Json objects + """ + return [ + { + "id": m.id, + "name": m.name, + "data_type": m.data_type, + "description": m.description, + } + for m in metadata + ] + + def datapoint_to_dict(self, name: str, dp: Datapoint) -> dict: + """Convert Datapoint object to dictionary + + Args: + name (str): Datapoint Name + dp (Datapoint): Datapoint + + Returns: + dict: with keys "name", "ts", "value", "type" + """ + value_type = dp.WhichOneof("value") + if value_type: + # try to get directly dp.${which_one} attribute + value = getattr(dp, value_type) + ts = ( + dp.timestamp.seconds + int(dp.timestamp.nanos / 10**6) / 1000 + ) # round to msec + result = {"name": name, "ts": ts, "value": value, "type": value_type} + return result + + async def get_vdb_datapoints(self, datapoints=None): + if datapoints is None: + await self.__initialize_metadata() + datapoints = self._ids.keys() + + response = await self.__get_datapoints(datapoints=datapoints) + # map datapoints = 1; + return response + + async def __initialize_metadata(self, names=[]) -> None: + if self._ids is None: + self._ids = {} + response = await self._broker_stub.GetMetadata( + [], metadata=self._grpc_metadata + ) + self._vdb_metadata = response.list + + for item in response.list: + self._ids[item.name] = item.id + + async def __get_or_create_datapoint_id_by_name( + self, name: str, data_type: DataType + ): + await self.__initialize_metadata() + + key_list = self._ids.keys() + if name not in key_list: + response = await self.register_datapoint(name, data_type) + datapoint_id = int(response) + self._ids[name] = datapoint_id + + return 
self._ids[name] + + async def register_datapoint(self, name: str, data_type: DataType) -> int: + await self.__initialize_metadata() + + registration_metadata = RegistrationMetadata() + registration_metadata.name = name + registration_metadata.data_type = data_type + registration_metadata.description = "" + registration_metadata.change_type = ChangeType.CONTINUOUS + + response = await self.__register_datapoints(datapoints=[registration_metadata]) + metadata_id = int(response.results[name]) + self._ids[name] = metadata_id + return metadata_id + + async def set_int32_datapoint(self, name: str, value: int): + datapoint = Datapoint() + datapoint.int32_value = value + datapoint_id = await self.__get_or_create_datapoint_id_by_name( + name, DataType.INT32 # type: ignore + ) + return await self.__update_datapoints({datapoint_id: datapoint}) + + async def set_uint32_datapoint(self, name: str, value: int): + datapoint = Datapoint() + datapoint.uint32_value = value + datapoint_id = await self.__get_or_create_datapoint_id_by_name( + name, DataType.UINT32 # type: ignore + ) + return await self.__update_datapoints({datapoint_id: datapoint}) + + async def set_float_datapoint(self, name: str, value: float): + datapoint = Datapoint() + datapoint.float_value = value + datapoint_id = await self.__get_or_create_datapoint_id_by_name( + name, DataType.FLOAT # type: ignore + ) + return await self.__update_datapoints({datapoint_id: datapoint}) + + def __get_grpc_error(self, err: grpc.RpcError) -> str: + status_code = err.code() + return "grpcError[Status:{} {}, details:'{}']".format( + status_code.name, status_code.value, err.details() + ) + + async def subscribe_datapoints( + self, + query: str, + sub_callback: Callable[[str, Datapoint], None], + timeout: Optional[int] = None, + ) -> None: + try: + request = SubscribeRequest(query=query) + logger.info("broker.Subscribe('{}')".format(query)) + response = self._broker_stub.Subscribe( + request, metadata=self._grpc_metadata, timeout=timeout 
+ ) + # NOTE: async before iteration is crucial here with aio.channel! + async for subscribe_reply in response: + logger.debug("Streaming SubscribeReply ...") + """ from broker.proto: + message SubscribeReply { + // Contains the fields specified by the query. + // If a requested data point value is not available, the corresponding + // Datapoint will have it's respective failure value set. + map fields = 1; + }""" + if not hasattr(subscribe_reply, "fields"): + raise Exception("Missing 'fields' in {}".format(subscribe_reply)) + + logger.debug("SubscribeReply.{}".format(subscribe_reply.fields)) + for name in subscribe_reply.fields: + dp = subscribe_reply.fields[name] + try: + logger.debug("Calling sub_callback({}, dp:{})".format(name, dp)) + sub_callback(name, dp) + except Exception: + logging.exception("sub_callback() error", exc_info=True) + pass + logger.debug("Streaming SubscribeReply done...") + + except grpc.RpcError as e: + if ( + e.code() == grpc.StatusCode.DEADLINE_EXCEEDED + ): # expected code if we used timeout, just stop subscription + logger.debug("Exitting after timeout: {}".format(timeout)) + else: + logging.error( + "broker.Subscribe({}) failed!\n --> {}".format( + query, self.__get_grpc_error(e) + ) + ) + raise e + except Exception: + logging.exception("broker.Subscribe() error", exc_info=True) + + +def __on_subscribe_event(name: str, dp: Datapoint) -> None: + value_type = dp.WhichOneof("value") + if value_type: + # try to get directly dp.${which_one} attribute + value = getattr(dp, value_type) + ts = ( + dp.timestamp.seconds + int(dp.timestamp.nanos / 10**6) / 1000 + ) # round to msec + + print( + "#SUB# name:{}, value:{}, value_type:{}, ts:{}".format( + name, value, value_type, ts + ), + flush=True, + ) + + +async def main() -> None: + LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO") + logging.basicConfig(format="<%(levelname)s>\t%(message)s", level=LOG_LEVEL) + + vdb_addr = os.environ.get("VDB_ADDR", "localhost:55555") + query = 
os.environ.get("QUERY", "SELECT Vehicle.Cabin.Seat.Row1.Pos1.Position") + helper = VDBHelper(vdb_addr) + + await helper.subscribe_datapoints( + query, sub_callback=__on_subscribe_event, timeout=1 + ) + await helper.close() + + +if __name__ == "__main__": + LOOP = asyncio.get_event_loop() + LOOP.add_signal_handler(signal.SIGTERM, LOOP.stop) + LOOP.run_until_complete(main()) + LOOP.close() diff --git a/prepare_release.sh b/prepare_release.sh new file mode 100755 index 0000000..f2a9a97 --- /dev/null +++ b/prepare_release.sh @@ -0,0 +1,46 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +set -e + +if [ "$#" -ne 1 ]; then + echo "Usage: $0 " + exit 1 +fi + +VERSION_REGEX="[0-9]+\.[0-9]+(\.[0-9]+)?" +VERSION="$1" + +if [ "$(echo "$1" | sed -E "s/$VERSION_REGEX//")" ]; then + echo " should be of the form MAJOR.MINOR[.PATCH]" + exit 1 +fi + +REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)" + +WORKFLOWS_ROOT="$REPO_ROOT/.github/workflows" +# Update workflow versions. +sed -i -E "s/(^.*):v${VERSION_REGEX}(.*)$/\1:v${VERSION}/" \ + "$WORKFLOWS_ROOT/seat_service_build.yml" \ + "$WORKFLOWS_ROOT/seat_service_docu_build.yml" \ + "$WORKFLOWS_ROOT/seat_service_release.yml" \ + "$WORKFLOWS_ROOT/seat_service_seatctrl_test.yml" + +DOCKERDEV_ROOT="$REPO_ROOT/.devcontainer" +# Update docker dev files. 
+sed -i -E "s/(^.*):v${VERSION_REGEX}(.*)$/\1:v${VERSION}/" \ + "$DOCKERDEV_ROOT/Dockerfile" + +# Create release commit and tag it +#git commit -a -m "Release ${VERSION}" +#git tag -a "v${VERSION}" -m "Release ${VERSION}" diff --git a/seat_service/.gitignore b/seat_service/.gitignore new file mode 100644 index 0000000..2fd81b7 --- /dev/null +++ b/seat_service/.gitignore @@ -0,0 +1,6 @@ +build*/ +docs/doxygen/out/* +docs/conan/out/* +src/**/*.proto +target/ +*.tar.gz \ No newline at end of file diff --git a/seat_service/CMakeLists.txt b/seat_service/CMakeLists.txt new file mode 100644 index 0000000..d7ff207 --- /dev/null +++ b/seat_service/CMakeLists.txt @@ -0,0 +1,39 @@ +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +cmake_minimum_required(VERSION 3.5.1) + +# Project +project(SdvHAL C CXX) +# use conan as dependency management +include(${CMAKE_BINARY_DIR}/conan_paths.cmake) + +option(SDV_BUILD_TESTING "Build with Tests" ON) + +message("--- CMAKE_SOURCE_DIR = ${CMAKE_SOURCE_DIR}") + +if(MSVC) + add_definitions(-D_WIN32_WINNT=0x600) +endif() + +# enable testing +if (SDV_BUILD_TESTING) + message(STATUS "Build tests ON") + enable_testing() + include(CTest) + find_package(GTest) +else() + message(STATUS "Build tests OFF") +endif() + +add_subdirectory(src) + diff --git a/seat_service/Dockerfile b/seat_service/Dockerfile new file mode 100644 index 0000000..81c6503 --- /dev/null +++ b/seat_service/Dockerfile @@ -0,0 +1,61 @@ +# 
/******************************************************************************** +# * Copyright (c) 2022 Contributors to the Eclipse Foundation +# * +# * See the NOTICE file(s) distributed with this work for additional +# * information regarding copyright ownership. +# * +# * This program and the accompanying materials are made available under the +# * terms of the Apache License 2.0 which is available at +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * SPDX-License-Identifier: Apache-2.0 +# ********************************************************************************/ + + +FROM --platform=$BUILDPLATFORM alpine:latest as builder + +ARG TARGETPLATFORM + +WORKDIR /workspace + +COPY seat_service/bin_vservice-seat_*_release.* /workspace + +# FIXME! Binaries are built from another base image, potentially incompatible with ubuntu:latest image +# RUN tar -xf bin_vservice-seat_x86_64_release.tar.gz && tar -xf bin_vservice-seat_aarch64_release.tar.gz + +RUN if [ "$TARGETPLATFORM" = "linux/amd64" ] ; \ + then tar -xf bin_vservice-seat_x86_64_release.tar.gz && cp -rv ./target/x86_64/release/install/ ./bins; \ + else tar -xf bin_vservice-seat_aarch64_release.tar.gz && cp -rv ./target/aarch64/release/install/ ./bins; fi + +FROM --platform=$TARGETPLATFORM ubuntu:latest as runtime + +LABEL org.opencontainers.image.description "VAL Seat Service container, providing VSC Seat impl and supporting simulated/real can ECU" + +## Uncomment for ip/can tools +#RUN apt-get -qqy update && apt-get install -qqy net-tools can-utils iproute2 && apt clean && rm -rf /var/lib/apt/lists/* +# make sure localhost can be resolved! 
+RUN cat /etc/hosts | grep -q localhost || echo "127.0.0.1 localhost" >> /etc/hosts + +COPY --from=builder workspace/bins /app + +WORKDIR /app/bin + +### "cansim" is special value for using SeatAdjuster CAN simulator (even without vcan support in container) +ENV CAN=cansim + +### [vxcan] options +# By default, wait for can-forward.sh to move vxcan1 in container's namespace after container start +#ENV CAN=vxcan1 +# If set, val_start.sh shall wait up to specified time (sec) for can interface to appear in the container. +#ENV CAN_WAIT=30 + +# Seat Service GRPC bind host +ENV SERVICE_HOST=0.0.0.0 +# SeatService GRPC bind port +ENV SERVICE_PORT=50051 +EXPOSE 50051 + +# Override this variable to change DataBroker address. Port can be also overridden by DAPR_GRPC_PORT variable. +ENV BROKER_ADDR=0.0.0.0:55555 + +CMD [ "./val_start.sh" ] diff --git a/seat_service/README.md b/seat_service/README.md new file mode 100644 index 0000000..cc01a99 --- /dev/null +++ b/seat_service/README.md @@ -0,0 +1,108 @@ +# Seat Service Example + +- [Seat Service Example](#seat-service-example) + - [Overview](#overview) + - [Context](#context) + - [Internals](#internals) + - [Development environment](#development-environment) + - [Prerequisites](#prerequisites) + - [Usage on CLI](#usage-on-cli) + - [Build Seat Service](#build-seat-service) + - [Usage in Visual Studio Code](#usage-in-visual-studio-code) + - [Configuration](#configuration) + - [Seat Controller Documentation](#seat-controller-documentation) + - [Generate documentation](#generate-documentation) + +## Overview + +This represents the example seat control service. More elaborate or completely differing implementations are target of particular projects providing a vehicle abstraction layer. +### Context +![SeatService_context](docs/assets/SeatService_context.svg) + +### Internals +![SeatService_internal](docs/assets/SeatService_internal.svg) + +## Development environment + +### Prerequisites + +1. 
Install and configure (if needed) local authentication proxy e.g. CNTLM or Px +2. Install and configure docker + - [Get Docker](https://docs.docker.com/get-docker/) +3. Build base development docker Image + + ``` bash + cd tools/ && \ + docker build -t oci_kuksa-val-services-ci:latest . + ``` + +### Usage on CLI + +From the checked-out git folder, to enter a shell execute: + +``` bash +//Linux +docker run --rm -it -v $(pwd):/workspace oci_kuksa-val-services-ci:latest + +//Windows (cmd) +docker run --rm -it -v %cd%:/workspace oci_kuksa-val-services-ci:latest + +//Windows (Powershell) +docker run --rm -it -v ${PWD}:/workspace oci_kuksa-val-services-ci:latest +``` + +#### Build Seat Service + +Building the seat service via dev container must be triggered from the project root folder (seat service is referencing vehicle_data_broker/proto), e.g.: + +``` bash +//Linux +docker run --rm -it -v $(pwd):/workspace oci_kuksa-val-services-ci:latest /bin/bash -c "cd seat_service; ./build-debug.sh" +``` + +### Usage in Visual Studio Code + +It is also possible to open the repo as a remote container in VScode using the approach [Developing inside a Container](https://code.visualstudio.com/docs/remote/containers). All needed tools for VScode are automatically installed in this case + +1. Install VScode extension with ID ```ms-vscode-remote.remote-containers``` +2. 
Hit "F1" and type ``Remote-Containers: Reopen in Container`` + +## Configuration + +| parameter | default value | cli parameter | Env var | description | +|----------------|---------------|----------------|-------------------------------|---------------------------------| +| can_if_name | - | can_if_name | - | Use socketCAN device | +| listen_address | "localhost" | listen_address | - | Listen for rpc calls | +| listen_port | 50051 | port | - | Listen for rpc calls | +| broker_address | "localhost" | - | - | Connect to data broker instance | +| broker_port | 55555 | - | DAPR_GRPC_PORT | Connect to data broker instance | +| broker_app_id | | - | VEHICLEDATABROKER_DAPR_APP_ID | Connect to data broker instance | + +Further configuration of the seat controller see [Seat Controller Documentation](#seat-controller-documentation). + +## Seat Controller Documentation + +Seat Controller module handles SocketCAN messaging and provides Control Loop for moving a seat to desired position. +It also provides `cansim` module for simulating a HW Seat ECU even without `vcan` support (e.g. CI pipeline). + +For more details about Seat Controller, Seat CAN Simulator and related tools, +check [README](./src/lib/seat_adjuster/seat_controller/README.md) + +## Generate documentation + +- Run Doxygen: + doxygen is able to run with the following command from the main directory: + + ``` bash + doxygen ./docs/doxygen/doxyfile + ``` + + or using: + + ``` bash + build-docu.sh + ``` + +- The output will be stored to ``./docs/out``. 
You can view the documentation by opening the following file in the browser: + + ``./docs/doxygen/out/html/index.html`` diff --git a/seat_service/build-debug.sh b/seat_service/build-debug.sh new file mode 100755 index 0000000..2110f02 --- /dev/null +++ b/seat_service/build-debug.sh @@ -0,0 +1,38 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +# Specify: +# first argument: TARGET_ARCH = "x86_64" or "aarch64"; default: "x86_64" +# second argument: BUILD_DIR; default: "$SCRIPT_DIR/target/$TARGET_ARCH/Debug" + +set -ex + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +TARGET_ARCH="$1" +[ -z "$TARGET_ARCH" ] && TARGET_ARCH="x86_64" + +BUILD_DIR="$2" +[ -z "$BUILD_DIR" ] && BUILD_DIR="$SCRIPT_DIR"/target/"$TARGET_ARCH"/debug + +cmake -E make_directory "$BUILD_DIR" +# build with dependencies of build_type Release +conan install -if="$BUILD_DIR" --build=missing --profile:build=default --profile:host="${SCRIPT_DIR}/toolchains/target_${TARGET_ARCH}_Release" "$SCRIPT_DIR" +cd "$BUILD_DIR" || exit +# shellcheck disable=SC1091 +source activate.sh # Set environment variables for cross build +cmake "$SCRIPT_DIR" -DCMAKE_BUILD_TYPE=Debug -DSDV_COVERAGE=ON -DSDV_BUILD_TESTING=ON -DCONAN_CMAKE_SILENT_OUTPUT=ON -DCMAKE_INSTALL_PREFIX="./install" +sleep 1 +cmake --build . -j +cmake --install . 
diff --git a/seat_service/build-docu.sh b/seat_service/build-docu.sh new file mode 100755 index 0000000..c32477f --- /dev/null +++ b/seat_service/build-docu.sh @@ -0,0 +1,21 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +set -x + +SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd) + +conan info . --graph="${SCRIPT_DIR}"/docs/conan/out/dependency_graph.html + +doxygen "${SCRIPT_DIR}"/docs/doxygen/doxyfile diff --git a/seat_service/build-release.sh b/seat_service/build-release.sh new file mode 100755 index 0000000..f01a2c8 --- /dev/null +++ b/seat_service/build-release.sh @@ -0,0 +1,64 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +# Specify: +# first argument: TARGET_ARCH = "x86_64" or "aarch64"; default: "x86_64" +# second argument: BUILD_DIR; default: "$SCRIPT_DIR/target/$TARGET_ARCH/release" + +# shellcheck disable=SC2086 +# shellcheck disable=SC2230 +set -x + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +TARGET_ARCH="$1" +[ -z "$TARGET_ARCH" ] && TARGET_ARCH="x86_64" + +BUILD_DIR="$2" +[ -z "$BUILD_DIR" ] && BUILD_DIR="$SCRIPT_DIR/target/$TARGET_ARCH/release" + +cmake -E make_directory "$BUILD_DIR" +conan install -if="$BUILD_DIR" --build=missing --profile:build=default --profile:host="${SCRIPT_DIR}/toolchains/target_${TARGET_ARCH}_Release" "$SCRIPT_DIR" + +set -e +cd "$BUILD_DIR" +# shellcheck disable=SC1091 + +source ./activate.sh # Set environment variables for cross build + +#CMAKE_CXX_FLAGS_RELEASE="${CMAKE_CXX_FLAGS_RELEASE} -s" +cmake -DCMAKE_BUILD_TYPE=Release -DSDV_BUILD_TESTING=OFF -DCONAN_CMAKE_SILENT_OUTPUT=ON -DCMAKE_INSTALL_PREFIX="./install" "$SCRIPT_DIR" + +cmake --build . -j +cmake --install . 
+set +e + +# Ensure release is stripped +if [ "$TARGET_ARCH" = "aarch64" ]; then + STRIP="$(which aarch64-linux-gnu-strip)" +else + STRIP="strip" +fi + +echo +echo "### Check for stripped binaries" +BINARIES="./install/bin/seat_service ./install/bin/seat_svc_client ./install/bin/broker_feeder ./install/bin/tools/libcansim.so" + +file $BINARIES +if [ -n "$STRIP" ]; then + echo "### Stripping binaries in: $(pwd)" + $STRIP -s --strip-unneeded $BINARIES + file $BINARIES + echo +fi diff --git a/seat_service/build-seatctrl.sh b/seat_service/build-seatctrl.sh new file mode 100755 index 0000000..643b1f9 --- /dev/null +++ b/seat_service/build-seatctrl.sh @@ -0,0 +1,54 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +SOURCE_DIR="$SCRIPT_DIR/src/lib/seat_adjuster/seat_controller/" +TARGET_ARCH="x86_64" +BUILD_DIR="$SCRIPT_DIR/build_seat_controller/$TARGET_ARCH" + +set -e -x + +BUILD_TYPE="$1" +[ -z "$BUILD_TYPE" ] && BUILD_TYPE="Debug" + +# NOTE: coverage requires Debug build... +echo "Build and install seat_controller ($BUILD_TYPE) for host architecture." 
+ +# Enable testing (on host) +CMAKE_OPT="-DSDV_BUILD_TESTING=ON -DSDV_COVERAGE=ON -DCONAN_CMAKE_SILENT_OUTPUT=ON -DCMAKE_INSTALL_PREFIX=./install -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_POSITION_INDEPENDENT_CODE=TRUE" + +# Create Build Environment +# Use a bash shell so we can use the same syntax for environment variable +# access regardless of the host operating system +cmake -E make_directory "$BUILD_DIR" +conan install -if="$BUILD_DIR" --build=missing --profile:build=default --profile:host="${SCRIPT_DIR}/toolchains/target_${TARGET_ARCH}_${BUILD_TYPE}" "$SCRIPT_DIR" +cd "$BUILD_DIR" +# shellcheck disable=SC1091 +source activate.sh # Set environment variables for cross build +# shellcheck disable=SC2086 +cmake "${SOURCE_DIR}" ${CMAKE_OPT} +sleep 1 +cmake --build . + +# Build +echo "Test on host architecture." +make -j + +# Run Unit Tests +ctest -j -T memcheck -C $BUILD_TYPE --output-on-failure + +# Generate Code Coverage +make report_codecov_vservice-seat-ctrl +make report_codecov_vservice-seat-ctrl_html +make report_codecov_vservice-seat-ctrl_lcov diff --git a/seat_service/clang-format.sh b/seat_service/clang-format.sh new file mode 100755 index 0000000..c89a581 --- /dev/null +++ b/seat_service/clang-format.sh @@ -0,0 +1,17 @@ +#!/bin/sh +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +find ./src -path ./src/lib/seat_adjuster/seat_controller -print -prune -o -iname './*.h' -exec clang-format --style=file -i {} \; +find ./src -path ./src/lib/seat_adjuster/seat_controller -print -prune -o -iname './*.cc' -exec clang-format --style=file -i {} \; +find ./src -path ./src/lib/seat_adjuster/seat_controller -print -prune -o -iname './*.c' -exec clang-format --style=file -i {} \; diff --git a/seat_service/conanfile.txt b/seat_service/conanfile.txt new file mode 100644 index 0000000..98afb43 --- /dev/null +++ b/seat_service/conanfile.txt @@ -0,0 +1,42 @@ + +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +[requires] +gtest/1.10.0 +grpc/1.37.1 + +[build_requires] +grpc/1.37.1 # Is needed in the build context to run generate code from proto files + +[generators] +cmake_find_package +cmake_paths +virtualenv + +[options] +grpc:fPIC=True +grpc:cpp_plugin=True +grpc:codegen=True + +# Follwing options do speed up cross build, but there is no prebuild package for this combination +#grpc:csharp_ext=False +#grpc:csharp_plugin=False +#grpc:node_plugin=False +#grpc:objective_c_plugin=False +#grpc:php_plugin=False +#grpc:python_plugin=False +#grpc:ruby_plugin=False + +[imports] +., license* -> ./licenses @ folder=True, ignore_case=True \ No newline at end of file diff --git a/seat_service/docker-build.sh b/seat_service/docker-build.sh new file mode 100755 index 0000000..fc439e9 --- /dev/null +++ b/seat_service/docker-build.sh @@ -0,0 +1,112 @@ +#!/bin/bash +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ +# shellcheck disable=SC2181 +# shellcheck disable=SC2086 +# shellcheck disable=SC2230 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +BASEDIR="$SCRIPT_DIR/.." 
+ +print_usage() { + echo "USAGE: $0 [OPTIONS] TARGETS" + echo + echo "Standalone build helper for seat-service container." + echo + echo "OPTIONS:" + echo " -l, --local local docker import (does not export tar)" + echo " --help show help" + echo + echo "TARGETS:" + echo " x86_64, aarch64 Target arch to build for, if not set - defaults to multiarch" + echo +} + +LOCAL=0 +while [ $# -gt 0 ]; do + if [ "$1" = "--local" ] || [ "$1" = "-l" ]; then + LOCAL=1 + elif [ "$1" = "--help" ]; then + print_usage + exit 0 + else + TARGET="$1" + break + fi + shift +done + +target_arch() { + local target="$1" + case "$target" in + "x86_64") + echo "amd64" + ;; + "aarch64") + echo "arm64" + ;; + "") + echo "multiarch" + ;; + *) + return 1 + ;; + esac + return 0 +} + +build_release() { + local arch="$1" + + cd "$BASEDIR/seat_service" || return 1 + echo "-- Building release for: $arch ..." + ./build-release.sh "$arch" + + echo "-- Building bin_vservice-seat_${arch}_release.tar.gz ..." + + tar -czvf "bin_vservice-seat_${arch}_release.tar.gz" \ + "target/${arch}/release/install/" \ + "target/${arch}/release/licenses/" \ + "proto/" +} + +# Dockerfile requires both bin_vservice-seat_*_release artifacts +build_release aarch64 +build_release x86_64 + +cd "$BASEDIR" || exit 1 +echo "-- Building seat-service container ..." +# DOCKER_BUILDKIT=1 docker build -f seat_service/Dockerfile -t seat-service . + +if [ -z "$TARGET" ] && [ $LOCAL -eq 1 ]; then + echo "Multiarch archives are not supported for local builds, removing --local flag ..." 
+ LOCAL=0 +fi + +DOCKER_ARCH=$(target_arch "$TARGET") +DOCKER_EXPORT="./${DOCKER_ARCH}_seat-service.tar" + +if [ "$DOCKER_ARCH" = "multiarch" ]; then + DOCKER_ARGS="--platform linux/amd64,linux/arm64 -t $DOCKER_ARCH/seat-service --output type=oci,dest=$DOCKER_EXPORT" +else + if [ $LOCAL -eq 1 ]; then + DOCKER_ARGS="--load -t $DOCKER_ARCH/seat-service" + DOCKER_EXPORT="(local)" + else + DOCKER_ARGS="--platform linux/$DOCKER_ARCH -t $DOCKER_ARCH/seat-service --output type=oci,dest=$DOCKER_EXPORT" + fi +fi + +echo "# docker buildx build $DOCKER_ARGS -f seat_service/Dockerfile ." +DOCKER_BUILDKIT=1 docker buildx build $DOCKER_ARGS -f seat_service/Dockerfile . +[ $? -eq 0 ] && echo "# Exported $DOCKER_ARCH/seat-service in $DOCKER_EXPORT" diff --git a/seat_service/docs/SeatService.drawio b/seat_service/docs/SeatService.drawio new file mode 100644 index 0000000..7ce7cee --- /dev/null +++ b/seat_service/docs/SeatService.drawio @@ -0,0 +1 @@ +7VvbcuI4EP0aHpPCNr7wCIbJ1lZmKzVkdmaetoQtbG2M5ZFFAvP1K9mS74CZGMMmeQmodXX36T7dMhlo9np7R0Dkf8YuDAbq0N0OtNlAVRVdH7IPLtmlEmuspwKPIFcMygUL9AsKoZjnbZAL49JAinFAUVQWOjgMoUNLMkAIfikPW+GgvGsEPFgTLBwQ1KXfkEt98RSqmcv/gMjz5c6KMU57lsB58gjehGK/EIcw7VkDuYx4xtgHLn4piLT5QLMJxjT9tt7aMOBqlRpL533a05sdmcCQtpmweYSf78JxMNd+/byful9H8+jPG/EYzyDYCFUMVAOso4E2Ddiq03ILbqGzoQiHbAoMnxHB4ZrvLgd5pSmylUxdEvYtkdhsBgH8ZHP7q1AN3UlLPENCETPMJEBeyEQU86WAaAVwxZeII+Cg0HvkfTMrF9wn3TM1l3wRRlMSmQ8ivo2zWULeTGHIMKhNXUQYrviTabMYb7jppisc0oU42Yi1fboO+Pjkqaq6F+bgx4fbgkjY4g7iNaRkx4aIXk0TuBAuow6Fy7zkADQsMcYvgE+TQiBA72Vr59ZnXwQATgCD0gSGxHpRyUjGzw0H7XQNiIeYxias14i27O8w/ysHSbPLpdjJoqrs+PLD6vJD69AmOdr62vaIz4Al26WFm/SjHSn4G/rI4QcbTqKocITlYe1VXFY6p8OcARI2AjMvWAVJrFshDsGi67gER4/s7JDDmbte1ePXyHX50p04mVlxMsusOZnS5GTjc/mY2uBjFX1mYQqvI0Yn/OGn/0JKd5KaLCnIOGnUqK6DTt5ah0pNY2qDwqSMwABQ9Fzm1SYtiv0eMOIEIje7UUclg43KC+DVKmbAqRohO/Qr7KLU7ABdliOIJibUxx4OQTDPpdOE+KErgJyPuccJMynSTiLhARuKy84At4h+L3z/wZe61UVrthUrJ42dbDD63H1PBqrWSAr4TOVWkc18atIqzX2ABDF1cVcVQnfCcyjWTNKXRPIpcdykf68fMqIkDjygVE2kctLd9ypfgIdr/CAkW6PrVT6q
XTsPJlOGqfwmzYkmrYP+QY48EvnPesyOKPT6H7R8zAUEPAAuIHlGDrw6FuZzbRxgkmyjuQBaK4dnz5TgJ1joMRwLLlfd8LZZ5W2jnhz3y9ujS/O2dqoO3wdvmx+x+iNW92aPvGCaAQrYx5QHQXJ1QbuDEGyo5RA80i4dgq1Lh2DzVB2+jxDcYIceSydZH/0o1kpcJUZWRnGFKKbsrJc/WT2lW9qgUE/l9dNVVFPysvz/Vk7JcxccV+S8bS6As5hWi5Vv4RZYUYxymNWawqzZEDUU9VxxVmm6B65Y6fXue8RN4idIHV82EtfIPG2RWlV0pu6wp/N4IGqMI0euYH7fh9veiCha1y7cTBq6UmH56ssFofp0VvfcobS4D+0Saz4IVjYiTlBHXJEHSiwgmvK1pZEVvzKpUKp4bY+68sWfei7YKdcGO/3CsNt/xVevQE6W7C2OXBCRf7wvD3ar2ihbkrNlzM0sr4iq2VYQoCjmsHrxEYU8AvKeFwKisiPUKBTEUfo6fYW23MOmBDI+TKq3FHIRt0BiE3060Gfc/zY0QCG0szfxtWjdAS0aoworNrwcVZpI8Wyc2HTP8MGJnQQns21wGvcUnKwy+Ax5+dhXcGpR6L5lThzemnoBeMqZYNeaE3uCXfXCpS3smBnBrjBMhOy9+4wsvZF7cxSnK3aL6cYfGl0r4cpbvXfEuHqlDh2ZF2ZctX5rULuOtSd/8TvYTdx8AVsxWsEsBFMg6vxxRwocV37PpTW8stKGB67/utdgU5L75nOWU35Kcf6cRdX7IQ+zmjD3XFCpLd6PfuQsr4Zd6x/U9AU76/dgd2rOMh5eIGeRSjxPzpJI4giEvWY2Ng4ClkRgUmDMo4eoHTWu/ValvMabz5Yss7dsiTXz/xFIoZ3/D4Y2/w8=7V1rc5u4Gv41njn7wR7u4I+x03a7uz3tNO3Z7acMBmHTYMQBOXHOrz+vQNwkEZPY2G7idiYB3dDlea96pYz0+Xr7IXWT1Sfso2ikKf52pF+PNE0zFAV+0ZTHIsWZmkXCMg39IkmtE27C/yGWyOotN6GPslZBgnFEwqSd6OE4Rh5ppblpih/axQIctb+auEskJNx4biSm/h36ZMVGYSp1+u8oXK7KL6vlgNduWZglZCvXxw+NJP3dSJ+nGJPiab2do4hOXjkvmz+mf87+jB6+j6fZ/cfZ35n7ThkXjb1/TpVqCCmKyYubfviZfPmQfv7j5p/o293Xj6GlkG/jcmz3brRhEzbSrAi+MkvokMkjm0frvxs6ztnaTZdhPNKvIFdJtvATEvPZouljgpMiz2rkEbQlYzcKl6yeB8NAad0mPC3Zb3cN9WeR+Ia2yNsQdwHdKdPbdcq3vPQibbRZpPAJAYa5lI2QZoyzHMe0s6pJR8J39Qa5tPYNSu9DD5VtwuQXzbY/BcnC5yEtqdO0Vk80NlfXEQpoPr5HaRDl0AtCWFN9tiJr+HWtwqOf4uQbTD6iyFCq1qAKTDtHAjvwo1agBm6A8BqR9BHqsVYqrDBGYFns/aEmK1W1WeKqQVNVSZfR8rJqvMYrPDDIPge+qgS+3GQC2Sb00cPrBMf5DMx+IkIeS3bglAkVHzAgpVwDBlZ9Rmc0BL5yxTLWoe/Tb/Sb8Ceor3PGVWF+NcnslmkpilwS3rfZnmzG2fe+4DCngGp5jdbyGu0GcBBkADF+wapO77GG2kAsyOjBgprZPvJwClOIWZlN7KM0CmMk5VQcV9GvKEtg/ODjOon6E3+RthJY1otGX7AMmunI+NZIo2U/AUf512Qy+e2pDg37+XlJjSfsx3yTUgL9grOQrvoTPeFYCs8KcsjNdnJtKhuYgqRq7P29uw4jSnK/o+ge0Vbb7J1WneMIp/mXdd9FTuBBekZSfIcaOZbnoEUwIP/XrDb/tw0J/7ckDGo6GPfXXw/nuPJ/bjJo+NxZxwdEaHcrmvmtqJGLElH3muU6UrsGFGWVIOcryjZRd9XNIvPScIGqyt7iQqGd
FGo65m4KLc2341CosVs/SzElF59przglK7zEsRv9hema5VOca2dsXdwNwe0FQLF/RQ1FeI1BpBQp7/MFzZvM7hDxVuUL3qQe+oLSEEaI0pvE9cJ4yTJJrkd3ZCIf7ErW7bqX7+pUsFFC8k9ZGp5/0OeJyd6ut42s68eWtk7bfgESYB7z4fTgkaQ0EToLmnJs7alWGmZbakw1s90EW5Ci1gDapbkbgdBMmGR0BR9WIUF02WnOQ+ombaQJNoGbJYXPIgi3FMKzFIHlmFupxfImdFz5SM3ZyLymAN8QKhnmlbtDgOhg7EHlBLgjYQ8S+a0Oxh2sX1J+d/oo5uNP2N/s7aHIQFreeiB5UhxFuZ/kDLUB2kmPpNGtjwIXRDjtcBAuWzp0PcaqdBiHUJRsd5UDW/MW3cNS3YIQjxaud9dR49iWQ9VDnKD43PrkRTg7jWFXdQGEzG0iM6jOYHqy/frmh/cCzcoqtspd9FJNcxxO8CimKHgcmeQZznFo/5KiR2o6XrvEfY+Qf66iorDlvm6osXgSP8/NakN8/ND5/QuNgp6ut21HZypRDo/r3XEutuNpbUezt+04HcR2tPS23FAV68jG4/RiPJbGoymsxYmtxxIcv5gI77QelyhG0AwgQVOWX7/M8zFsFnsak9ClOTUjPUJFyRVVGYpGz1dRQMuQesKpWlNQwOm2iNhkx4uM/vqe+LA+59evG5Iid92rX2ej6gQm/S9VdfJ/rNVGevFvQBYniBvVmkrsFIn/XFWekG37MTkxwEEVVrGhxFCvhKDhNJcqA2FOytJ+6K5x7H9bUTgWWWVZo0xgypDaS4vhkMPDa4EJwWsq6Vp6W6X7VPrOj/KLnboPLHT62KhEX3808+pq+dvZ6Uz607EfY2Wil9oGw2MVKbGnWmVr2sTkkM5L6Q7FSmiLVwumOtcQU4u7Gxo+wETXBBrySpF4m6R4+3grkBQV7AJzYhZCk5OxpB24L9hqVpgEf+Vl8hgjlvK1EXbUYLar0Pdzak4xcUmnXjiDGZ9TKgANUZvDu1q/50pjArQKGiOMxQ1zjCM3Iw8oIwPyUluzBYxpEoVR5vUptwUOz0zFeAEqNGd0jdPSg3IYIHRGjZ0vFHKhzgauDoULw+LNCPvkqBD3qJsBGRdEDIwIjQtaME8OCNmW8cWwlDufM2ZUCoHQA9uVzZ/HinR8xjcPH97Y8+O/YEzjGZqCXKi7rUs2rGTO8OrwyOGZkhgr8W5LwLjLnjIHFxH27tpGoDXifdyNleHsskG81pXlprbsttqMO5HlVlpXuy23jkgpGTj2NNX06Q7zamg77WA2mLjf+j1DT0GXeTI494XvZqscn+phgKs+A7iDA7D3dot1AeCzAXjZTTwxvEsZupu/DrObeOpIVP2ymVg5N1VuLU4eilpGyh+LPazcKJiHqReJTKLTs81eS3loVRpzeZBR5dfvGYziGa73wRmF0pNRGB0o25dRcDJP1Y7MKYwep1rfCKewVcGPrMs8hkflFeJ+wlPuF99N0obnpXKUvMD5kiVu3OmQyfz7CSXKiU/9gblPe1Ltc0zu1ckO54ys8Sc/WMcVPKdhuTe1AdiDOVarA9oyCmkz86Hga06ticbjVxJZp5nVllwTwryOfDgIS09OHgPCIlw9vA5AOE1omHZGcXrzFJ6qhNz3yAVCyCu+KcSNQc9t4c2RBFtrioRfDge2Ix8CPDfd6qx8DFZf3WqY44CqbesTu41QzejnUziYenU5ElgthzY9N0Os+0xgxdJvsAeTM7/694Xf0yA1hycoRzkDDUN0+BY66W2QB1pcQm4Ormtym1ZTGSkfdSPdEH2ul+DFo0h5u6eU7zq4UQchWpZlt2A1Vhkb2VMT0LhDgVPzhTsLhrGjoY6dBcCR+9goxki8s8Omyhly5WmUmkSKJo8e4GiInuW3QmfKL0JnHdp0TWdOG1uvhMaOSQWm6MLPb1JwWSDfReMYWuNwLOPEGof5lo5LnJVfQTsQJ1Rb
iNJfBxs8GIsTdx4u3G0o7mbwQYASv72lVu794/C37hsPm55yzimuSPzkUr/5phHcy5+M4C4m3Eg2eg7RhUEjgp91twu397V7QDTVS0HlQB8BqW7sCZHHsu00/sKYw/aHv3Xj+D2QXr5x/G5QEP+HUs3ui396bIQ+85vCidxDjv95A9lxP1F5WXaSYg9lWVs3otdwLXNd5HPhsR61feBPi45CU1Imisn9q6UI5I8tQcw0Ur7lu0HWSIghR6pvIlsQd5AztWzdtQaUE5YuHKdU1J6uN+Hc5eEkhbjp9oGeAwk9SAzYWTfFi1xYZKpgjTdZcbm+AlxVUzBZ5QWqvV4lBP6P1jBR+YEUMeq8RE6MSY9tky4o+W5695muNXlkYKmho0rWPQgCzZPemONbC8scct0dQ3C8q0oZpt/aPjnuwou7W9cogBmm6wyrCj9pgAgdP3N4ARx8+Ll4rApkxV83aCx+THCVW/jxad9jWi/A6YOb0nMIyia/IIE+4aBuhn0uRR4CHT6vkoJZQ+//bR6zUxqfWApQfeto4zfqplMJ0kzZ3QT87urhkCZu1X0smUQNtpDunYHdK653nhO4OcAKKIFCD0wozFYNsLK/C0MPwbUgwsDVvCfzrWPEsnlJ5MgOU9YhdUfiSLawNINH2rfOiYwGdn6YzHm72/mhydevt39jv3UQt+NuEEwCY9klWZU8nyBvFQN6l485rfoogSlGuTukH00zLs84OosdS5AXBjln9xFxw+iiSNTRWGdFtNPXTrR9j6MUmzwHj4TSJHEb/HWnff2S2lQIqnJ4sT+wa7Ls+uvFS9/IucJpfXi82CJe7BfiRVdEvBzZlW2J+zKvDC99T3MWlvrh8eKIeHFeihdVxAvPq4bGi7j1weIls7Zd0AgOZFYFNRHcMG6ZFNRmgKeIQktT1vieep+gQm1YMLOW2rJUsaF6BnvfxCSMqsZ8lIVpbtWWV4vToRROFdejeHvzCs5U9JRIrxeWmq+DaTiW/so5UO9Yb2tfs0TONRzxnjkFmJJmW+bUtA3Dcrgm+7Ijx5R43kAFsjRDVXXDNlXb1g7EneC1/guvRfH67+Tq7/4P \ No newline at end of file diff --git a/seat_service/docs/assets/SeatService_context.svg b/seat_service/docs/assets/SeatService_context.svg new file mode 100644 index 0000000..bad07f2 --- /dev/null +++ b/seat_service/docs/assets/SeatService_context.svg @@ -0,0 +1 @@ +
<<execution environment>>
Central ECU
<<execution environment>>...


<<executable>>

Vehicle App

<<executable>>...


<<executable>>

Seat Service

<<executable>>...


<<executable>>

Vehicle Data Broker

<<executable>>...
Seat ECU
Seat ECU




<<dapr_gRPC>>
Seats service
<...




<<dapr_gRPC>>
Broker service
<...
CAN Bus
CAN Bus





<<dapr_gRPC>>
Collector

service
<...
Viewer does not support full SVG 1.1
\ No newline at end of file diff --git a/seat_service/docs/assets/SeatService_internal.svg b/seat_service/docs/assets/SeatService_internal.svg new file mode 100644 index 0000000..47b63ae --- /dev/null +++ b/seat_service/docs/assets/SeatService_internal.svg @@ -0,0 +1 @@ +

<<executable>>
Seat Service

<<executable>>...

:SeatServiceImpl


+ Move(...)

+ MoveComponent(...)

+ CurrentPosition(...)

:SeatServiceImpl...

:SeatAdjusterImpl


+ GetSeatPosition(): int
+ SetSeatPosition(int): SetResult
+ SubscribePosition(cb)

:SeatAdjusterImpl...

<<C-Module>>
seat_controller


seatctrl_default_config(...)
seatctrl_init_ctx(...)
seatctrl_set_event_callback(...)

seatctrl_open(...)

seatctrl_close(...)

seatctrl_get_position(...)

seatctrl_set_position(...)


<<C-Module>>...

:SeatDataFeeder


+ Run()

+ Shutdown()

:SeatDataFeeder...

<<generated gRPC stub>>
:Collector::Stub


+ RegisterDatapoints(...)

+ UpdateDatapoints(...)

+ StreamDatapoints(...)

<<generated gRPC stub>>...
1
1
collector_proxy_DataBrokerFeederSeatAdjuster

<<generated gRPC stub>>
:Seats::Service


+ Move(...)

+ MoveComponent(...)

+ CurrentPosition(...)

<<generated gRPC stub>>...
Extends
Extends
Use
Use
<<dapr gRPC service>>
sdv.edge.databroker.collector.v1.Collector
<<dapr gRPC service>>...
<<dapr gRPC service>>
sdv.edge.comfort.seats.v1.Seats 
<<dapr gRPC service>>...
SocketCAN
SocketCAN
broker_feeder_
1
1
1
1
seat_adjuster_
1
1
adjuster_
    :DataBrokerFeederImpl    

   + createInstance(...)
   + Run()
   + Shutdown()
   + FeedValue(...)
   + FeedValues(...)

:DataBrokerFeederImpl...
Generic feeder class re-usable in other service implementations
Generic feeder c...
Defines the data points fed by the seat service into the broker and forwards updates of seat data received from SeatAdjuster to the generic feeder
Defines the data points fe...
Implements the internal SeatAdjuster interface and establishes the connection to the seat controller
Implements the inte...
Separates the service technology independent internal SeatAdjuster interface from the gRPC specific details
Separates the servi...
Connects to the SocketCAN and contains the control loop moving a seat forward or backward until the desired position is reached
Connects to the SocketCAN...
Viewer does not support full SVG 1.1
\ No newline at end of file diff --git a/seat_service/docs/doxygen/doxyfile b/seat_service/docs/doxygen/doxyfile new file mode 100644 index 0000000..0e5232a --- /dev/null +++ b/seat_service/docs/doxygen/doxyfile @@ -0,0 +1,2615 @@ +# Doxyfile 1.9.2 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the configuration +# file that follow. The default is UTF-8 which is also the encoding used for all +# text before the first occurrence of this tag. Doxygen uses libiconv (or the +# iconv built into libc) for the transcoding. See +# https://www.gnu.org/software/libiconv/ for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = "Vehicle API" + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. 
+ +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = ./docs/doxygen/out + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = YES + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. 
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. + +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. 
+ +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. 
Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = NO + +# If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line +# such as +# /*************** +# as being the beginning of a Javadoc-style comment "banner". If set to NO, the +# Javadoc-style will behave just like regular comments and it will not be +# interpreted by doxygen. +# The default value is: NO. + +JAVADOC_BANNER = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. 
+# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# By default Python docstrings are displayed as preformatted text and doxygen's +# special commands cannot be used. By setting PYTHON_DOCSTRING to NO the +# doxygen's special commands can be used and the contents of the docstring +# documentation blocks is shown as doxygen documentation. +# The default value is: YES. + +PYTHON_DOCSTRING = YES + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:^^" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". Note that you cannot put \n's in the value part of an alias +# to insert newlines (in the resulting output). You can put ^^ in the value part +# of an alias to insert a newline as if a physical newline was in the original +# file. 
When you need a literal { or } or , in the value part of an alias you +# have to escape them by means of a backslash (\), this can lead to conflicts +# with the commands \{ and \} for these it is advised to use the version @{ and +# @} or use a double escape (\\{ and \\}) + +ALIASES = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. +# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice +# sources only. Doxygen will then generate output that is more tailored for that +# language. For instance, namespaces will be presented as modules, types will be +# separated into more groups, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_SLICE = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. 
The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, JavaScript, +# Csharp (C#), C, C++, Lex, D, PHP, md (Markdown), Objective-C, Python, Slice, +# VHDL, Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: +# FortranFree, unknown formatted Fortran: Fortran. In the later case the parser +# tries to guess whether the code is fixed or free formatted code, this is the +# default for Fortran type files). For instance to make doxygen treat .inc files +# as Fortran files (default is PHP), and .f files as C (default is Fortran), +# use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. When specifying no_extension you should add +# * to the FILE_PATTERNS. +# +# Note see also the list of default file extension mappings. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See https://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up +# to that level are automatically included in the table of contents, even if +# they do not have an id attribute. +# Note: This feature currently applies only to Markdown headings. +# Minimum value: 0, maximum value: 99, default value: 5. +# This tag requires that the tag MARKDOWN_SUPPORT is set to YES. 
+ +TOC_INCLUDE_HEADINGS = 5 + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. 
+ +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. 
+ +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +# The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use +# during processing. When set to 0 doxygen will base this on the number of +# cores available in the system. You can set it explicitly to a value larger +# than 0 to get more control over the balance between CPU load and processing +# speed. At this moment only the input processing can be done using multiple +# threads. Since this is still an experimental feature the default is set to 1, +# which effectively disables parallel processing. Please report any issues you +# encounter. 
Generating dot graphs in parallel is controlled by the +# DOT_NUM_THREADS setting. +# Minimum value: 0, maximum value: 32, default value: 1. + +NUM_PROC_THREADS = 4 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual +# methods of a class will be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIV_VIRTUAL = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. 
If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If this flag is set to YES, the name of an unnamed parameter in a declaration +# will be determined by the corresponding definition. By default unnamed +# parameters remain unnamed in the output. +# The default value is: YES. + +RESOLVE_UNNAMED_PARAMS = YES + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# declarations. If set to NO, these declarations will be included in the +# documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. 
If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# With the correct setting of option CASE_SENSE_NAMES doxygen will better be +# able to match the capabilities of the underlying filesystem. In case the +# filesystem is case sensitive (i.e. it supports files in the same directory +# whose names only differ in casing), the option must be set to YES to properly +# deal with such files in case they appear in the input. For filesystems that +# are not case sensitive the option should be set to NO to properly deal with +# output files written for symbols that only differ in casing, such as for two +# classes, one named CLASS and the other named Class, and to also support +# references to files without having to specify the exact matching casing. On +# Windows (including Cygwin) and MacOS, users should typically set this option +# to NO, whereas on Linux or other Unix flavors it should typically be set to +# YES. +# The default value is: system dependent. + +CASE_SENSE_NAMES = YES + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = NO + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. 
+ +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_HEADERFILE tag is set to YES then the documentation for a class +# will show which file needs to be included to use the class. +# The default value is: YES. + +SHOW_HEADERFILE = YES + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. 
If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. 
This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. 
This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. + +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. See also section "Changing the +# layout of pages" for information. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. 
+ +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = NO + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as documenting some parameters in +# a documented function twice, or documenting parameters that don't exist or +# using markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# If WARN_IF_INCOMPLETE_DOC is set to YES, doxygen will warn about incomplete +# function parameter documentation. If set to NO, doxygen will accept that some +# parameters have no documentation without warning. +# The default value is: YES. + +WARN_IF_INCOMPLETE_DOC = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong parameter +# documentation, but not about the absence of documentation. 
If EXTRACT_ALL is +# set to YES then this flag will automatically be disabled. See also +# WARN_IF_INCOMPLETE_DOC +# The default value is: NO. + +WARN_NO_PARAMDOC = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS +# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but +# at the end of the doxygen process doxygen will return with a non-zero status. +# Possible values are: NO, YES and FAIL_ON_WARNINGS. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. 
+ +INPUT = ./src/bin ./src/lib ./src/tests ./src/examples ./proto ./README.md + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: +# https://www.gnu.org/software/libiconv/) for the list of possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# Note the list of default checked file patterns might differ from the list of +# default file extension mappings. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.l, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, +# *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C +# comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, +# *.vhdl, *.ucf, *.qsf and *.ice. + +FILE_PATTERNS = *.c \ + *.cc \ + *.cxx \ + *.cpp \ + *.c++ \ + *.h \ + *.hh \ + *.hxx \ + *.hpp \ + *.md \ + *.proto + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. 
+# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. + +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories use the pattern */test/* + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. 
+ +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or directories +# that contain images that are to be included in the documentation (see the +# \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command: +# +# +# +# where is the value of the INPUT_FILTER tag, and is the +# name of an input file. Doxygen will then use the output that the filter +# program writes to standard output. If FILTER_PATTERNS is specified, this tag +# will be ignored. +# +# Note that the filter must not add or remove lines; it is applied before the +# code is scanned, but not when the output code is generated. If lines are added +# or removed, the anchors will not be placed correctly. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +INPUT_FILTER = + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: pattern=filter +# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how +# filters are used. If the FILTER_PATTERNS tag is empty or if none of the +# patterns match the file name, INPUT_FILTER is applied. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. 
when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = NO + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# entity all documented functions referencing it will be listed. +# The default value is: NO. 
+ +REFERENCED_BY_RELATION = NO + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. The htags tool is part of GNU's global source tagging system +# (see https://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. 
+# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). 
+# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. 
+# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a color-wheel, see +# https://en.wikipedia.org/wiki/Hue for more information. 
For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use gray-scales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = YES + +# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML +# documentation will contain a main index with vertical navigation menus that +# are dynamically created via JavaScript. If disabled, the navigation index will +# consists of multiple levels of tabs that are statically embedded in every HTML +# page. 
Disable this option to support browsers that do not have JavaScript, +# like the Qt help browser. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_MENUS = YES + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = NO + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_INDEX_NUM_ENTRIES = 1000 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: +# https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To +# create a documentation set, doxygen will generate a Makefile in the HTML +# output directory. Running make will produce the docset in that directory and +# running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy +# genXcode/_index.html for more information. +# The default value is: NO. 
+# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag determines the URL of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDURL = + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# on Windows. 
In the beginning of 2021 Microsoft took the original page, with
+# a.o. the download links, offline (the HTML help workshop was already many years
+# in maintenance mode). You can download the HTML help workshop from the web
+# archives at Installation executable (see:
+# http://web.archive.org/web/20160201063255/http://download.microsoft.com/downlo
+# ad/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe).
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION =
+
+# The GENERATE_CHI flag controls if a separate .chi index file is generated
+# (YES) or that it should be included in the main .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+ +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. 
For more information please see Qt Help Project / Virtual +# Folders (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. Qt Help Project / Filter Attributes (see: +# https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location (absolute path +# including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to +# run qhelpgenerator on the generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. 
The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine tune the look of the index (see "Fine-tuning the output"). 
As an +# example, the default style sheet generated by doxygen has an example that +# shows how to put an image at the root of the tree instead of the PROJECT_NAME. +# Since the tree basically has the same information as the tab index, you could +# consider setting DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = YES + +# When both GENERATE_TREEVIEW and DISABLE_INDEX are set to YES, then the +# FULL_SIDEBAR option determines if the side bar is limited to only the treeview +# area (value NO) or if it should extend to the full height of the window (value +# YES). Setting this to YES gives a layout similar to +# https://docs.readthedocs.io with more room for contents, but less room for the +# project logo, title, and description. If either GENERATE_TREEVIEW or +# DISABLE_INDEX is set to NO, this option has no effect. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FULL_SIDEBAR = NO + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. + +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. 
+# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg +# tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see +# https://inkscape.org) to generate formulas as SVG images instead of PNGs for +# the HTML output. These images will generally look nicer at scaled resolutions. +# Possible values are: png (the default) and svg (looks nicer but requires the +# pdf2svg or inkscape tool). +# The default value is: png. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FORMULA_FORMAT = png + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANSPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands +# to create new LaTeX commands to be used in formulas as building blocks. See +# the section "Including formulas" for details. 
+
+FORMULA_MACROFILE =
+
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# https://www.mathjax.org) which uses client side JavaScript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want formulas to look prettier in the HTML output. When
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+USE_MATHJAX = NO
+
+# With MATHJAX_VERSION it is possible to specify the MathJax version to be used.
+# Note that the different versions of MathJax have different requirements with
+# regards to the different settings, so it is possible that also other MathJax
+# settings have to be changed when switching between the different MathJax
+# versions.
+# Possible values are: MathJax_2 and MathJax_3.
+# The default value is: MathJax_2.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_VERSION = MathJax_2
+
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. For more details about the output format see MathJax
+# version 2 (see:
+# http://docs.mathjax.org/en/v2.7-latest/output.html) and MathJax version 3
+# (see:
+# http://docs.mathjax.org/en/latest/web/components/output.html).
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility. This is the name for MathJax version 2, for MathJax version 3
+# this will be translated into chtml), NativeMML (i.e. MathML. Only supported
+# for MathJax 2. For MathJax version 3 chtml will be used instead.), chtml (This
+# is the name for MathJax version 3, for MathJax version 2 this will be
+# translated into HTML-CSS) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+ +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from https://www.mathjax.org before deployment. The default value is: +# - in case of MathJax version 2: https://cdn.jsdelivr.net/npm/mathjax@2 +# - in case of MathJax version 3: https://cdn.jsdelivr.net/npm/mathjax@3 +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# for MathJax version 2 (see +# https://docs.mathjax.org/en/v2.7-latest/tex.html#tex-and-latex-extensions): +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# For example for MathJax version 3 (see +# http://docs.mathjax.org/en/latest/input/tex/extensions/index.html): +# MATHJAX_EXTENSIONS = ams +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: +# http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. 
Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use + S +# (what the is depends on the OS and browser, but it is typically +# , /