From f1bc4a19b05ae92ae686bd786eb70121a798ac6b Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 07:10:22 +0000 Subject: [PATCH 001/128] still move source code into src folder --- .gitignore | 4 ++-- pyproject.toml | 6 +++--- .../otaclient_iot_logging_server}/__init__.py | 0 .../otaclient_iot_logging_server}/__main__.py | 0 .../otaclient_iot_logging_server}/_common.py | 0 .../otaclient_iot_logging_server}/_utils.py | 0 .../otaclient_iot_logging_server}/aws_iot_logger.py | 0 .../otaclient_iot_logging_server}/boto3_session.py | 0 .../otaclient_iot_logging_server}/configs.py | 0 .../otaclient_iot_logging_server}/greengrass_config.py | 0 .../otaclient_iot_logging_server}/log_proxy_server.py | 0 11 files changed, 5 insertions(+), 5 deletions(-) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/__init__.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/__main__.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/_common.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/_utils.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/aws_iot_logger.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/boto3_session.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/configs.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/greengrass_config.py (100%) rename {otaclient_iot_logging_server => src/otaclient_iot_logging_server}/log_proxy_server.py (100%) diff --git a/.gitignore b/.gitignore index bc1b77a..72b8b24 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,6 @@ coverage build/ dist/ mypy_cache/ -otaclient_iot_logging_server/_version.py __pycache__/ -venv/ \ No newline at end of file +venv/ +src/otaclient_iot_logging_server/_version.py \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 6450a79..95b2cc3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,14 +82,14 @@ UPLOAD_INTERVAL = "6" source = "vcs" [tool.hatch.build.hooks.vcs] -version-file = "otaclient_iot_logging_server/_version.py" +version-file = "src/otaclient_iot_logging_server/_version.py" [tool.hatch.build.targets.sdist] exclude = ["/.github", "/docs"] [tool.hatch.build.targets.wheel] -only-include = ["otaclient_iot_logging_server"] -sources = ["otaclient_iot_logging_server"] +only-include = ["src"] +sources = ["src"] [tool.isort] atomic = true diff --git a/otaclient_iot_logging_server/__init__.py b/src/otaclient_iot_logging_server/__init__.py similarity index 100% rename from otaclient_iot_logging_server/__init__.py rename to src/otaclient_iot_logging_server/__init__.py diff --git a/otaclient_iot_logging_server/__main__.py b/src/otaclient_iot_logging_server/__main__.py similarity index 100% rename from otaclient_iot_logging_server/__main__.py rename to src/otaclient_iot_logging_server/__main__.py diff --git a/otaclient_iot_logging_server/_common.py b/src/otaclient_iot_logging_server/_common.py similarity index 100% rename from otaclient_iot_logging_server/_common.py rename to src/otaclient_iot_logging_server/_common.py diff --git a/otaclient_iot_logging_server/_utils.py b/src/otaclient_iot_logging_server/_utils.py similarity index 100% rename from otaclient_iot_logging_server/_utils.py rename to src/otaclient_iot_logging_server/_utils.py diff --git a/otaclient_iot_logging_server/aws_iot_logger.py 
b/src/otaclient_iot_logging_server/aws_iot_logger.py similarity index 100% rename from otaclient_iot_logging_server/aws_iot_logger.py rename to src/otaclient_iot_logging_server/aws_iot_logger.py diff --git a/otaclient_iot_logging_server/boto3_session.py b/src/otaclient_iot_logging_server/boto3_session.py similarity index 100% rename from otaclient_iot_logging_server/boto3_session.py rename to src/otaclient_iot_logging_server/boto3_session.py diff --git a/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py similarity index 100% rename from otaclient_iot_logging_server/configs.py rename to src/otaclient_iot_logging_server/configs.py diff --git a/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py similarity index 100% rename from otaclient_iot_logging_server/greengrass_config.py rename to src/otaclient_iot_logging_server/greengrass_config.py diff --git a/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py similarity index 100% rename from otaclient_iot_logging_server/log_proxy_server.py rename to src/otaclient_iot_logging_server/log_proxy_server.py From b010ad53c56ab91260869810479c5aab2a73e8c2 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 07:13:30 +0000 Subject: [PATCH 002/128] minor fix --- src/otaclient_iot_logging_server/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/otaclient_iot_logging_server/__init__.py b/src/otaclient_iot_logging_server/__init__.py index 665c449..40bfe09 100644 --- a/src/otaclient_iot_logging_server/__init__.py +++ b/src/otaclient_iot_logging_server/__init__.py @@ -11,9 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# flake8: noqa -from _version import version as __version__ # type: ignore +from otaclient_iot_logging_server._version import __version__ package_name = __name__.split(".")[0] +version = __version__ From d3e4c1893925be892ca37832369e3402fdf2957b Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 07:59:32 +0000 Subject: [PATCH 003/128] add Dockerfile --- Dockerfile | 62 ++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 41 insertions(+), 21 deletions(-) diff --git a/Dockerfile b/Dockerfile index e12bfd3..fbfb9d4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,33 +4,53 @@ ARG PYTHON_BASE_VER=slim-bookworm ARG PYTHON_VENV=/venv # -# ------ prepare virtual env ------ # +# ------ prepare venv ------ # # -FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} as deps_installer -# install build base +FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} as venv_builder + +ARG PYTHON_VENV + +COPY . /source_code + +# ------ install build deps ------ # RUN set -eux; \ - apt-get update && \ + apt-get update ; \ apt-get install -y --no-install-recommends \ python3-dev \ + libcurl4-openssl-dev \ + libssl-dev \ gcc \ - git + git -# install otaclient deps -ARG PYTHON_VENV -ARG OTACLIENT_REQUIREMENTS -COPY "${OTACLIENT_REQUIREMENTS}" /tmp/requirements.txt - -RUN set -eux; \ - python3 -m venv ${PYTHON_VENV} && \ - . 
${PYTHON_VENV}/bin/activate && \ - export PYTHONDONTWRITEBYTECODE=1 && \ - python3 -m pip install --no-cache-dir -U pip setuptools wheel && \ - python3 -m pip install --no-cache-dir -r /tmp/requirements.txt && \ +# ------ setup virtual env and build ------ # +RUN set -eux ; \ + python3 -m venv ${PYTHON_VENV} ; \ + . ${PYTHON_VENV}/bin/activate ; \ + export PYTHONDONTWRITEBYTECODE=1 ; \ + cd /source_code ;\ + python3 -m pip install -U pip ; \ + python3 -m pip install . -# cleanup the virtualenv +# ------ post installation, cleanup ------ # +# cleanup the python venv again # see python-slim Dockerfile for more details +RUN set -eux ; \ find ${PYTHON_VENV} -depth \ - \( \ - \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ - -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name 'libpython*.a' \) \) \ - \) -exec rm -rf '{}' + \ No newline at end of file + \( \ + \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ + -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name 'libpython*.a' \) \) \ + \) -exec rm -rf '{}' + + +# +# ------ build final image ------ # +# +FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} + +ARG PYTHON_VENV +ARG CMD_NAME + +COPY --from=venv_builder ${PYTHON_VENV} ${PYTHON_VENV} + +ENV PATH="${PYTHON_VENV}/bin:${PATH}" + +CMD ["iot_logging_server"] \ No newline at end of file From 0c0209203bc82793edba9f6ac150a49e2c032279 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 08:03:05 +0000 Subject: [PATCH 004/128] Dockerfile: add mount placeholder for /opt and /greengrass --- Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Dockerfile b/Dockerfile index fbfb9d4..5d7c675 100644 --- a/Dockerfile +++ b/Dockerfile @@ -51,6 +51,9 @@ ARG CMD_NAME COPY --from=venv_builder ${PYTHON_VENV} ${PYTHON_VENV} +# add mount points placeholder +RUN mkdir -p /opt /greengrass + ENV PATH="${PYTHON_VENV}/bin:${PATH}" CMD ["iot_logging_server"] \ No newline at end of file From 8ea8013ad45675dc3748d1f9af18c5cc40effae9 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 08:41:09 +0000 Subject: [PATCH 005/128] add example systemd service file and aws_profile_info.yaml --- examples/aws_profile_info.yaml | 10 ++++++++++ examples/otaclient-logger.service | 17 +++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 examples/aws_profile_info.yaml create mode 100644 examples/otaclient-logger.service diff --git a/examples/aws_profile_info.yaml b/examples/aws_profile_info.yaml new file mode 100644 index 0000000..49ef626 --- /dev/null +++ b/examples/aws_profile_info.yaml @@ -0,0 +1,10 @@ +profiles: + - profile_name: "profile-dev" + account_id: "012345678901" + credential_endpoint_url: "https://abcdefghijk01.credentials.iot.region.amazonaws.com/" + - profile_name: "profile-stg" + account_id: "012345678902" + credential_endpoint_url: "https://abcdefghijk02.credentials.iot.region.amazonaws.com/" + - profile_name: "profile-prd" + account_id: "012345678903" + credential_endpoint_url: "https://abcdefghijk03.credentials.iot.region.amazonaws.com/" \ No newline at end of file diff --git a/examples/otaclient-logger.service b/examples/otaclient-logger.service new file mode 100644 index 0000000..f39416f --- /dev/null +++ b/examples/otaclient-logger.service @@ -0,0 +1,17 @@ +[Unit] +Description=OTAClient AWS Iot logging server +Wants=network-online.target +After=network-online.target nss-lookup.target + +[Service] +RootImage=/opt/ota/client/iot_logging_server.img +BindReadOnlyPaths=/etc/hosts /etc/hostname /greengrass 
/opt +ExecStart=/venv/bin/iot_logging_server +Environment=LISTEN_ADDRESS="127.0.0.1" +Environment=LISTEN_ADDRESS=8083 +Restart=on-failure +RestartSec=10 +Type=simple + +[Install] +WantedBy=multi-user.target \ No newline at end of file From 42fe68b11cc425387e1922d4e87b7de6be7f9656 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 09:20:16 +0000 Subject: [PATCH 006/128] config: credential_endpoint -> credential_endpoint_url; also validate int for account_id --- src/otaclient_iot_logging_server/configs.py | 8 ++++---- .../greengrass_config.py | 4 ++-- tests/data/aws_profile_info.yaml | 19 ++++++++++--------- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index d9e3bdd..6672c4e 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -18,15 +18,15 @@ import logging from pathlib import Path +from typing import Annotated import yaml -from pydantic import AnyHttpUrl, BaseModel +from pydantic import AnyHttpUrl, BaseModel, BeforeValidator, Field from pydantic_settings import BaseSettings, SettingsConfigDict class ConfigurableLoggingServerConfig(BaseSettings): model_config = SettingsConfigDict(frozen=True, validate_default=True) - # the default location of greengrass configuration files. # NOTE(20240209): allow user to change this values with env vars, GREENGRASS_V1_CONFIG: str = "/greengrass/config/config.json" @@ -51,8 +51,8 @@ class AWSProfileInfo(BaseModel): class Profile(BaseModel): model_config = SettingsConfigDict(frozen=True) profile_name: str - account_id: str - credential_endpoint: AnyHttpUrl + account_id: Annotated[str, BeforeValidator(str)] = Field(pattern=r"^\d{12}$") + credential_endpoint_url: AnyHttpUrl profiles: list[Profile] diff --git a/src/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py index f062d43..9517416 100644 --- a/src/otaclient_iot_logging_server/greengrass_config.py +++ b/src/otaclient_iot_logging_server/greengrass_config.py @@ -117,7 +117,7 @@ def parse_v1_config(_raw_cfg: str) -> IoTSessionConfig: thing_name=thing_arn.thing_name, profile=this_profile_info.profile_name, region=thing_arn.region, - aws_credential_provider_endpoint=str(this_profile_info.credential_endpoint), + aws_credential_provider_endpoint=str(this_profile_info.credential_endpoint_url), ) @@ -202,7 +202,7 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: default=None, ) if _cred_endpoint is None: - cred_endpoint = str(this_profile_info.credential_endpoint) + cred_endpoint = str(this_profile_info.credential_endpoint_url) else: cred_endpoint = f"https://{_cred_endpoint.rstrip('/')}/" diff --git a/tests/data/aws_profile_info.yaml b/tests/data/aws_profile_info.yaml index 60014c5..49ef626 100644 --- a/tests/data/aws_profile_info.yaml +++ b/tests/data/aws_profile_info.yaml @@ -1,9 +1,10 @@ -- profile_name: "profile-dev" - account_id: "012345678901" - credential_endpoint: "https://abcdefghijk01.credentials.iot.region.amazonaws.com/" -- profile_name: "profile-stg" - account_id: "012345678902" - credential_endpoint: "https://abcdefghijk02.credentials.iot.region.amazonaws.com/" -- profile_name: "profile-prd" - account_id: "012345678903" - credential_endpoint: "https://abcdefghijk03.credentials.iot.region.amazonaws.com/" \ No newline at end of file +profiles: + - profile_name: "profile-dev" + account_id: "012345678901" + credential_endpoint_url: 
"https://abcdefghijk01.credentials.iot.region.amazonaws.com/" + - profile_name: "profile-stg" + account_id: "012345678902" + credential_endpoint_url: "https://abcdefghijk02.credentials.iot.region.amazonaws.com/" + - profile_name: "profile-prd" + account_id: "012345678903" + credential_endpoint_url: "https://abcdefghijk03.credentials.iot.region.amazonaws.com/" \ No newline at end of file From 4aba116bacfbfda3f2fb2b95e90e3696c432d158 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 09:22:09 +0000 Subject: [PATCH 007/128] add .dockerignore --- .dockerignore | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .dockerignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..fbd2eed --- /dev/null +++ b/.dockerignore @@ -0,0 +1,3 @@ +.devcontainer/ +build/ +dist/ \ No newline at end of file From 475d5e34b712b99246ab3ada12980a7ef80a769e Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 09:30:42 +0000 Subject: [PATCH 008/128] Dockerfile: add missing libcurl4 in the final image --- Dockerfile | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/Dockerfile b/Dockerfile index 5d7c675..a53aaae 100644 --- a/Dockerfile +++ b/Dockerfile @@ -51,6 +51,13 @@ ARG CMD_NAME COPY --from=venv_builder ${PYTHON_VENV} ${PYTHON_VENV} +# add libcurl +RUN set -eux ; \ + apt-get update ; \ + apt-get install -y --no-install-recommends libcurl4 ; \ + rm -rf /var/lib/apt/lists/* + + # add mount points placeholder RUN mkdir -p /opt /greengrass From 21ddd7dbf2f750a3b574374d828e184b6acf35fd Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 10:02:30 +0000 Subject: [PATCH 009/128] create a dedicated Dockerfile for logger-server with tpm2.0 support --- Dockerfile_tpm2.0 | 73 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 Dockerfile_tpm2.0 diff --git a/Dockerfile_tpm2.0 b/Dockerfile_tpm2.0 new file mode 100644 index 0000000..20fb90f --- /dev/null +++ b/Dockerfile_tpm2.0 @@ -0,0 +1,73 @@ +# ------ common build args ------ # +ARG PYTHON_VERSION=3.11.7 +ARG PYTHON_BASE_VER=slim-bookworm +ARG PYTHON_VENV=/venv + +# +# ------ prepare venv ------ # +# +FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} as venv_builder + +ARG PYTHON_VENV + +COPY . /source_code + +# ------ install build deps ------ # +RUN set -eux; \ + apt-get update ; \ + apt-get install -y --no-install-recommends \ + python3-dev \ + libcurl4-openssl-dev \ + libssl-dev \ + gcc \ + git + +# ------ setup virtual env and build ------ # +RUN set -eux ; \ + python3 -m venv ${PYTHON_VENV} ; \ + . ${PYTHON_VENV}/bin/activate ; \ + export PYTHONDONTWRITEBYTECODE=1 ; \ + cd /source_code ;\ + python3 -m pip install -U pip ; \ + python3 -m pip install . 
+ +# ------ post installation, cleanup ------ # +# cleanup the python venv again +# see python-slim Dockerfile for more details +RUN set -eux ; \ + find ${PYTHON_VENV} -depth \ + \( \ + \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ + -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name 'libpython*.a' \) \) \ + \) -exec rm -rf '{}' + + +# +# ------ build final image ------ # +# +FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} + +ARG PYTHON_VENV +ARG CMD_NAME + +COPY --from=venv_builder ${PYTHON_VENV} ${PYTHON_VENV} + +# add missing libs +RUN set -eux ; \ + apt-get update ; \ + apt-get install -y --no-install-recommends \ + libcurl4 \ + libengine-pkcs11-openssl \ + libp11-3 \ + libtpm2-pkcs11-1 ; \ + rm -rf \ + /var/lib/apt/lists/* \ + /root/.cache \ + /tmp/* + + +# add mount points placeholder +RUN mkdir -p /opt /greengrass /etc/tpm2_pkcs11/ + +ENV PATH="${PYTHON_VENV}/bin:${PATH}" + +CMD ["iot_logging_server"] \ No newline at end of file From f623dbd179d15a962310b03acce781b2f22e5d33 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 10:02:57 +0000 Subject: [PATCH 010/128] fix Dockerfile missing libcurl4 --- Dockerfile | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index a53aaae..7456fd5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -51,15 +51,18 @@ ARG CMD_NAME COPY --from=venv_builder ${PYTHON_VENV} ${PYTHON_VENV} -# add libcurl +# add missing libs RUN set -eux ; \ apt-get update ; \ - apt-get install -y --no-install-recommends libcurl4 ; \ - rm -rf /var/lib/apt/lists/* - + apt-get install -y --no-install-recommends \ + libcurl4 ; \ + rm -rf \ + /var/lib/apt/lists/* \ + /root/.cache \ + /tmp/* # add mount points placeholder -RUN mkdir -p /opt /greengrass +RUN mkdir -p /opt /greengrass ENV PATH="${PYTHON_VENV}/bin:${PATH}" From ec8c83e62254e05feebf057685731a72536c5170 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 13 Feb 2024 11:43:42 +0000 Subject: [PATCH 011/128] aws_iot_logger: fix logs dropped when log_stream is not created --- src/otaclient_iot_logging_server/aws_iot_logger.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index e64af77..6826ea6 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -133,10 +133,9 @@ def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): # see docs for more details. 
if _sequence_token := response.get("nextSequenceToken"): self._sequence_tokens[log_stream_name] = _sequence_token - except ( - exceptions.DataAlreadyAcceptedException, - exceptions.InvalidSequenceTokenException, - ) as e: + except exceptions.DataAlreadyAcceptedException: + pass + except exceptions.InvalidSequenceTokenException as e: response = e.response logger.debug(f"{response}: {e!r}") @@ -148,12 +147,14 @@ def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): self._sequence_tokens.pop(log_stream_name, None) else: self._sequence_tokens[log_stream_name] = next_expected_token + raise # let the retry do the logging upload again except client.exceptions.ResourceNotFoundException as e: response = e.response logger.info(f"{log_stream_name=} not found: {e!r}") self._create_log_stream( log_group_name=log_group_name, log_stream_name=log_stream_name ) + raise except Exception as e: logger.error( f"put_log_events failure: {e!r}\n" From 452e841242a2ee309210a433f181d1390973d641 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 00:47:25 +0000 Subject: [PATCH 012/128] remove Dockerfile_tpm2.0 as operating tpm2.0 in container mode is not supported --- Dockerfile_tpm2.0 | 73 ----------------------------------------------- 1 file changed, 73 deletions(-) delete mode 100644 Dockerfile_tpm2.0 diff --git a/Dockerfile_tpm2.0 b/Dockerfile_tpm2.0 deleted file mode 100644 index 20fb90f..0000000 --- a/Dockerfile_tpm2.0 +++ /dev/null @@ -1,73 +0,0 @@ -# ------ common build args ------ # -ARG PYTHON_VERSION=3.11.7 -ARG PYTHON_BASE_VER=slim-bookworm -ARG PYTHON_VENV=/venv - -# -# ------ prepare venv ------ # -# -FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} as venv_builder - -ARG PYTHON_VENV - -COPY . /source_code - -# ------ install build deps ------ # -RUN set -eux; \ - apt-get update ; \ - apt-get install -y --no-install-recommends \ - python3-dev \ - libcurl4-openssl-dev \ - libssl-dev \ - gcc \ - git - -# ------ setup virtual env and build ------ # -RUN set -eux ; \ - python3 -m venv ${PYTHON_VENV} ; \ - . ${PYTHON_VENV}/bin/activate ; \ - export PYTHONDONTWRITEBYTECODE=1 ; \ - cd /source_code ;\ - python3 -m pip install -U pip ; \ - python3 -m pip install . 
- -# ------ post installation, cleanup ------ # -# cleanup the python venv again -# see python-slim Dockerfile for more details -RUN set -eux ; \ - find ${PYTHON_VENV} -depth \ - \( \ - \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ - -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name 'libpython*.a' \) \) \ - \) -exec rm -rf '{}' + - -# -# ------ build final image ------ # -# -FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} - -ARG PYTHON_VENV -ARG CMD_NAME - -COPY --from=venv_builder ${PYTHON_VENV} ${PYTHON_VENV} - -# add missing libs -RUN set -eux ; \ - apt-get update ; \ - apt-get install -y --no-install-recommends \ - libcurl4 \ - libengine-pkcs11-openssl \ - libp11-3 \ - libtpm2-pkcs11-1 ; \ - rm -rf \ - /var/lib/apt/lists/* \ - /root/.cache \ - /tmp/* - - -# add mount points placeholder -RUN mkdir -p /opt /greengrass /etc/tpm2_pkcs11/ - -ENV PATH="${PYTHON_VENV}/bin:${PATH}" - -CMD ["iot_logging_server"] \ No newline at end of file From 8549f9c1193e8626910c3837f1647b62bec26825 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 01:11:49 +0000 Subject: [PATCH 013/128] configs: add UPLOAD_LOGGING_SERVER_LOGS and SERVER_LOGSTREAM_SUFFIX, change SERVER_LOGGING_LEVEL field to Literal field --- src/otaclient_iot_logging_server/configs.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 6672c4e..0920334 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -16,14 +16,15 @@ from __future__ import annotations -import logging from pathlib import Path -from typing import Annotated +from typing import Annotated, Literal import yaml from pydantic import AnyHttpUrl, BaseModel, BeforeValidator, Field from pydantic_settings import BaseSettings, SettingsConfigDict +_LoggingLevelName = Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] + class ConfigurableLoggingServerConfig(BaseSettings): model_config = SettingsConfigDict(frozen=True, validate_default=True) @@ -37,7 +38,9 @@ class ConfigurableLoggingServerConfig(BaseSettings): LISTEN_ADDRESS: str = "127.0.0.1" LISTEN_PORT: int = 8083 - SERVER_LOGGING_LEVEL: int = logging.INFO + UPLOAD_LOGGING_SERVER_LOGS: bool = False + SERVER_LOGSTREAM_SUFFIX: str = "iot_logging_server" + SERVER_LOGGING_LEVEL: _LoggingLevelName = "INFO" SERVER_LOGGING_LOG_FORMAT: str = ( "[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s" ) From 60a6816202279d7a6b12692eb330ec15ad94d281 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 01:59:56 +0000 Subject: [PATCH 014/128] add support for upload server logging --- src/otaclient_iot_logging_server/__main__.py | 77 +++++++++++++++++-- .../log_proxy_server.py | 3 +- 2 files changed, 70 insertions(+), 10 deletions(-) diff --git a/src/otaclient_iot_logging_server/__main__.py b/src/otaclient_iot_logging_server/__main__.py index cbc23f0..e0cc922 100644 --- a/src/otaclient_iot_logging_server/__main__.py +++ b/src/otaclient_iot_logging_server/__main__.py @@ -16,28 +16,89 @@ from __future__ import annotations import logging +import time +from queue import Queue from otaclient_iot_logging_server import __version__ from otaclient_iot_logging_server import package_name as root_package_name +from otaclient_iot_logging_server._common import LogMessage from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.greengrass_config import parse_config 
from otaclient_iot_logging_server.log_proxy_server import launch_server -def main(): - # ------ configure the root logger ------ # +class _LogTeeHandler(logging.Handler): + """Tee the local loggings to a queue.""" + + def __init__( + self, + queue: Queue[tuple[str, LogMessage]], + logstream_suffix: str, + level: int | str = 0, + ) -> None: + super().__init__(level) + self._queue = queue + self._logstream_suffix = logstream_suffix + + def emit(self, record) -> None: + try: + self._queue.put_nowait( + ( + self._logstream_suffix, + LogMessage( + timestamp=int(time.time()) * 1000, # milliseconds + message=self.format(record), + ), + ) + ) + except Exception: + pass + + +def _config_logging( + queue: Queue, + *, + format: str, + level: str, + enable_server_log: bool, + server_logstream_suffix: str, +): # NOTE: for the root logger, set to CRITICAL to filter away logs from other # external modules unless reached CRITICAL level. - logging.basicConfig( - level=logging.CRITICAL, format=server_cfg.SERVER_LOGGING_LOG_FORMAT, force=True - ) + logging.basicConfig(level=logging.CRITICAL, format=format, force=True) # NOTE: set the to the package root logger root_logger = logging.getLogger(root_package_name) - root_logger.setLevel(server_cfg.SERVER_LOGGING_LEVEL) - # ------ launch server ------ # + root_logger.setLevel(level) + + if enable_server_log and server_logstream_suffix: + _tee_handler = _LogTeeHandler( + queue=queue, + logstream_suffix=server_logstream_suffix, + level=level, + ) + _fmt = logging.Formatter(fmt=server_cfg.SERVER_LOGGING_LOG_FORMAT) + _tee_handler.setFormatter(_fmt) + + # attach the log tee handler to the root logger + root_logger.addHandler(_tee_handler) + + return root_logger + + +def main(): + queue = Queue(maxsize=server_cfg.MAX_LOGS_BACKLOG) + + root_logger = _config_logging( + queue, + format=server_cfg.SERVER_LOGGING_LOG_FORMAT, + level=server_cfg.SERVER_LOGGING_LEVEL, + enable_server_log=server_cfg.UPLOAD_LOGGING_SERVER_LOGS, + server_logstream_suffix=server_cfg.SERVER_LOGSTREAM_SUFFIX, + ) + launch_server( parse_config(), - max_logs_backlog=server_cfg.MAX_LOGS_BACKLOG, + queue=queue, max_logs_per_merge=server_cfg.MAX_LOGS_PER_MERGE, interval=server_cfg.UPLOAD_INTERVAL, ) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index e57bf19..8c64b7e 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -72,11 +72,10 @@ async def _logging_post_handler(self, request: Request): def launch_server( session_config: IoTSessionConfig, - max_logs_backlog: int, + queue: Queue[tuple[str, LogMessage]], max_logs_per_merge: int, interval: int, ): - queue = Queue(maxsize=max_logs_backlog) start_sending_msg_thread( AWSIoTLogger( session_config=session_config, From 03557fbc8e87ef6c6e37e51f89da5be59c0ff056 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 02:04:16 +0000 Subject: [PATCH 015/128] update otaclient-logger.service example --- examples/otaclient-logger.service | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/otaclient-logger.service b/examples/otaclient-logger.service index f39416f..270cca2 100644 --- a/examples/otaclient-logger.service +++ b/examples/otaclient-logger.service @@ -1,14 +1,14 @@ [Unit] -Description=OTAClient AWS Iot logging server +Description=OTAClient AWS Iot logging server Wants=network-online.target After=network-online.target nss-lookup.target [Service] 
-RootImage=/opt/ota/client/iot_logging_server.img -BindReadOnlyPaths=/etc/hosts /etc/hostname /greengrass /opt -ExecStart=/venv/bin/iot_logging_server -Environment=LISTEN_ADDRESS="127.0.0.1" +ExecStart=/opt/ota/iot_logger/venv/bin/iot_logging_server +Environment=LISTEN_ADDRESS=127.0.0.1 Environment=LISTEN_ADDRESS=8083 +Environment=UPLOAD_LOGGING_SERVER_LOGS=true +Environment=SERVER_LOGGING_LEVEL=INFO Restart=on-failure RestartSec=10 Type=simple From 5bdbf217a246758899572db2d20d3cfbc70905d3 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 02:27:28 +0000 Subject: [PATCH 016/128] log_proxy_server: properly typing launch_server function --- src/otaclient_iot_logging_server/log_proxy_server.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index 8c64b7e..67e73b7 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -22,6 +22,7 @@ from aiohttp import web from aiohttp.web import Request +from typing_extensions import NoReturn from otaclient_iot_logging_server._common import LogMessage from otaclient_iot_logging_server.aws_iot_logger import ( @@ -75,7 +76,7 @@ def launch_server( queue: Queue[tuple[str, LogMessage]], max_logs_per_merge: int, interval: int, -): +) -> NoReturn: # type: ignore start_sending_msg_thread( AWSIoTLogger( session_config=session_config, @@ -90,4 +91,5 @@ def launch_server( app.add_routes([web.post(r"/{ecu_id}", handler._logging_post_handler)]) # actual launch the server and serving + # typing: run_app is a NoReturn method web.run_app(app, host=server_cfg.LISTEN_ADDRESS, port=server_cfg.LISTEN_PORT) From dc01df698d20e37804ac72fd1f9f1e3431d82352 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 02:28:33 +0000 Subject: [PATCH 017/128] add support for uploading iot_logging_server logs to cloud too --- src/otaclient_iot_logging_server/__main__.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/src/otaclient_iot_logging_server/__main__.py b/src/otaclient_iot_logging_server/__main__.py index e0cc922..11e181f 100644 --- a/src/otaclient_iot_logging_server/__main__.py +++ b/src/otaclient_iot_logging_server/__main__.py @@ -19,6 +19,8 @@ import time from queue import Queue +from typing_extensions import NoReturn + from otaclient_iot_logging_server import __version__ from otaclient_iot_logging_server import package_name as root_package_name from otaclient_iot_logging_server._common import LogMessage @@ -34,9 +36,8 @@ def __init__( self, queue: Queue[tuple[str, LogMessage]], logstream_suffix: str, - level: int | str = 0, ) -> None: - super().__init__(level) + super().__init__() self._queue = queue self._logstream_suffix = logstream_suffix @@ -74,18 +75,18 @@ def _config_logging( _tee_handler = _LogTeeHandler( queue=queue, logstream_suffix=server_logstream_suffix, - level=level, ) _fmt = logging.Formatter(fmt=server_cfg.SERVER_LOGGING_LOG_FORMAT) _tee_handler.setFormatter(_fmt) # attach the log tee handler to the root logger root_logger.addHandler(_tee_handler) + root_logger.info(f"enable server logs upload with {server_logstream_suffix=}") return root_logger -def main(): +def main() -> NoReturn: queue = Queue(maxsize=server_cfg.MAX_LOGS_BACKLOG) root_logger = _config_logging( @@ -96,6 +97,11 @@ def main(): server_logstream_suffix=server_cfg.SERVER_LOGSTREAM_SUFFIX, ) + root_logger.info( + f"launching 
iot_logging_server({__version__}) at http://{server_cfg.LISTEN_ADDRESS}:{server_cfg.LISTEN_PORT}" + ) + root_logger.info(f"iot_logging_server config: \n{server_cfg}") + launch_server( parse_config(), queue=queue, @@ -103,10 +109,6 @@ def main(): interval=server_cfg.UPLOAD_INTERVAL, ) - root_logger.info( - f"logger server({__version__}) is launched at http://{server_cfg.LISTEN_ADDRESS}:{server_cfg.LISTEN_PORT}" - ) - if __name__ == "__main__": main() From c475b4942031e3816d56b8ce20567373152c29d5 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 02:45:37 +0000 Subject: [PATCH 018/128] add on_release action --- .github/workflows/on_release.yaml | 32 +++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 .github/workflows/on_release.yaml diff --git a/.github/workflows/on_release.yaml b/.github/workflows/on_release.yaml new file mode 100644 index 0000000..068c8c8 --- /dev/null +++ b/.github/workflows/on_release.yaml @@ -0,0 +1,32 @@ +name: on_release + +on: + release: + types: [published] + +permissions: + contents: write # upload artifacts requires this permission + +jobs: + build_wheel: + runs-on: ubuntu-22.04 + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + + - name: Setup python environment + uses: actions/setup-python@v5 + with: + python-version: "3.8" + + - name: Build wheel + run: | + python3 -m pip install -U pip + python3 -m pip install hatch + hatch build -t wheel + + - name: Upload built wheel as release asset + uses: softprops/action-gh-release@v1 + with: + files: dist/*.whl \ No newline at end of file From 48a144fd10570a3b9c65544743ad668f6ab93bdc Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 10:16:32 +0000 Subject: [PATCH 019/128] pyproject.toml: add awsiot_credentialhelper and pyopenssl deps, remove pycurl --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 95b2cc3..3e86bb3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,11 +18,12 @@ classifiers = [ ] dependencies = [ "aiohttp>=3.9.2, <3.10.0", + "awsiot_credentialhelper==0.6.0", "boto3==1.34.35", "botocore==1.34.35", + "pyopenssl==24.0.0", "pydantic==2.6.0", "pydantic-settings==2.1.0", - "pycurl==7.45.1", "pyyaml==6.0.1", "typing_extensions>=4.0", ] From 19809177ab37a711346935e2cfa638df062af8f8 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 10:22:32 +0000 Subject: [PATCH 020/128] configs.py: credential_endpoint_url -> credential_endpoint --- src/otaclient_iot_logging_server/configs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 0920334..b7cdf38 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -20,7 +20,7 @@ from typing import Annotated, Literal import yaml -from pydantic import AnyHttpUrl, BaseModel, BeforeValidator, Field +from pydantic import BaseModel, BeforeValidator, Field from pydantic_settings import BaseSettings, SettingsConfigDict _LoggingLevelName = Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] @@ -55,7 +55,7 @@ class Profile(BaseModel): model_config = SettingsConfigDict(frozen=True) profile_name: str account_id: Annotated[str, BeforeValidator(str)] = Field(pattern=r"^\d{12}$") - credential_endpoint_url: AnyHttpUrl + credential_endpoint: str profiles: list[Profile] From 26521700eebd0f0e8e86cdcaef25d62a840f1637 Mon Sep 17 00:00:00 2001 From: 
Bodong Yang Date: Wed, 14 Feb 2024 10:23:27 +0000 Subject: [PATCH 021/128] update aws_profile_info.yaml examples accordingly --- examples/aws_profile_info.yaml | 6 +++--- tests/data/aws_profile_info.yaml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/aws_profile_info.yaml b/examples/aws_profile_info.yaml index 49ef626..27a97bb 100644 --- a/examples/aws_profile_info.yaml +++ b/examples/aws_profile_info.yaml @@ -1,10 +1,10 @@ profiles: - profile_name: "profile-dev" account_id: "012345678901" - credential_endpoint_url: "https://abcdefghijk01.credentials.iot.region.amazonaws.com/" + credential_endpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" - profile_name: "profile-stg" account_id: "012345678902" - credential_endpoint_url: "https://abcdefghijk02.credentials.iot.region.amazonaws.com/" + credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" - profile_name: "profile-prd" account_id: "012345678903" - credential_endpoint_url: "https://abcdefghijk03.credentials.iot.region.amazonaws.com/" \ No newline at end of file + credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file diff --git a/tests/data/aws_profile_info.yaml b/tests/data/aws_profile_info.yaml index 49ef626..27a97bb 100644 --- a/tests/data/aws_profile_info.yaml +++ b/tests/data/aws_profile_info.yaml @@ -1,10 +1,10 @@ profiles: - profile_name: "profile-dev" account_id: "012345678901" - credential_endpoint_url: "https://abcdefghijk01.credentials.iot.region.amazonaws.com/" + credential_endpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" - profile_name: "profile-stg" account_id: "012345678902" - credential_endpoint_url: "https://abcdefghijk02.credentials.iot.region.amazonaws.com/" + credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" - profile_name: "profile-prd" account_id: "012345678903" - credential_endpoint_url: "https://abcdefghijk03.credentials.iot.region.amazonaws.com/" \ No newline at end of file + credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file From 9f7d54cb5bdd6a81b82c581fb401270de64b9bf1 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 10:35:54 +0000 Subject: [PATCH 022/128] gg_cfg: add PKCS11 related logics --- .../greengrass_config.py | 51 +++++++++++++++---- 1 file changed, 41 insertions(+), 10 deletions(-) diff --git a/src/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py index 9517416..e5e38cb 100644 --- a/src/otaclient_iot_logging_server/greengrass_config.py +++ b/src/otaclient_iot_logging_server/greengrass_config.py @@ -21,7 +21,7 @@ import re from functools import partial from pathlib import Path -from typing import NamedTuple +from typing import NamedTuple, Optional from urllib.parse import urljoin import yaml @@ -117,7 +117,7 @@ def parse_v1_config(_raw_cfg: str) -> IoTSessionConfig: thing_name=thing_arn.thing_name, profile=this_profile_info.profile_name, region=thing_arn.region, - aws_credential_provider_endpoint=str(this_profile_info.credential_endpoint_url), + aws_credential_provider_endpoint=str(this_profile_info.credential_endpoint), ) @@ -131,8 +131,8 @@ def _v2_complete_uri(_cfg: NestedDict, _uri: str) -> str: aws.greengrass.crypto.Pkcs11Provider section, so these option is striped from priv_key/cert URI. - As we will feed the URI to external openssl pkcs11 engine when using - pycurl, we need to add the userPin information back to URI for openssl. 
+ As we will feed the URI to external pkcs11 libs when using + we need to add the userPin information back to URI. Example pkcs11 URI schema: pkcs11:token=;object=;pin-value=;type= @@ -190,21 +190,38 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: this_profile_info = profile_info.get_profile_info( get_profile_from_thing_name(thing_name) ) + # NOTE(20240207): use credential endpoint defined in the config.yml in prior, # only when this information is not available, we use the # <_AWS_CREDENTIAL_PROVIDER_ENDPOINT_MAPPING> to get endpoint. - _cred_endpoint: str = chain_query( + _cred_endpoint: str + if _cred_endpoint := chain_query( loaded_cfg, "services", "aws.greengrass.Nucleus", "configuration", "iotCredEndpoint", default=None, - ) - if _cred_endpoint is None: - cred_endpoint = str(this_profile_info.credential_endpoint_url) + ): + cred_endpoint = _cred_endpoint else: - cred_endpoint = f"https://{_cred_endpoint.rstrip('/')}/" + cred_endpoint = this_profile_info.credential_endpoint + + # ------ parse pkcs11 config if any ------ # + _raw_pkcs11_cfg: dict[str, str] + pkcs11_cfg = None + if _raw_pkcs11_cfg := chain_query( + loaded_cfg, + "services", + "aws.greengrass.crypto.Pkcs11Provider", + "configuration", + default=None, + ): + pkcs11_cfg = PKCS11Config( + pkcs11_lib=_raw_pkcs11_cfg["library"], + user_pin=_raw_pkcs11_cfg["userPin"], + slot_id=str(_raw_pkcs11_cfg["slot"]), + ) return IoTSessionConfig( # NOTE: v2 config doesn't include account_id info @@ -226,12 +243,25 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: "awsRegion", ), aws_credential_provider_endpoint=cred_endpoint, + pkcs11_config=pkcs11_cfg, ) # # ------ main config parser ------ # # + + +class PKCS11Config(FixedConfig): + """ + See services.aws.greengrass.crypto.Pkcs11Provider section for more details. + """ + + pkcs11_lib: str + slot_id: str + user_pin: str + + class IoTSessionConfig(FixedConfig): """Configurations we need picked from parsed Greengrass V1/V2 configration file. @@ -249,6 +279,7 @@ class IoTSessionConfig(FixedConfig): region: str aws_credential_provider_endpoint: str + pkcs11_config: Optional[PKCS11Config] = None @computed_field @property @@ -271,7 +302,7 @@ def aws_cloudwatch_log_group(self) -> str: def aws_credential_refresh_url(self) -> str: """The endpoint to refresh token from.""" return urljoin( - self.aws_credential_provider_endpoint, + f"https://{self.aws_credential_provider_endpoint.rstrip('/')}/", f"role-aliases/{self.aws_role_alias}/credentials", ) From 06154b854ff54a7e437a646f407acd15be1bacbc Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 11:12:57 +0000 Subject: [PATCH 023/128] _common: add PKCS11URI def --- src/otaclient_iot_logging_server/_common.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/src/otaclient_iot_logging_server/_common.py b/src/otaclient_iot_logging_server/_common.py index 47bd6af..332e724 100644 --- a/src/otaclient_iot_logging_server/_common.py +++ b/src/otaclient_iot_logging_server/_common.py @@ -15,7 +15,7 @@ from __future__ import annotations -from typing import TypedDict +from typing import Literal, TypedDict from typing_extensions import NotRequired @@ -37,3 +37,18 @@ class Credentials(TypedDict): secret_key: str token: str expiry_time: str + + +PKCS11URI = TypedDict( + "PKCS11URI", + { + "object": str, + "pin-value": NotRequired[str], + "token": str, + "type": Literal["cert", "private"], + }, +) +""" +NOTE: not all possible segments are defined here. 
+ see https://www.rfc-editor.org/rfc/rfc7512.html for more details. +""" From 4c68b14420de7440af885799838b5007a469fd3e Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 11:13:31 +0000 Subject: [PATCH 024/128] _utils: add parse_pkcs11_uri --- src/otaclient_iot_logging_server/_utils.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/otaclient_iot_logging_server/_utils.py b/src/otaclient_iot_logging_server/_utils.py index 3c68742..42f596b 100644 --- a/src/otaclient_iot_logging_server/_utils.py +++ b/src/otaclient_iot_logging_server/_utils.py @@ -22,6 +22,8 @@ from pydantic import BaseModel, ConfigDict from typing_extensions import ParamSpec, TypeAlias +from otaclient_iot_logging_server._common import PKCS11URI + RT = TypeVar("RT") P = ParamSpec("P") NestedDict: TypeAlias = "dict[str, Any | 'NestedDict']" @@ -111,3 +113,20 @@ def remove_prefix(_str: str, _prefix: str) -> str: if _str.startswith(_prefix): return _str.replace(_prefix, "", 1) return _str + + +def parse_pkcs11_uri(_pkcs11_uri: str) -> PKCS11URI: + _, pkcs11_opts_str = _pkcs11_uri.split(":", maxsplit=1) + pkcs11_opts_dict = {} + for opt in pkcs11_opts_str.split(";"): + k, v = opt.split("=", maxsplit=1) + pkcs11_opts_dict[k] = v + + return PKCS11URI( + { + "object": pkcs11_opts_dict.get("object", ""), + "pin-value": pkcs11_opts_dict.get("pin-value", ""), + "token": pkcs11_opts_dict.get("token", ""), + "type": pkcs11_opts_dict.get("type", ""), + } + ) From 96acb35bd9473affac4198669faeae5fcf6d02c5 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 11:17:22 +0000 Subject: [PATCH 025/128] gg_config: complete_uri is not needed anymore --- .../greengrass_config.py | 66 +------------------ 1 file changed, 3 insertions(+), 63 deletions(-) diff --git a/src/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py index e5e38cb..588313c 100644 --- a/src/otaclient_iot_logging_server/greengrass_config.py +++ b/src/otaclient_iot_logging_server/greengrass_config.py @@ -27,12 +27,7 @@ import yaml from pydantic import computed_field -from otaclient_iot_logging_server._utils import ( - FixedConfig, - NestedDict, - chain_query, - remove_prefix, -) +from otaclient_iot_logging_server._utils import FixedConfig, chain_query, remove_prefix from otaclient_iot_logging_server.configs import profile_info, server_cfg logger = logging.getLogger(__name__) @@ -124,57 +119,6 @@ def parse_v1_config(_raw_cfg: str) -> IoTSessionConfig: # # ------ v2 configuration parse ------ # # -def _v2_complete_uri(_cfg: NestedDict, _uri: str) -> str: - """Fix up the URI if the URI is pkcs11 URI. - - In gg v2 config, the pin-value(userPin) are specified in - aws.greengrass.crypto.Pkcs11Provider section, so these - option is striped from priv_key/cert URI. - - As we will feed the URI to external pkcs11 libs when using - we need to add the userPin information back to URI. - - Example pkcs11 URI schema: - pkcs11:token=;object=;pin-value=;type= - - Args: - _cfg: the dumped config file dict. - _uri: the input uri for completing. - - Returns: - Original input <_uri> if <_uri> is not a pkcs11 URI, else a completed - pkcs11 URI with pin-value inserted. - - Raises: - ValueError on failing complete a pkcs11 URI. 
- """ - if not _uri.startswith("pkcs11:"): - return _uri - - scheme, pkcs11_opts_str = _uri.split(":", maxsplit=1) - pkcs11_opts_dict = {} - for opt in pkcs11_opts_str.split(";"): - k, v = opt.split("=", maxsplit=1) - pkcs11_opts_dict[k] = v - - try: - user_pin = chain_query( - _cfg, - "services", - "aws.greengrass.crypto.Pkcs11Provider", - "configuration", - "userPin", - ) - pkcs11_opts_dict["pin-value"] = user_pin - except ValueError as e: - raise ValueError( - f"failed to complete pkcs11 URI: {e!r}\nconfig={_cfg}uri=\n{_uri} " - ) - - pkcs11_opts_str = (f"{k}={v}" for k, v in pkcs11_opts_dict.items()) - return f"{scheme}:{';'.join(pkcs11_opts_str)}" - - def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: """Parse Greengrass V2 config yaml and take what we need. @@ -227,12 +171,8 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: # NOTE: v2 config doesn't include account_id info account_id=this_profile_info.account_id, ca_path=chain_query(loaded_cfg, "system", "rootCaPath"), - private_key_path=_v2_complete_uri( - loaded_cfg, chain_query(loaded_cfg, "system", "privateKeyPath") - ), - certificate_path=_v2_complete_uri( - loaded_cfg, chain_query(loaded_cfg, "system", "certificateFilePath") - ), + private_key_path=chain_query(loaded_cfg, "system", "privateKeyPath"), + certificate_path=chain_query(loaded_cfg, "system", "certificateFilePath"), thing_name=thing_name, profile=this_profile_info.profile_name, region=chain_query( From 9e7917d5d02a4e107e971cf1362008af57de6fc2 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 14:10:59 +0000 Subject: [PATCH 026/128] fully refactor boto3_session, dip pycurl --- .../boto3_session.py | 185 +++++++++--------- 1 file changed, 94 insertions(+), 91 deletions(-) diff --git a/src/otaclient_iot_logging_server/boto3_session.py b/src/otaclient_iot_logging_server/boto3_session.py index af0e718..2529630 100644 --- a/src/otaclient_iot_logging_server/boto3_session.py +++ b/src/otaclient_iot_logging_server/boto3_session.py @@ -15,106 +15,109 @@ from __future__ import annotations -import json -import logging +import subprocess +from pathlib import Path -import pycurl -from boto3 import Session -from botocore.credentials import DeferredRefreshableCredentials -from botocore.session import get_session as get_botocore_session +from awsiot_credentialhelper.boto3_session import Boto3SessionProvider +from awsiot_credentialhelper.boto3_session import Pkcs11Config as aws_PKcs11Config +from OpenSSL import crypto -from otaclient_iot_logging_server._common import Credentials +from otaclient_iot_logging_server._utils import parse_pkcs11_uri from otaclient_iot_logging_server.greengrass_config import IoTSessionConfig -logger = logging.getLogger(__name__) +def _load_pkcs11_cert( + pkcs11_lib: str, + slot_id: str, + user_pin: str, + object_label: str, +) -> bytes: + """Load certificate from a pkcs11 interface(backed by a TPM2.0 chip). -class Boto3Session: - """A refreshable boto3 session with pkcs11. - - Reference: - https://github.com/awslabs/aws-iot-core-credential-provider-session-helper/blob/main/src/awsiot_credentialhelper/boto3_session.py + This function requires opensc and libtpm2-pkcs11-1 to be installed, + and a properly setup and working TPM2.0 chip. 
""" + # fmt: off + _cmd = [ + "/usr/bin/pkcs11-tool", + "--module", pkcs11_lib, + "--type", "cert", + "--pin", user_pin, + "--slot", slot_id, + "--label", object_label, + "--read-object", + ] + # fmt: on + return subprocess.check_output(_cmd) + + +def _convert_to_pem(_data: bytes) -> bytes: + """Unconditionally convert input cert to PEM format.""" + if _data.startswith(b"-----BEGIN CERTIFICATE-----"): + return _data + return crypto.dump_certificate( + crypto.FILETYPE_PEM, + crypto.load_certificate(crypto.FILETYPE_ASN1, _data), + ) + + +class Boto3Session: def __init__(self, config: IoTSessionConfig) -> None: self._config = config - def get_session(self, **kwargs) -> Session: - session = get_botocore_session() - # NOTE: session does have an attribute named _credentials - session._credentials = DeferredRefreshableCredentials( # type: ignore - method="sts-assume-role", - refresh_using=self._get_credentials, - ) - session.set_config_variable("region", self._config.region) - - # set other configs if any - for k, v in kwargs.items(): - session.set_config_variable(k, v) - return Session(botocore_session=session) - - def _get_credentials(self) -> Credentials: - """Get credentials using mtls from credential_endpoint.""" - gg_config = self._config - connection = pycurl.Curl() - connection.setopt(pycurl.URL, gg_config.aws_credential_refresh_url) - - # ------ client auth option ------ # - # TPM2.0 support, if private_key is provided as pkcs11 URI, - # enable to use pkcs11 interface from openssl. - _enable_pkcs11_engine = False - if gg_config.private_key_path.startswith("pkcs11:"): - _enable_pkcs11_engine = True - connection.setopt(pycurl.SSLKEYTYPE, "eng") - connection.setopt(pycurl.SSLKEY, gg_config.private_key_path) - - if gg_config.certificate_path.startswith("pkcs11:"): - _enable_pkcs11_engine = True - connection.setopt(pycurl.SSLCERTTYPE, "eng") - connection.setopt(pycurl.SSLCERT, gg_config.certificate_path) - - if _enable_pkcs11_engine: - connection.setopt(pycurl.SSLENGINE, "pkcs11") - - # ------ server auth option ------ # - connection.setopt(pycurl.SSL_VERIFYPEER, 1) - connection.setopt(pycurl.CAINFO, gg_config.ca_path) - connection.setopt(pycurl.CAPATH, None) - connection.setopt(pycurl.SSL_VERIFYHOST, 2) - - # ------ set required header ------ # - headers = [f"x-amzn-iot-thingname:{gg_config.thing_name}"] - connection.setopt(pycurl.HTTPHEADER, headers) - - # ------ execute the request and parse creds ------ # - response = connection.perform_rs() - status = connection.getinfo(pycurl.HTTP_CODE) - connection.close() - - if status // 100 != 2: - _err_msg = f"failed to get cred: {status=}" - logger.debug(_err_msg) - raise ValueError(_err_msg) - - try: - response_json = json.loads(response) - assert isinstance(response_json, dict), "response is not a json object" - except Exception as e: - _err_msg = f"cred response is invalid: {e!r}\nresponse={response}" - logger.debug(_err_msg) - raise ValueError(_err_msg) - - try: - _creds = response_json["credentials"] - creds = Credentials( - access_key=_creds["accessKeyId"], - secret_key=_creds["secretAccessKey"], - token=_creds["sessionToken"], - expiry_time=_creds["expiration"], + def _load_certificate(self) -> bytes: + """ + NOTE: Boto3SessionProvider only takes PEM format cert. 
+ """ + _path = self._config.certificate_path + if _path.startswith("pkcs11"): + _pkcs11_cfg = self._config.pkcs11_config + assert _pkcs11_cfg + + _parsed_cert_uri = parse_pkcs11_uri(_path) + # NOTE: the cert pull from pkcs11 interface is in DER format + return _convert_to_pem( + _load_pkcs11_cert( + pkcs11_lib=_pkcs11_cfg.pkcs11_lib, + slot_id=_pkcs11_cfg.slot_id, + user_pin=_pkcs11_cfg.user_pin, + object_label=_parsed_cert_uri["object"], + ) ) - logger.debug(f"loaded credential={creds}") - return creds - except Exception as e: - _err_msg = f"failed to create Credentials object from response: {e!r}\nresponse_json={response_json}" - logger.debug(_err_msg) - raise ValueError(_err_msg) + return _convert_to_pem(Path(_path).read_bytes()) + + def _get_session(self): + """Get a session that using plain privkey.""" + config = self._config + return Boto3SessionProvider( + endpoint=config.aws_credential_provider_endpoint, + role_alias=config.aws_role_alias, + certificate=self._load_certificate(), + private_key=config.private_key_path, + thing_name=config.thing_name, + ).get_session() + + def _get_session_pkcs11(self): + """Get a session backed by privkey provided by pkcs11.""" + config, pkcs11_cfg = self._config, self._config.pkcs11_config + assert pkcs11_cfg + + input_pkcs11_cfg = aws_PKcs11Config( + pkcs11_lib=pkcs11_cfg.pkcs11_lib, + slot_id=int(pkcs11_cfg.slot_id), + user_pin=pkcs11_cfg.user_pin, + ) + + return Boto3SessionProvider( + endpoint=config.aws_credential_provider_endpoint, + role_alias=config.aws_role_alias, + certificate=self._load_certificate(), + thing_name=config.thing_name, + pkcs11=input_pkcs11_cfg, + ).get_session() + + def get_session(self): + if self._config.private_key_path.startswith("pkcs11"): + return self._get_session_pkcs11() + return self._get_session() From 6fb5bee1efb1e2fa36309066022454347060da0f Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 14 Feb 2024 14:47:43 +0000 Subject: [PATCH 027/128] minor update --- .../aws_iot_logger.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 6826ea6..2c2c72c 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -34,7 +34,7 @@ def get_log_stream_name(thing_name: str, log_stream_sufix: str) -> str: """Compose LogStream name. - Schema: YYYY/MM/DD/ + Schema: YYYY/MM/DD// """ fmt = "{strftime:%Y/%m/%d}".format(strftime=datetime.utcnow()) return f"{fmt}/{thing_name}/{log_stream_sufix}" @@ -71,6 +71,8 @@ def __init__( @retry(max_retry=16, backoff_factor=2, backoff_max=32) def _create_log_group(self, log_group_name: str): + # TODO: (20240214) should we let the edge side iot_logging_server + # create the log group? 
try: self._client.create_log_group(logGroupName=log_group_name) logger.info(f"{log_group_name=} has been created") @@ -91,9 +93,9 @@ def _create_log_stream(self, log_group_name: str, log_stream_name: str): ) logger.info(f"{log_stream_name=}@{log_group_name} has been created") self._sequence_tokens = {} # clear sequence token on new stream created - except self._exception.ResourceAlreadyExistsException: + except self._exception.ResourceAlreadyExistsException as e: logger.debug( - f"{log_stream_name=}@{log_group_name} already existed, skip creating" + f"{log_stream_name=}@{log_group_name} already existed, skip creating: {e.response}" ) except Exception as e: logger.error(f"failed to create {log_stream_name=}@{log_group_name}: {e!r}") @@ -119,6 +121,7 @@ def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): } if _seq_token := self._sequence_tokens.get(log_stream_name): request["sequenceToken"] = _seq_token + # check message_list length if len(message_list) > self.MAX_LOGS_PER_PUT: logger.warning( @@ -133,11 +136,12 @@ def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): # see docs for more details. if _sequence_token := response.get("nextSequenceToken"): self._sequence_tokens[log_stream_name] = _sequence_token + # logger.debug(f"successfully uploaded: {response}") except exceptions.DataAlreadyAcceptedException: pass except exceptions.InvalidSequenceTokenException as e: response = e.response - logger.debug(f"{response}: {e!r}") + logger.debug(f"invalid sequence token: {response}") _resp_err_msg: str = chain_query(e.response, "Error", "Message", default="") # null as the next sequenceToken means don't include any @@ -150,7 +154,7 @@ def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): raise # let the retry do the logging upload again except client.exceptions.ResourceNotFoundException as e: response = e.response - logger.info(f"{log_stream_name=} not found: {e!r}") + logger.debug(f"{log_stream_name=} not found: {e!r}") self._create_log_stream( log_group_name=log_group_name, log_stream_name=log_stream_name ) @@ -166,7 +170,8 @@ def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): def thread_main(self) -> NoReturn: """Main entry for running this iot_logger in a thread.""" while True: - # merge message + # merge LogMessages into the same source, identified by + # log_stream_suffix. message_dict: dict[str, list[LogMessage]] = {} _merge_count = 0 From 9d45257266951e70bdda3372576f82f994e96400 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 15 Feb 2024 10:21:03 +0000 Subject: [PATCH 028/128] boto3_session: pass priv_key_label into the session --- src/otaclient_iot_logging_server/boto3_session.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/otaclient_iot_logging_server/boto3_session.py b/src/otaclient_iot_logging_server/boto3_session.py index 2529630..eccee80 100644 --- a/src/otaclient_iot_logging_server/boto3_session.py +++ b/src/otaclient_iot_logging_server/boto3_session.py @@ -29,8 +29,7 @@ def _load_pkcs11_cert( pkcs11_lib: str, slot_id: str, - user_pin: str, - object_label: str, + private_key_label: str, ) -> bytes: """Load certificate from a pkcs11 interface(backed by a TPM2.0 chip). 
@@ -42,9 +41,8 @@ def _load_pkcs11_cert( "/usr/bin/pkcs11-tool", "--module", pkcs11_lib, "--type", "cert", - "--pin", user_pin, "--slot", slot_id, - "--label", object_label, + "--label", private_key_label, "--read-object", ] # fmt: on @@ -81,8 +79,7 @@ def _load_certificate(self) -> bytes: _load_pkcs11_cert( pkcs11_lib=_pkcs11_cfg.pkcs11_lib, slot_id=_pkcs11_cfg.slot_id, - user_pin=_pkcs11_cfg.user_pin, - object_label=_parsed_cert_uri["object"], + private_key_label=_parsed_cert_uri["object"], ) ) return _convert_to_pem(Path(_path).read_bytes()) @@ -103,10 +100,12 @@ def _get_session_pkcs11(self): config, pkcs11_cfg = self._config, self._config.pkcs11_config assert pkcs11_cfg + _parsed_key_uri = parse_pkcs11_uri(config.private_key_path) input_pkcs11_cfg = aws_PKcs11Config( pkcs11_lib=pkcs11_cfg.pkcs11_lib, slot_id=int(pkcs11_cfg.slot_id), user_pin=pkcs11_cfg.user_pin, + private_key_label=_parsed_key_uri.get("object"), ) return Boto3SessionProvider( From c8d9138f46e9005541ce25c22c61beeeeff6be91 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Fri, 16 Feb 2024 01:24:53 +0000 Subject: [PATCH 029/128] minor update --- src/otaclient_iot_logging_server/_common.py | 2 +- .../boto3_session.py | 17 +++++++++++------ 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/src/otaclient_iot_logging_server/_common.py b/src/otaclient_iot_logging_server/_common.py index 332e724..189c913 100644 --- a/src/otaclient_iot_logging_server/_common.py +++ b/src/otaclient_iot_logging_server/_common.py @@ -43,7 +43,7 @@ class Credentials(TypedDict): "PKCS11URI", { "object": str, - "pin-value": NotRequired[str], + "pin-value": str, "token": str, "type": Literal["cert", "private"], }, diff --git a/src/otaclient_iot_logging_server/boto3_session.py b/src/otaclient_iot_logging_server/boto3_session.py index eccee80..d6d2224 100644 --- a/src/otaclient_iot_logging_server/boto3_session.py +++ b/src/otaclient_iot_logging_server/boto3_session.py @@ -17,6 +17,7 @@ import subprocess from pathlib import Path +from typing import Optional from awsiot_credentialhelper.boto3_session import Boto3SessionProvider from awsiot_credentialhelper.boto3_session import Pkcs11Config as aws_PKcs11Config @@ -30,6 +31,7 @@ def _load_pkcs11_cert( pkcs11_lib: str, slot_id: str, private_key_label: str, + user_pin: Optional[str] = None, ) -> bytes: """Load certificate from a pkcs11 interface(backed by a TPM2.0 chip). 
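Using example values taken from the repository's TPM2.0 test fixture, the argv
assembled by this helper ends up as the list below (illustrative only;
certificate objects on a token are typically public, which is presumably why
the next hunk makes the PIN optional and only appends --pin when configured):

    [
        "/usr/bin/pkcs11-tool",
        "--module", "/usr/lib/x86_64-linux-gnu/pkcs11/libtpm2_pkcs11.so",
        "--type", "cert",
        "--slot", "1",
        "--label", "greengrass_key",
        "--read-object",
    ]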
@@ -45,6 +47,8 @@ def _load_pkcs11_cert( "--label", private_key_label, "--read-object", ] + if user_pin: + _cmd.extend(["--pin", user_pin]) # fmt: on return subprocess.check_output(_cmd) @@ -63,6 +67,7 @@ class Boto3Session: def __init__(self, config: IoTSessionConfig) -> None: self._config = config + self._pkcs11_cfg = config.pkcs11_config def _load_certificate(self) -> bytes: """ @@ -70,16 +75,16 @@ def _load_certificate(self) -> bytes: """ _path = self._config.certificate_path if _path.startswith("pkcs11"): - _pkcs11_cfg = self._config.pkcs11_config - assert _pkcs11_cfg + assert (pkcs11_cfg := self._pkcs11_cfg) _parsed_cert_uri = parse_pkcs11_uri(_path) # NOTE: the cert pull from pkcs11 interface is in DER format return _convert_to_pem( _load_pkcs11_cert( - pkcs11_lib=_pkcs11_cfg.pkcs11_lib, - slot_id=_pkcs11_cfg.slot_id, + pkcs11_lib=pkcs11_cfg.pkcs11_lib, + slot_id=pkcs11_cfg.slot_id, private_key_label=_parsed_cert_uri["object"], + user_pin=pkcs11_cfg.user_pin, ) ) return _convert_to_pem(Path(_path).read_bytes()) @@ -97,8 +102,8 @@ def _get_session(self): def _get_session_pkcs11(self): """Get a session backed by privkey provided by pkcs11.""" - config, pkcs11_cfg = self._config, self._config.pkcs11_config - assert pkcs11_cfg + config = self._config + assert (pkcs11_cfg := self._pkcs11_cfg) _parsed_key_uri = parse_pkcs11_uri(config.private_key_path) input_pkcs11_cfg = aws_PKcs11Config( From 635187c54b6e16677112e84fa6c7dff2887ebdfb Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Fri, 16 Feb 2024 01:31:34 +0000 Subject: [PATCH 030/128] configs: use RootModel to define AWSProfileInfo --- examples/aws_profile_info.yaml | 19 +++++++++---------- src/otaclient_iot_logging_server/configs.py | 11 ++++++----- tests/data/aws_profile_info.yaml | 19 +++++++++---------- 3 files changed, 24 insertions(+), 25 deletions(-) diff --git a/examples/aws_profile_info.yaml b/examples/aws_profile_info.yaml index 27a97bb..abd9974 100644 --- a/examples/aws_profile_info.yaml +++ b/examples/aws_profile_info.yaml @@ -1,10 +1,9 @@ -profiles: - - profile_name: "profile-dev" - account_id: "012345678901" - credential_endpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" - - profile_name: "profile-stg" - account_id: "012345678902" - credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" - - profile_name: "profile-prd" - account_id: "012345678903" - credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file +- profile_name: "profile-dev" + account_id: "012345678901" + credential_endpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" +- profile_name: "profile-stg" + account_id: "012345678902" + credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" +- profile_name: "profile-prd" + account_id: "012345678903" + credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index b7cdf38..6be0349 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -17,10 +17,10 @@ from __future__ import annotations from pathlib import Path -from typing import Annotated, Literal +from typing import Annotated, Literal, List import yaml -from pydantic import BaseModel, BeforeValidator, Field +from pydantic import BaseModel, BeforeValidator, Field, RootModel from pydantic_settings import BaseSettings, SettingsConfigDict _LoggingLevelName = 
Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] @@ -50,17 +50,18 @@ class ConfigurableLoggingServerConfig(BaseSettings): UPLOAD_INTERVAL: int = 60 # in seconds -class AWSProfileInfo(BaseModel): +class AWSProfileInfo(RootModel): + class Profile(BaseModel): model_config = SettingsConfigDict(frozen=True) profile_name: str account_id: Annotated[str, BeforeValidator(str)] = Field(pattern=r"^\d{12}$") credential_endpoint: str - profiles: list[Profile] + root: List[Profile] def get_profile_info(self, profile_name: str) -> Profile: - for profile in self.profiles: + for profile in self.root: if profile.profile_name == profile_name: return profile raise KeyError(f"failed to get profile info for {profile_name=}") diff --git a/tests/data/aws_profile_info.yaml b/tests/data/aws_profile_info.yaml index 27a97bb..abd9974 100644 --- a/tests/data/aws_profile_info.yaml +++ b/tests/data/aws_profile_info.yaml @@ -1,10 +1,9 @@ -profiles: - - profile_name: "profile-dev" - account_id: "012345678901" - credential_endpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" - - profile_name: "profile-stg" - account_id: "012345678902" - credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" - - profile_name: "profile-prd" - account_id: "012345678903" - credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file +- profile_name: "profile-dev" + account_id: "012345678901" + credential_endpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" +- profile_name: "profile-stg" + account_id: "012345678902" + credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" +- profile_name: "profile-prd" + account_id: "012345678903" + credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file From a5b65d0006ac407e911485cafd5db4a345b4663e Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Fri, 16 Feb 2024 01:56:51 +0000 Subject: [PATCH 031/128] aws_iot_logger: refinement --- .../aws_iot_logger.py | 71 ++++++++----------- 1 file changed, 31 insertions(+), 40 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 2c2c72c..67f5631 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -17,6 +17,7 @@ import logging import time +from collections import defaultdict from datetime import datetime from queue import Empty, Queue from threading import Thread @@ -42,7 +43,6 @@ def get_log_stream_name(thing_name: str, log_stream_sufix: str) -> str: class AWSIoTLogger: """ - Ref: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/logs.html """ @@ -57,26 +57,29 @@ def __init__( interval: int, ): _boto3_session = Boto3Session(session_config) - self._client = _client = _boto3_session.get_session().client( - service_name="logs" - ) + self._client = _boto3_session.get_session().client(service_name="logs") + self._session_config = session_config - self._exception = _client.exceptions + self._log_group_name = session_config.aws_cloudwatch_log_group self._sequence_tokens = {} self._interval = interval self._queue: Queue[tuple[str, LogMessage]] = queue - self._max_logs_per_merge = max_logs_per_merge + # NOTE: add this limitation to ensure all of the log_streams in a merge + # will definitely have entries less than MAX_LOGS_PER_PUT + self._max_logs_per_merge = min(max_logs_per_merge, self.MAX_LOGS_PER_PUT) + # unconditionally create log_group and 
log_stream, do nothing if existed. - self._create_log_group(log_group_name=session_config.aws_cloudwatch_log_group) + self._create_log_group() @retry(max_retry=16, backoff_factor=2, backoff_max=32) - def _create_log_group(self, log_group_name: str): + def _create_log_group(self): # TODO: (20240214) should we let the edge side iot_logging_server # create the log group? + log_group_name, client = self._log_group_name, self._client try: - self._client.create_log_group(logGroupName=log_group_name) + client.create_log_group(logGroupName=log_group_name) logger.info(f"{log_group_name=} has been created") - except self._exception.ResourceAlreadyExistsException as e: + except client.exceptions.ResourceAlreadyExistsException as e: logger.debug( f"{log_group_name=} already existed, skip creating: {e.response}" ) @@ -85,15 +88,16 @@ def _create_log_group(self, log_group_name: str): raise @retry(max_retry=16, backoff_factor=2, backoff_max=32) - def _create_log_stream(self, log_group_name: str, log_stream_name: str): + def _create_log_stream(self, log_stream_name: str): + log_group_name, client = self._log_group_name, self._client try: - self._client.create_log_stream( + client.create_log_stream( logGroupName=log_group_name, logStreamName=log_stream_name, ) logger.info(f"{log_stream_name=}@{log_group_name} has been created") self._sequence_tokens = {} # clear sequence token on new stream created - except self._exception.ResourceAlreadyExistsException as e: + except client.exceptions.ResourceAlreadyExistsException as e: logger.debug( f"{log_stream_name=}@{log_group_name} already existed, skip creating: {e.response}" ) @@ -102,33 +106,20 @@ def _create_log_stream(self, log_group_name: str, log_stream_name: str): raise @retry(backoff_factor=2) - def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): + def send_messages(self, log_stream_name: str, message_list: list[LogMessage]): """ Ref: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/logs/client/put_log_events.html """ - session_config, client = self._session_config, self._client - exceptions = client.exceptions - log_stream_name = get_log_stream_name( - session_config.thing_name, log_stream_suffix - ) - log_group_name = session_config.aws_cloudwatch_log_group - request = { - "logGroupName": log_group_name, + "logGroupName": self._log_group_name, "logStreamName": log_stream_name, "logEvents": message_list, } if _seq_token := self._sequence_tokens.get(log_stream_name): request["sequenceToken"] = _seq_token - # check message_list length - if len(message_list) > self.MAX_LOGS_PER_PUT: - logger.warning( - f"too much logs in a single put, ignore exceeded logs: {self.MAX_LOGS_PER_PUT=}" - ) - message_list = message_list[: self.MAX_LOGS_PER_PUT] - + exceptions, client = self._client.exceptions, self._client try: response = client.put_log_events(**request) # NOTE: the sequenceToken is deprecated, put_log_events will always @@ -152,17 +143,15 @@ def send_messages(self, log_stream_suffix: str, message_list: list[LogMessage]): else: self._sequence_tokens[log_stream_name] = next_expected_token raise # let the retry do the logging upload again - except client.exceptions.ResourceNotFoundException as e: + except exceptions.ResourceNotFoundException as e: response = e.response logger.debug(f"{log_stream_name=} not found: {e!r}") - self._create_log_stream( - log_group_name=log_group_name, log_stream_name=log_stream_name - ) + self._create_log_stream(log_stream_name) raise except Exception as e: logger.error( 
f"put_log_events failure: {e!r}\n" - f"log_group_name={session_config.aws_cloudwatch_log_group}, \n" + f"log_group_name={self._log_group_name}, \n" f"log_stream_name={log_stream_name}" ) raise @@ -172,7 +161,7 @@ def thread_main(self) -> NoReturn: while True: # merge LogMessages into the same source, identified by # log_stream_suffix. - message_dict: dict[str, list[LogMessage]] = {} + message_dict: dict[str, list[LogMessage]] = defaultdict(list) _merge_count = 0 while _merge_count < self._max_logs_per_merge: @@ -181,20 +170,22 @@ def thread_main(self) -> NoReturn: log_stream_suffix, message = _queue.get_nowait() _merge_count += 1 - if log_stream_suffix not in message_dict: - message_dict[log_stream_suffix] = [] message_dict[log_stream_suffix].append(message) except Empty: break for log_stream_suffix, logs in message_dict.items(): - self.send_messages(log_stream_suffix, logs) - + self.send_messages( + get_log_stream_name( + self._session_config.thing_name, log_stream_suffix + ), + logs, + ) time.sleep(self._interval) def start_sending_msg_thread(iot_logger: AWSIoTLogger) -> Thread: _thread = Thread(target=iot_logger.thread_main, daemon=True) _thread.start() - logger.debug("iot logger started") + logger.debug("iot logger thread started") return _thread From d231106473ff6774890c177e3484e3ebe5ecf555 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Fri, 16 Feb 2024 02:28:21 +0000 Subject: [PATCH 032/128] utils: fix parse_pkcs11_uri --- src/otaclient_iot_logging_server/_utils.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/src/otaclient_iot_logging_server/_utils.py b/src/otaclient_iot_logging_server/_utils.py index 42f596b..484d7de 100644 --- a/src/otaclient_iot_logging_server/_utils.py +++ b/src/otaclient_iot_logging_server/_utils.py @@ -121,12 +121,4 @@ def parse_pkcs11_uri(_pkcs11_uri: str) -> PKCS11URI: for opt in pkcs11_opts_str.split(";"): k, v = opt.split("=", maxsplit=1) pkcs11_opts_dict[k] = v - - return PKCS11URI( - { - "object": pkcs11_opts_dict.get("object", ""), - "pin-value": pkcs11_opts_dict.get("pin-value", ""), - "token": pkcs11_opts_dict.get("token", ""), - "type": pkcs11_opts_dict.get("type", ""), - } - ) + return PKCS11URI(**pkcs11_opts_dict) From b7d16897d44d9b1b2f1d88c3812c4f29db8f5d50 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Fri, 16 Feb 2024 02:31:26 +0000 Subject: [PATCH 033/128] minor cleanup --- src/otaclient_iot_logging_server/configs.py | 4 ++-- src/otaclient_iot_logging_server/log_proxy_server.py | 10 +--------- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 6be0349..0c11918 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -17,7 +17,7 @@ from __future__ import annotations from pathlib import Path -from typing import Annotated, Literal, List +from typing import Annotated, List, Literal import yaml from pydantic import BaseModel, BeforeValidator, Field, RootModel @@ -33,7 +33,7 @@ class ConfigurableLoggingServerConfig(BaseSettings): GREENGRASS_V1_CONFIG: str = "/greengrass/config/config.json" GREENGRASS_V2_CONFIG: str = "/greengrass/v2/init_config/config.yaml" - AWS_PROFILE_INFO: str = "/opt/ota/client/aws_profile_info.yaml" + AWS_PROFILE_INFO: str = "/opt/ota/iot_logger/aws_profile_info.yaml" """The path to aws_profile_info.yaml.""" LISTEN_ADDRESS: str = "127.0.0.1" diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py 
b/src/otaclient_iot_logging_server/log_proxy_server.py index 67e73b7..3dc1132 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -36,14 +36,7 @@ class LoggingPostHandler: - """A simple aiohttp server handler that receives logs from otaclient. - - This server listen POST requests on /, and then package the - incoming posted into LogMessage instance as follow: - log_msg = LogMessage(timestamp=, message=) - and then push the instance into queue for aws_iot_logger - to process and upload to AWS cloudwatch. - """ + """A simple aiohttp server handler that receives logs from otaclient.""" def __init__(self, queue: Queue[tuple[str, LogMessage]]) -> None: self._queue = queue @@ -90,6 +83,5 @@ def launch_server( app = web.Application() app.add_routes([web.post(r"/{ecu_id}", handler._logging_post_handler)]) - # actual launch the server and serving # typing: run_app is a NoReturn method web.run_app(app, host=server_cfg.LISTEN_ADDRESS, port=server_cfg.LISTEN_PORT) From 5ca3899ed027e1f9eee63ca3522d092e31fbaa53 Mon Sep 17 00:00:00 2001 From: Bodong-Yang Date: Mon, 26 Feb 2024 02:04:27 +0000 Subject: [PATCH 034/128] use github python.gitignore template --- .gitignore | 164 +++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 159 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index 72b8b24..7ef6cf9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,163 @@ -.devcontainer/ -cache -coverage +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python build/ +develop-eggs/ dist/ -mypy_cache/ -__pycache__/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. 
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# generated sversion file src/otaclient_iot_logging_server/_version.py \ No newline at end of file From dec1d368241001a17f10703082b107e51d8ce82c Mon Sep 17 00:00:00 2001 From: Bodong-Yang Date: Mon, 26 Feb 2024 02:05:43 +0000 Subject: [PATCH 035/128] pyproject.toml: remove fixing to py3.11 for black and pyright --- pyproject.toml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3e86bb3..1fc2a0b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,18 +39,17 @@ dev = [ "pytest==7.4.4", "pytest-asyncio==0.23.4", "pytest-mock==3.12.0", - ] [project.scripts] iot_logging_server = "otaclient_iot_logging_server.__main__:main" [project.urls] +Homepage = "https://github.com/tier4/otaclient-iot-logging-server" Source = "https://github.com/tier4/otaclient-iot-logging-server" [tool.black] line-length = 88 -target-version = ['py311'] [tool.coverage.run] branch = false @@ -70,6 +69,7 @@ skip_covered = true skip_empty = true [tool.hatch.envs.dev] +type = "virtual" features = ["dev"] [tool.hatch.envs.dev.env-vars] @@ -100,10 +100,6 @@ lines_before_imports = 2 skip_gitignore = true known_first_party = ["otaclient_iot_logging_server"] -[tool.pyright] -exclude = ["**/__pycache__"] -pythonVersion = "3.11" - [tool.pytest.ini_options] asyncio_mode = "auto" log_auto_indent = true From b9f3278764142365bf8bfac99d8b9ae54f194b3a Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 26 Feb 2024 08:37:10 +0000 Subject: [PATCH 036/128] boto3_session: tear down Boto3Session class into multiple helper functions --- .../aws_iot_logger.py | 6 +- .../boto3_session.py | 136 ++++++++++-------- src/otaclient_iot_logging_server/configs.py | 3 +- 3 files changed, 81 insertions(+), 64 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 67f5631..a134a85 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -26,7 +26,7 @@ from otaclient_iot_logging_server._common import LogMessage from otaclient_iot_logging_server._utils import chain_query, retry -from otaclient_iot_logging_server.boto3_session 
import Boto3Session +from otaclient_iot_logging_server.boto3_session import get_session from otaclient_iot_logging_server.greengrass_config import IoTSessionConfig logger = logging.getLogger(__name__) @@ -56,8 +56,8 @@ def __init__( max_logs_per_merge: int, interval: int, ): - _boto3_session = Boto3Session(session_config) - self._client = _boto3_session.get_session().client(service_name="logs") + _boto3_session = get_session(session_config) + self._client = _boto3_session.client(service_name="logs") self._session_config = session_config self._log_group_name = session_config.aws_cloudwatch_log_group diff --git a/src/otaclient_iot_logging_server/boto3_session.py b/src/otaclient_iot_logging_server/boto3_session.py index d6d2224..66737f7 100644 --- a/src/otaclient_iot_logging_server/boto3_session.py +++ b/src/otaclient_iot_logging_server/boto3_session.py @@ -24,7 +24,14 @@ from OpenSSL import crypto from otaclient_iot_logging_server._utils import parse_pkcs11_uri -from otaclient_iot_logging_server.greengrass_config import IoTSessionConfig +from otaclient_iot_logging_server.greengrass_config import ( + IoTSessionConfig, + PKCS11Config, +) + +# +# ------ certificate loading helpers ------ # +# def _load_pkcs11_cert( @@ -63,65 +70,74 @@ def _convert_to_pem(_data: bytes) -> bytes: ) -class Boto3Session: - - def __init__(self, config: IoTSessionConfig) -> None: - self._config = config - self._pkcs11_cfg = config.pkcs11_config - - def _load_certificate(self) -> bytes: - """ - NOTE: Boto3SessionProvider only takes PEM format cert. - """ - _path = self._config.certificate_path - if _path.startswith("pkcs11"): - assert (pkcs11_cfg := self._pkcs11_cfg) - - _parsed_cert_uri = parse_pkcs11_uri(_path) - # NOTE: the cert pull from pkcs11 interface is in DER format - return _convert_to_pem( - _load_pkcs11_cert( - pkcs11_lib=pkcs11_cfg.pkcs11_lib, - slot_id=pkcs11_cfg.slot_id, - private_key_label=_parsed_cert_uri["object"], - user_pin=pkcs11_cfg.user_pin, - ) +def _load_certificate(cert_path: str, pkcs11_cfg: Optional[PKCS11Config]) -> bytes: + """ + NOTE: Boto3SessionProvider only takes PEM format cert. 
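    (Illustrative aside, not taken from the patch itself: with the pyOpenSSL
    binding this module already imports, one way to turn a DER blob into PEM is

        # der_bytes is a made-up name for whatever the pkcs11 token returned
        crypto.dump_certificate(
            crypto.FILETYPE_PEM,
            crypto.load_certificate(crypto.FILETYPE_ASN1, der_bytes),
        )

    which is the kind of conversion the _convert_to_pem helper above exists for.)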
+ """ + if cert_path.startswith("pkcs11"): + assert ( + pkcs11_cfg + ), "certificate is provided by pkcs11, but no pkcs11_cfg is not available" + + _parsed_cert_uri = parse_pkcs11_uri(cert_path) + # NOTE: the cert pull from pkcs11 interface is in DER format + return _convert_to_pem( + _load_pkcs11_cert( + pkcs11_lib=pkcs11_cfg.pkcs11_lib, + slot_id=pkcs11_cfg.slot_id, + private_key_label=_parsed_cert_uri["object"], + user_pin=pkcs11_cfg.user_pin, ) - return _convert_to_pem(Path(_path).read_bytes()) - - def _get_session(self): - """Get a session that using plain privkey.""" - config = self._config - return Boto3SessionProvider( - endpoint=config.aws_credential_provider_endpoint, - role_alias=config.aws_role_alias, - certificate=self._load_certificate(), - private_key=config.private_key_path, - thing_name=config.thing_name, - ).get_session() - - def _get_session_pkcs11(self): - """Get a session backed by privkey provided by pkcs11.""" - config = self._config - assert (pkcs11_cfg := self._pkcs11_cfg) - - _parsed_key_uri = parse_pkcs11_uri(config.private_key_path) - input_pkcs11_cfg = aws_PKcs11Config( - pkcs11_lib=pkcs11_cfg.pkcs11_lib, - slot_id=int(pkcs11_cfg.slot_id), - user_pin=pkcs11_cfg.user_pin, - private_key_label=_parsed_key_uri.get("object"), ) + return _convert_to_pem(Path(cert_path).read_bytes()) + + +# +# ------ session creating helpers ------ # +# + + +def _get_session(config: IoTSessionConfig): + """Get a session that using plain privkey.""" + return Boto3SessionProvider( + endpoint=config.aws_credential_provider_endpoint, + role_alias=config.aws_role_alias, + certificate=_load_certificate(config.certificate_path, config.pkcs11_config), + private_key=config.private_key_path, + thing_name=config.thing_name, + ).get_session() - return Boto3SessionProvider( - endpoint=config.aws_credential_provider_endpoint, - role_alias=config.aws_role_alias, - certificate=self._load_certificate(), - thing_name=config.thing_name, - pkcs11=input_pkcs11_cfg, - ).get_session() - - def get_session(self): - if self._config.private_key_path.startswith("pkcs11"): - return self._get_session_pkcs11() - return self._get_session() + +def _get_session_pkcs11(config: IoTSessionConfig): + """Get a session backed by privkey provided by pkcs11.""" + assert (pkcs11_cfg := config.pkcs11_config) + + _parsed_key_uri = parse_pkcs11_uri(config.private_key_path) + input_pkcs11_cfg = aws_PKcs11Config( + pkcs11_lib=pkcs11_cfg.pkcs11_lib, + slot_id=int(pkcs11_cfg.slot_id), + user_pin=pkcs11_cfg.user_pin, + private_key_label=_parsed_key_uri.get("object"), + ) + + return Boto3SessionProvider( + endpoint=config.aws_credential_provider_endpoint, + role_alias=config.aws_role_alias, + certificate=_load_certificate(config.certificate_path, config.pkcs11_config), + thing_name=config.thing_name, + pkcs11=input_pkcs11_cfg, + ).get_session() + + +# API + + +def get_session(config: IoTSessionConfig): + """Get a boto3 session with givin IoTSessionConfig. + + The behavior changes according to whether privkey is provided by + pkcs11 or by plain file, indicating with URI. 
+ """ + if config.private_key_path.startswith("pkcs11"): + return _get_session_pkcs11(config) + return _get_session(config) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 0c11918..f48f0ac 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -17,11 +17,12 @@ from __future__ import annotations from pathlib import Path -from typing import Annotated, List, Literal +from typing import List, Literal import yaml from pydantic import BaseModel, BeforeValidator, Field, RootModel from pydantic_settings import BaseSettings, SettingsConfigDict +from typing_extensions import Annotated _LoggingLevelName = Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] From 763c26056b7af24195a836b533f792e0f9996c54 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 26 Feb 2024 09:22:09 +0000 Subject: [PATCH 037/128] (WIP) for passing strict type check --- src/otaclient_iot_logging_server/_utils.py | 4 ++-- src/otaclient_iot_logging_server/configs.py | 15 +++++++-------- .../greengrass_config.py | 6 +++--- .../log_proxy_server.py | 11 +++++------ 4 files changed, 17 insertions(+), 19 deletions(-) diff --git a/src/otaclient_iot_logging_server/_utils.py b/src/otaclient_iot_logging_server/_utils.py index 484d7de..93ad07b 100644 --- a/src/otaclient_iot_logging_server/_utils.py +++ b/src/otaclient_iot_logging_server/_utils.py @@ -36,7 +36,7 @@ class FixedConfig(BaseModel): _MISSING = object() -def chain_query(_obj: NestedDict, *_paths: str, default=_MISSING) -> Any: +def chain_query(_obj: NestedDict, *_paths: str, default: object = _MISSING) -> Any: """Chain access a nested dict <_obj> according to search <_paths>. For example: @@ -117,7 +117,7 @@ def remove_prefix(_str: str, _prefix: str) -> str: def parse_pkcs11_uri(_pkcs11_uri: str) -> PKCS11URI: _, pkcs11_opts_str = _pkcs11_uri.split(":", maxsplit=1) - pkcs11_opts_dict = {} + pkcs11_opts_dict: dict[str, Any] = {} for opt in pkcs11_opts_str.split(";"): k, v = opt.split("=", maxsplit=1) pkcs11_opts_dict[k] = v diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index f48f0ac..a7217a6 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -51,17 +51,16 @@ class ConfigurableLoggingServerConfig(BaseSettings): UPLOAD_INTERVAL: int = 60 # in seconds -class AWSProfileInfo(RootModel): +class _AWSProfile(BaseModel): + model_config = SettingsConfigDict(frozen=True) + profile_name: str + account_id: Annotated[str, BeforeValidator(str)] = Field(pattern=r"^\d{12}$") + credential_endpoint: str - class Profile(BaseModel): - model_config = SettingsConfigDict(frozen=True) - profile_name: str - account_id: Annotated[str, BeforeValidator(str)] = Field(pattern=r"^\d{12}$") - credential_endpoint: str - root: List[Profile] +class AWSProfileInfo(RootModel[List[_AWSProfile]]): - def get_profile_info(self, profile_name: str) -> Profile: + def get_profile_info(self, profile_name: str) -> _AWSProfile: for profile in self.root: if profile.profile_name == profile_name: return profile diff --git a/src/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py index 588313c..bcb2b32 100644 --- a/src/otaclient_iot_logging_server/greengrass_config.py +++ b/src/otaclient_iot_logging_server/greengrass_config.py @@ -21,7 +21,7 @@ import re from functools import partial from pathlib import Path -from typing import NamedTuple, 
Optional +from typing import Any, NamedTuple, Optional from urllib.parse import urljoin import yaml @@ -89,7 +89,7 @@ def parse_v1_config(_raw_cfg: str) -> IoTSessionConfig: NOTE(20240207): not consider TPM for ggv1. """ - loaded_cfg = json.loads(_raw_cfg) + loaded_cfg: dict[str, Any] = json.loads(_raw_cfg) assert isinstance(loaded_cfg, dict), f"invalid cfg: {_raw_cfg}" _raw_thing_arn = chain_query(loaded_cfg, "coreThing", "thingArn") @@ -127,7 +127,7 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: https://tier4.atlassian.net/wiki/spaces/HIICS/pages/2544042770/TPM+Ubuntu+22.04+Greengrass+v2. https://datatracker.ietf.org/doc/html/rfc7512. """ - loaded_cfg = yaml.safe_load(_raw_cfg) + loaded_cfg: dict[str, Any] = yaml.safe_load(_raw_cfg) assert isinstance(loaded_cfg, dict), f"invalid cfg: {_raw_cfg}" thing_name = chain_query(loaded_cfg, "system", "thingName") diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index 3dc1132..f3d5f2c 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -22,7 +22,6 @@ from aiohttp import web from aiohttp.web import Request -from typing_extensions import NoReturn from otaclient_iot_logging_server._common import LogMessage from otaclient_iot_logging_server.aws_iot_logger import ( @@ -42,7 +41,7 @@ def __init__(self, queue: Queue[tuple[str, LogMessage]]) -> None: self._queue = queue # route: POST /{ecu_id} - async def _logging_post_handler(self, request: Request): + async def logging_post_handler(self, request: Request): """ NOTE: use as log_stream_suffix, each ECU has its own logging stream for uploading. @@ -69,7 +68,7 @@ def launch_server( queue: Queue[tuple[str, LogMessage]], max_logs_per_merge: int, interval: int, -) -> NoReturn: # type: ignore +) -> None: start_sending_msg_thread( AWSIoTLogger( session_config=session_config, @@ -81,7 +80,7 @@ def launch_server( handler = LoggingPostHandler(queue=queue) app = web.Application() - app.add_routes([web.post(r"/{ecu_id}", handler._logging_post_handler)]) + app.add_routes([web.post(r"/{ecu_id}", handler.logging_post_handler)]) - # typing: run_app is a NoReturn method - web.run_app(app, host=server_cfg.LISTEN_ADDRESS, port=server_cfg.LISTEN_PORT) + # typing: run_app is a NoReturn method, unless received signal + web.run_app(app, host=server_cfg.LISTEN_ADDRESS, port=server_cfg.LISTEN_PORT) # type: ignore From dfaf78f698c6868d3014afcb761239eb3c0649af Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 26 Feb 2024 09:27:08 +0000 Subject: [PATCH 038/128] boto3_session: pass strict type check --- .../boto3_session.py | 29 ++++++++++--------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/src/otaclient_iot_logging_server/boto3_session.py b/src/otaclient_iot_logging_server/boto3_session.py index 66737f7..4abd7d7 100644 --- a/src/otaclient_iot_logging_server/boto3_session.py +++ b/src/otaclient_iot_logging_server/boto3_session.py @@ -21,6 +21,7 @@ from awsiot_credentialhelper.boto3_session import Boto3SessionProvider from awsiot_credentialhelper.boto3_session import Pkcs11Config as aws_PKcs11Config +from boto3 import Session from OpenSSL import crypto from otaclient_iot_logging_server._utils import parse_pkcs11_uri @@ -97,7 +98,7 @@ def _load_certificate(cert_path: str, pkcs11_cfg: Optional[PKCS11Config]) -> byt # -def _get_session(config: IoTSessionConfig): +def _get_session(config: IoTSessionConfig) -> Session: """Get a session that using plain 
privkey.""" return Boto3SessionProvider( endpoint=config.aws_credential_provider_endpoint, @@ -105,34 +106,34 @@ def _get_session(config: IoTSessionConfig): certificate=_load_certificate(config.certificate_path, config.pkcs11_config), private_key=config.private_key_path, thing_name=config.thing_name, - ).get_session() + ).get_session() # type: ignore -def _get_session_pkcs11(config: IoTSessionConfig): +def _get_session_pkcs11(config: IoTSessionConfig) -> Session: """Get a session backed by privkey provided by pkcs11.""" - assert (pkcs11_cfg := config.pkcs11_config) + assert ( + pkcs11_cfg := config.pkcs11_config + ), "privkey is provided by pkcs11, but pkcs11_config is not available" _parsed_key_uri = parse_pkcs11_uri(config.private_key_path) - input_pkcs11_cfg = aws_PKcs11Config( - pkcs11_lib=pkcs11_cfg.pkcs11_lib, - slot_id=int(pkcs11_cfg.slot_id), - user_pin=pkcs11_cfg.user_pin, - private_key_label=_parsed_key_uri.get("object"), - ) - return Boto3SessionProvider( endpoint=config.aws_credential_provider_endpoint, role_alias=config.aws_role_alias, certificate=_load_certificate(config.certificate_path, config.pkcs11_config), thing_name=config.thing_name, - pkcs11=input_pkcs11_cfg, - ).get_session() + pkcs11=aws_PKcs11Config( + pkcs11_lib=pkcs11_cfg.pkcs11_lib, + slot_id=int(pkcs11_cfg.slot_id), + user_pin=pkcs11_cfg.user_pin, + private_key_label=_parsed_key_uri.get("object"), + ), + ).get_session() # type: ignore # API -def get_session(config: IoTSessionConfig): +def get_session(config: IoTSessionConfig) -> Session: """Get a boto3 session with givin IoTSessionConfig. The behavior changes according to whether privkey is provided by From e274266a0d69acfb2b2d1caa528782c6658f5d2e Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 26 Feb 2024 10:12:37 +0000 Subject: [PATCH 039/128] utils: properly type retry decorator --- src/otaclient_iot_logging_server/_utils.py | 24 +++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/src/otaclient_iot_logging_server/_utils.py b/src/otaclient_iot_logging_server/_utils.py index 93ad07b..8366730 100644 --- a/src/otaclient_iot_logging_server/_utils.py +++ b/src/otaclient_iot_logging_server/_utils.py @@ -17,7 +17,7 @@ import time from functools import partial, wraps -from typing import Any, Callable, Optional, TypeVar +from typing import Any, Callable, Optional, TypeVar, overload from pydantic import BaseModel, ConfigDict from typing_extensions import ParamSpec, TypeAlias @@ -73,6 +73,28 @@ def chain_query(_obj: NestedDict, *_paths: str, default: object = _MISSING) -> A raise ValueError(f"chain query with {_paths=} failed: {e!r}") from e +@overload +def retry( + func: None = None, + /, + backoff_factor: float = 0.1, + backoff_max: int = 6, + max_retry: int = 6, + retry_on_exceptions: tuple[type[Exception], ...] = (Exception,), +) -> partial[Any]: ... + + +@overload +def retry( + func: Callable[P, RT], + /, + backoff_factor: float = ..., + backoff_max: int = ..., + max_retry: int = ..., + retry_on_exceptions: tuple[type[Exception], ...] = ..., +) -> Callable[P, RT]: ... 
+ + def retry( func: Optional[Callable[P, RT]] = None, /, From c0df655640b35265d0fdbaea332dd40a8f8ed598 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 26 Feb 2024 10:14:39 +0000 Subject: [PATCH 040/128] finish up typing fix up, all modules now pass strict type check --- src/otaclient_iot_logging_server/__main__.py | 10 ++++------ src/otaclient_iot_logging_server/aws_iot_logger.py | 7 ++++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/src/otaclient_iot_logging_server/__main__.py b/src/otaclient_iot_logging_server/__main__.py index 11e181f..f2c701e 100644 --- a/src/otaclient_iot_logging_server/__main__.py +++ b/src/otaclient_iot_logging_server/__main__.py @@ -19,8 +19,6 @@ import time from queue import Queue -from typing_extensions import NoReturn - from otaclient_iot_logging_server import __version__ from otaclient_iot_logging_server import package_name as root_package_name from otaclient_iot_logging_server._common import LogMessage @@ -41,7 +39,7 @@ def __init__( self._queue = queue self._logstream_suffix = logstream_suffix - def emit(self, record) -> None: + def emit(self, record: logging.LogRecord) -> None: try: self._queue.put_nowait( ( @@ -57,7 +55,7 @@ def emit(self, record) -> None: def _config_logging( - queue: Queue, + queue: Queue[tuple[str, LogMessage]], *, format: str, level: str, @@ -86,8 +84,8 @@ def _config_logging( return root_logger -def main() -> NoReturn: - queue = Queue(maxsize=server_cfg.MAX_LOGS_BACKLOG) +def main() -> None: + queue: Queue[tuple[str, LogMessage]] = Queue(maxsize=server_cfg.MAX_LOGS_BACKLOG) root_logger = _config_logging( queue, diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index a134a85..72b1fce 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -21,6 +21,7 @@ from datetime import datetime from queue import Empty, Queue from threading import Thread +from typing import Any from typing_extensions import NoReturn @@ -57,11 +58,11 @@ def __init__( interval: int, ): _boto3_session = get_session(session_config) - self._client = _boto3_session.client(service_name="logs") + self._client = _boto3_session.client(service_name="logs") # type: ignore self._session_config = session_config self._log_group_name = session_config.aws_cloudwatch_log_group - self._sequence_tokens = {} + self._sequence_tokens: dict[str, str | None] = {} self._interval = interval self._queue: Queue[tuple[str, LogMessage]] = queue # NOTE: add this limitation to ensure all of the log_streams in a merge @@ -111,7 +112,7 @@ def send_messages(self, log_stream_name: str, message_list: list[LogMessage]): Ref: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/logs/client/put_log_events.html """ - request = { + request: dict[str, Any] = { "logGroupName": self._log_group_name, "logStreamName": log_stream_name, "logEvents": message_list, From d5aa9ae80b10b42fa3c72df349683fde0075f8f2 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 26 Feb 2024 10:15:29 +0000 Subject: [PATCH 041/128] tests: add sample cert, minor update to gg_v2_cfg.yaml_tpm2.0 --- tests/data/gg_v2_cfg.yaml_tpm2.0 | 6 +++--- tests/data/sample_cert.der | Bin 0 -> 1321 bytes tests/data/sample_cert.pem | 30 ++++++++++++++++++++++++++++++ 3 files changed, 33 insertions(+), 3 deletions(-) create mode 100644 tests/data/sample_cert.der create mode 100644 tests/data/sample_cert.pem diff --git a/tests/data/gg_v2_cfg.yaml_tpm2.0 
b/tests/data/gg_v2_cfg.yaml_tpm2.0 index c48c76a..2a5c509 100644 --- a/tests/data/gg_v2_cfg.yaml_tpm2.0 +++ b/tests/data/gg_v2_cfg.yaml_tpm2.0 @@ -1,6 +1,6 @@ system: - certificateFilePath: "pkcs11:object=greengrass;type=cert;pin-value=greengrass" - privateKeyPath: "pkcs11:object=greengrass;type=private;pin-value=greengrass" + certificateFilePath: "pkcs11:object=greengrass_key;type=cert;pin-value=greengrass_userpin" + privateKeyPath: "pkcs11:object=greengrass_key;type=private;pin-value=greengrass_userpin" rootCaPath: "/greengrass/certs/root.ca.pem" rootpath: "/greengrass/v2" thingName: "thing_name" @@ -18,4 +18,4 @@ services: library: "/usr/lib/x86_64-linux-gnu/pkcs11/libtpm2_pkcs11.so" name: "tpm2_pkcs11" slot: 1 - userPin: "greengrass" \ No newline at end of file + userPin: "greengrass_userpin" \ No newline at end of file diff --git a/tests/data/sample_cert.der b/tests/data/sample_cert.der new file mode 100644 index 0000000000000000000000000000000000000000..e4e192df2660c1d9ed7c8b7d5f4ecfb78cba2030 GIT binary patch literal 1321 zcmXqLVpTP0V&+}I%*4pVBx0YlvPSQ1@d>t>TbcRFRN^|N`dJKk**LY@JlekVGBR?r zG8iZs@)~fnF^95n^RSep7MBURyd9U|Lx0+s6lAOJOqa~@2Riiy} z=TgHR8%!d8ajh%h4CRfwpZM;FVDL`aoM&byMLP698YiCe|K^G^dH(4 zy5Da*78@GvzvJ_I-g{foS#$sL{NoPTDDEP8ai`enCl&B<(UKHZ+EQ2)H;Rnq4>o@?BNO!$NoHws;J z@>;?cJtO9;;QM=~AsdYpLOUFnbh({;J?9H+u~4R(Xc(jK--Po$5@~n+d08x7*qN9a z85kD_8w4820+XLCABz}^h;5XDa^Jxnv6B303W^zC388#nS`6et(#k9n24W4^6@Zej ztS}?ve->5)W*~)}?0_jAnCuuCl70o&Z}hk8S`m=>`1g)}jWcS@@n?_Jy6xK9G~?aU zw@*%Tdk9@px4U^RU8(r_YV{+@T~R)Zo5cj%mVEfyu~Gdcr%#7rzjNpIvy1j`DYWIr;sG?(FJitNBIUCKdNY?JDaj35dNO{-^m^=!)l?oXYm$w|llhX>-z_}JIA zJusZE5~-rH)3ih6)jO%Cd#ca*7Wp`}*-eOeTbCZZdrfh{}&0@ zRsr>fL(d&Tt}WBwea-Zp!&8Y2iA2w{)>D_CDNePOu4}*HyYYy8?G|C7yKbJf`JY2w zVozypS)TqZ`hvy2CI3Gpv?|DE%AMSD!H$3ZYCh+k|BhMpn>+7X@3m8Q+i4f4|8ne3 zzqV$)_L|ztd&()jFIpSxc=d^pWlxA5?ftPb->=iX^QxhGJ@KiAl*{KawptI;xoe{WVBa__WRZI*nl uK4D4uj-VflFI*{`#Q*Wl{%NrXcHi1s{rTCQcR&Apl-}hK@v+Fba~}X6$3|fQ literal 0 HcmV?d00001 diff --git a/tests/data/sample_cert.pem b/tests/data/sample_cert.pem new file mode 100644 index 0000000..9e97c7f --- /dev/null +++ b/tests/data/sample_cert.pem @@ -0,0 +1,30 @@ +-----BEGIN CERTIFICATE----- +MIIFJTCCAw2gAwIBAgIUP2ypfC7tc8gGmbUDDnYkXokajwQwDQYJKoZIhvcNAQEL +BQAwIjENMAsGA1UECwwEdGVzdDERMA8GA1UEAwwIdGVzdF9jcnQwHhcNMjQwMjI2 +MDgyMTUzWhcNMzQwMjIzMDgyMTUzWjAiMQ0wCwYDVQQLDAR0ZXN0MREwDwYDVQQD +DAh0ZXN0X2NydDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAJ451LDo +VGmRIl/hL4hMPlC0vtE2eXhRaLip3+zRFaGTABVJPSM/g7hls+VWWevi7NDVjaIi +7PoGwn4h0AdyIjFBuT26ihn9n65rUFH1wWH0mQP1e3/fCFIhx51s5FF3dkzIhHgY +I6W02D1rqhyKViYr1TLYoqwc5dXribQBQfHvN7D7wMiCG/y0HWFJ4WHL98mIlq36 +nuu+G4U11SIZm6AIhGKOBSiHWbmlMbiwNFj6Cq5wCVUNXGt0mTf0YL3jXXJ1aU8p +rOL4ObI/QsxWr75/bfz6BZSIPuqr+xFjer6LslOjRrrkAFs6oJ/CrKaU+PLEPWCB +Ug2Nr4qdI501LZVXyZUMUIEd+klx3zG7j/0GAm5BZAyfJ9MUVwD9/KpHvuYLPyG0 +vm5LcCGmtsS7+TU8x2eGv92jX5f4K3gt99tBczEyv9xM657vPRWanf0M/gtQsRL2 +cjEO+CpeIaI8XsK9Gm8qTGXCPQFl0N8X5IU+VF/fyblG10+Wef8VeGOX+DaieF6+ +RQicAwVIFDv0JTp6uE74FdmPdv0NkX7ACxrvSzD0DaiA3WQWWxt+IlfDFcW3J9NG +aymTBuzy25Egf+eE6mLz3EmsRxI0DhNhsRLRQkqkBluYXPUR7941VLEyIFWIQaSK +RsnrnPQFcxJpJhVWAU39YM+MGGbdTw0EOUQHAgMBAAGjUzBRMB0GA1UdDgQWBBQ9 +WiAjjsG4XRkPZiAhaEpgVQ70hDAfBgNVHSMEGDAWgBQ9WiAjjsG4XRkPZiAhaEpg +VQ70hDAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4ICAQBi+lF/sU8+ +iqhQaeP7uI8ozCYDX83EfUa6tYKY7qXt5MkLSBLSJz7Zzmcic+erJ8RjilpMo4MW +EYak8PWIsSfpCUyIMY9DibfNor+0dN86EJuTBGuh1/ypQOh0yffkLZt7pqsPFUaS +c4xaunaMdFBd11f8g8ZVqOeyQnf8waleIELr7nsCjsEQB1xB0eEh8+KxGgur7kpO 
+tOQVDi4RnCn1/Ed98eHuNMlBj+HAX3YOB66G4DGXJFkkJLk1iBTq7hqC3iXnDqJM +QoY+kFjtfmdUSDDTNRJ2tIhxYYJEFyOOn3W0ci2VIaJ+zfDblUbf12RiP+JbRneS +07Dc+vNi6w/Z7G3tenf9pK6VtadO+bSv2P/0GEWFECeAwudAVNamL7vWNe5A5Rho +GGFJzTuVp8xzZT0bfofYTbHEH320ExLdRkl9b/NVRF3KKrSnZ+Zb0Di+pP/wYIUg +HWkeybTQPg+vqw5Duf7GOo83Q7qvSrkdtstEQv8eB0L6tWjrSpWFDcpKZR/iDiwf ++0/7rJaE7stwWdWdIWIXNBVw14MxTLNgX6hNSMucYdf0Cu7EE/m76UDwyw5+ocP4 +aog34s7uK+TeEHYPnTM6d+jHD9VbHBH92XsxC7k8qzZjzn9gpHe4Uvij0NR2kg/x +7L+WXcC72rV78+bc7vn88Ru6QFjxcjOJvg== +-----END CERTIFICATE----- From 57d2605f00e40f453ad6d9a4ee13b9a4778151e7 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 27 Feb 2024 08:30:30 +0000 Subject: [PATCH 042/128] _common: update typing --- src/otaclient_iot_logging_server/_common.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/otaclient_iot_logging_server/_common.py b/src/otaclient_iot_logging_server/_common.py index 189c913..8fac4d6 100644 --- a/src/otaclient_iot_logging_server/_common.py +++ b/src/otaclient_iot_logging_server/_common.py @@ -43,12 +43,14 @@ class Credentials(TypedDict): "PKCS11URI", { "object": str, - "pin-value": str, - "token": str, - "type": Literal["cert", "private"], + "pin-value": NotRequired[str], + "token": NotRequired[str], + "type": NotRequired[Literal["cert", "private"]], }, ) """ -NOTE: not all possible segments are defined here. +NOTE: Not all possible segments are defined here. see https://www.rfc-editor.org/rfc/rfc7512.html for more details. + In normal case, (priv_key_label) is enough, as long as there is + only one private key inside the slot. """ From 0ae74c72715bd47d7719bf78785a30a3f30b161d Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 27 Feb 2024 08:37:33 +0000 Subject: [PATCH 043/128] test: add test_utils --- tests/__init__.py | 13 +++ tests/test__utils.py | 255 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 268 insertions(+) create mode 100644 tests/test__utils.py diff --git a/tests/__init__.py b/tests/__init__.py index e69de29..bcfd866 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/tests/test__utils.py b/tests/test__utils.py new file mode 100644 index 0000000..b85012d --- /dev/null +++ b/tests/test__utils.py @@ -0,0 +1,255 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import annotations + +import time +import logging +import random +from typing import Any + +import pytest + +from otaclient_iot_logging_server._utils import ( + NestedDict, + chain_query, + retry, + remove_prefix, + parse_pkcs11_uri, +) + +logger = logging.getLogger(__name__) + + +@pytest.mark.parametrize( + "_input, _paths, _expected, _default", + [ + # test#1: succeeded chain_query + ( + {"a": {"b": {"c": {"d": "e"}}}}, + ["a", "b", "c", "d"], + "e", + None, + ), + # test#2: failed chain query with set to "default_value" + ( + {"a": {"b": {"c": {"d": "e"}}}}, + ["non", "existed", "path"], + "default_value", + "default_value", + ), + ], +) +def test_chain_query( + _input: NestedDict, + _paths: str, + _expected: Any, + _default: Any, +): + _queried = chain_query(_input, *_paths, default=_default) + assert _queried == _expected + + +class TestRetry: + + class HandledException(Exception): + pass + + class UnhandledException(Exception): + pass + + @staticmethod + def _func_factory( + _max_retry: int, + _return_value: Any, + exception_to_raise: list[Any] | None = None, + ): + # return a func that succeeds in first run + if exception_to_raise is None: + return lambda: _return_value + + execution_round = 0 + exception_replay = iter(exception_to_raise) + + def _func(): + nonlocal execution_round, exception_replay + execution_round += 1 + _exception = next(exception_replay, None) + + if _exception is None: + if execution_round <= _max_retry + 1: + return _return_value + logger.error("retrier doesn't work!") + raise ValueError + logger.info(f"{execution_round=}") + raise _exception + + return _func + + def test_normal_finished(self): + """ + Function returns directly without raising any exception. + """ + return_value = random.randint(10**3, 10**6) + _res = retry( + self._func_factory(0, return_value), + retry_on_exceptions=(self.HandledException,), + )() + assert _res == return_value + + def test_successfully_retried(self): + """ + Function failed for some times, within , but finally succeeded. + """ + return_value = random.randint(10**3, 10**6) + max_retries, actual_retries = 8, 7 + + _res = retry( + self._func_factory( + actual_retries, + return_value, + exception_to_raise=[ + self.HandledException for _ in range(actual_retries) + ], + ), + max_retry=max_retries, + backoff_factor=0.01, # for speeding up test + retry_on_exceptions=(self.HandledException,), + )() + assert _res == return_value + + def test_aborted_by_unhandled_exception(self): + return_value = random.randint(10**3, 10**6) + max_retries, actual_retries = 8, 7 + + with pytest.raises(self.UnhandledException): + retry( + self._func_factory( + actual_retries, + return_value, + exception_to_raise=[ + self.HandledException for _ in range(actual_retries - 1) + ] + + [self.UnhandledException], + ), + max_retry=max_retries, + backoff_factor=0.01, # for speeding up test + retry_on_exceptions=(self.HandledException,), + )() + + def test_aborted_by_exceeded_max_retries(self): + return_value = random.randint(10**3, 10**6) + max_retries, actual_retries = 3, 7 + + with pytest.raises(self.HandledException): + _exceptions = [self.HandledException for _ in range(actual_retries)] + retry( + self._func_factory( + actual_retries, + return_value, + exception_to_raise=_exceptions, + ), + max_retry=max_retries, + backoff_factor=0.01, # for speeding up test + retry_on_exceptions=(self.HandledException,), + )() + + def test_retry_session_timecost(self): + """ + For a retry session with the following configurations: + 1. 
backoff_factor = 0.1 + 2. backoff_max = 1 + 3. max_retry = 8 + We should have the time cost sequence as follow: + 0.1, 0.2, 0.4, 0.6, 0.8, 1.0, 1.0, 1.0 + So the retry session should not take more than 6s(~5.1s+) + """ + max_retries, actual_retries = 8, 9 + backoff_factor, backoff_max = 0.1, 1 + expected_retry_session_timecost = sum( + min(backoff_max, backoff_factor * 2**i) for i in range(max_retries) + ) + + return_value = random.randint(10**3, 10**6) + _start_time = time.time() + with pytest.raises(self.HandledException): + retry( + self._func_factory( + actual_retries, + return_value, + exception_to_raise=[ + self.HandledException for _ in range(actual_retries) + ], + ), + max_retry=max_retries, + backoff_factor=backoff_factor, + backoff_max=backoff_max, + retry_on_exceptions=(self.HandledException,), + )() + assert time.time() - _start_time <= expected_retry_session_timecost + + +@pytest.mark.parametrize( + "_input, _prefix, _expected", + [ + # test#1: test remove schema from pkcs11 URI + ( + "pkcs11:token=token;object=object;pin-value=pin-value", + "pkcs11:", + "token=token;object=object;pin-value=pin-value", + ), + # test#2: test remove schema from file URI + ( + "file:///path/to/something", + "file://", + "/path/to/something", + ), + ( + "abcabcabcabcabcabcabcabcabc", + "abc", + "abcabcabcabcabcabcabcabc", + ), + ], +) +def test_remove_prefix(_input: str, _prefix: str, _expected: str): + assert remove_prefix(_input, _prefix) == _expected + + +@pytest.mark.parametrize( + "_pkcs11_uri, _expected", + [ + # test#1: TypedDict also accepts unknown keys + ( + "pkcs11:token=token;object=object;slot-id=1;pin-value=pin-value;type=cert", + { + "object": "object", + "token": "token", + "pin-value": "pin-value", + "type": "cert", + "slot-id": "1", + }, + ), + # test#2: minimum pkcs11 sample + ( + "pkcs11:object=object;type=cert", + { + "object": "object", + "type": "cert", + }, + ), + ], +) +def test_parse_pkcs11_uri(_pkcs11_uri: str, _expected: dict[str, Any]): + assert parse_pkcs11_uri(_pkcs11_uri) == _expected From 10131325ca172b45f81f5bb35e1b7c6dc6d1559e Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 27 Feb 2024 08:42:35 +0000 Subject: [PATCH 044/128] minor fix to test_utils --- tests/test__utils.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/tests/test__utils.py b/tests/test__utils.py index b85012d..174190f 100644 --- a/tests/test__utils.py +++ b/tests/test__utils.py @@ -174,17 +174,20 @@ def test_retry_session_timecost(self): 3. 
max_retry = 8 We should have the time cost sequence as follow: 0.1, 0.2, 0.4, 0.6, 0.8, 1.0, 1.0, 1.0 - So the retry session should not take more than 6s(~5.1s+) + So the retry session should not take more than 6s(5.1s+) """ max_retries, actual_retries = 8, 9 backoff_factor, backoff_max = 0.1, 1 - expected_retry_session_timecost = sum( - min(backoff_max, backoff_factor * 2**i) for i in range(max_retries) + + # NOTE: add some overhead for function execution + expected_retry_session_timecost = ( + sum(min(backoff_max, backoff_factor * 2**i) for i in range(max_retries)) + + 0.5 ) return_value = random.randint(10**3, 10**6) - _start_time = time.time() with pytest.raises(self.HandledException): + _start_time = time.time() retry( self._func_factory( actual_retries, @@ -198,7 +201,10 @@ def test_retry_session_timecost(self): backoff_max=backoff_max, retry_on_exceptions=(self.HandledException,), )() - assert time.time() - _start_time <= expected_retry_session_timecost + + time_cost = time.time() - _start_time + logger.info(f"{time_cost=}") + assert time_cost <= expected_retry_session_timecost @pytest.mark.parametrize( From d8ec536f63db7ff86368e494980d5d2107296db8 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 27 Feb 2024 09:18:07 +0000 Subject: [PATCH 045/128] tests: introduce thing_name differences in gg_v1_cfg and gg_v2_cfg --- tests/data/gg_v1_cfg.json | 2 +- tests/data/gg_v2_cfg.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/data/gg_v1_cfg.json b/tests/data/gg_v1_cfg.json index 0dd5b5f..7abde7e 100644 --- a/tests/data/gg_v1_cfg.json +++ b/tests/data/gg_v1_cfg.json @@ -3,7 +3,7 @@ "caPath": "root.ca.pem", "certPath": "gg.cert.pem", "keyPath": "gg.private.key", - "thingArn": "arn:aws:iot:region:012345678901:thing/thing_name", + "thingArn": "arn:aws:iot:region:012345678901:thing/profile-dev-edge-ggv1-Core", "iotHost": "abcdefghijklm-ats.iot.region.amazonaws.com", "ggHost": "greengrass-ats.iot.region.amazonaws.com", "keepAlive": 30 diff --git a/tests/data/gg_v2_cfg.yaml b/tests/data/gg_v2_cfg.yaml index 4c87074..f20f542 100644 --- a/tests/data/gg_v2_cfg.yaml +++ b/tests/data/gg_v2_cfg.yaml @@ -3,7 +3,7 @@ system: privateKeyPath: "/greengrass/certs/gg.private.key" rootCaPath: "/greengrass/certs/root.ca.pem" rootpath: "/greengrass/v2" - thingName: "thing_name" + thingName: "profile-dev-edge-ggv2-Core" services: aws.greengrass.Nucleus: componentType: "NUCLEUS" @@ -12,4 +12,4 @@ services: awsRegion: "region" iotRoleAlias: "iot_role_alias" iotDataEndpoint: "abcdefghijklm-ats.iot.region.amazonaws.com" - iotCredEndpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" \ No newline at end of file + iotCredEndpoint: "abcdefghijk01.credentials.iot.region.amazonaws.com" From 1f266d1492e364789b8fa7fa4c666ddd559fe35b Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 27 Feb 2024 11:37:07 +0000 Subject: [PATCH 046/128] implement test_greengrass_conf --- tests/data/gg_v2_cfg.yaml_tpm2.0 | 2 +- tests/test_greengrass_config.py | 166 +++++++++++++++++++++++++++++++ 2 files changed, 167 insertions(+), 1 deletion(-) create mode 100644 tests/test_greengrass_config.py diff --git a/tests/data/gg_v2_cfg.yaml_tpm2.0 b/tests/data/gg_v2_cfg.yaml_tpm2.0 index 2a5c509..9af1e2a 100644 --- a/tests/data/gg_v2_cfg.yaml_tpm2.0 +++ b/tests/data/gg_v2_cfg.yaml_tpm2.0 @@ -3,7 +3,7 @@ system: privateKeyPath: "pkcs11:object=greengrass_key;type=private;pin-value=greengrass_userpin" rootCaPath: "/greengrass/certs/root.ca.pem" rootpath: "/greengrass/v2" - thingName: "thing_name" + 
thingName: "profile-dev-edge-ggv2-Core" services: aws.greengrass.Nucleus: componentType: "NUCLEUS" diff --git a/tests/test_greengrass_config.py b/tests/test_greengrass_config.py new file mode 100644 index 0000000..856fac7 --- /dev/null +++ b/tests/test_greengrass_config.py @@ -0,0 +1,166 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import annotations + +import logging +import uuid +from dataclasses import dataclass + +import pytest +from pytest_mock import MockerFixture + +import otaclient_iot_logging_server.greengrass_config +from otaclient_iot_logging_server.greengrass_config import ( + get_profile_from_thing_name, + IoTSessionConfig, + PKCS11Config, + parse_v1_config, + parse_v2_config, + parse_config, +) + +from tests.conftest import TEST_DATA_DPATH + +logger = logging.getLogger(__name__) + +MODULE = otaclient_iot_logging_server.greengrass_config.__name__ + +# NOTE: AWS_PROFILE_INFO, GREENGRASS_V1_CONFIG and GREENGRASS_V2_CONFIG +# environmental variables are properly set in pyproject.toml. +# profile_info in configs.py is populated with aws_profile_info.yaml in tests/data. + +# NOTE: gg_v1_cfg and gg_v2_cfg is the same, besides the thing_name, +# this will be used as evidence to check which config is used. 
+GG_V1_CFG_FPATH = TEST_DATA_DPATH / "gg_v1_cfg.json" +GG_V1_CFG_RAW = GG_V1_CFG_FPATH.read_text() +CFG_FROM_GG_V1 = IoTSessionConfig( + account_id="012345678901", + ca_path="/greengrass/certs/root.ca.pem", + private_key_path="/greengrass/certs/gg.private.key", + certificate_path="/greengrass/certs/gg.cert.pem", + thing_name="profile-dev-edge-ggv1-Core", + profile="profile-dev", + region="region", + aws_credential_provider_endpoint="abcdefghijk01.credentials.iot.region.amazonaws.com", +) + +GG_V2_CFG_FPATH = TEST_DATA_DPATH / "gg_v2_cfg.yaml" +GG_V2_CFG_RAW = GG_V2_CFG_FPATH.read_text() +CFG_FROM_GG_V2 = IoTSessionConfig( + account_id="012345678901", + ca_path="/greengrass/certs/root.ca.pem", + private_key_path="/greengrass/certs/gg.private.key", + certificate_path="/greengrass/certs/gg.cert.pem", + thing_name="profile-dev-edge-ggv2-Core", + profile="profile-dev", + region="region", + aws_credential_provider_endpoint="abcdefghijk01.credentials.iot.region.amazonaws.com", +) + +GG_V2_TPM2_CFG_FPATH = TEST_DATA_DPATH / "gg_v2_cfg.yaml_tpm2.0" +GG_V2_TPM2_CFG_RAW = GG_V2_TPM2_CFG_FPATH.read_text() +CFG_FROM_GG_V2_TPM2 = IoTSessionConfig( + account_id="012345678901", + ca_path="/greengrass/certs/root.ca.pem", + private_key_path="pkcs11:object=greengrass_key;type=private;pin-value=greengrass_userpin", + certificate_path="pkcs11:object=greengrass_key;type=cert;pin-value=greengrass_userpin", + thing_name="profile-dev-edge-ggv2-Core", + profile="profile-dev", + region="region", + aws_credential_provider_endpoint="abcdefghijk01.credentials.iot.region.amazonaws.com", + pkcs11_config=PKCS11Config( + pkcs11_lib="/usr/lib/x86_64-linux-gnu/pkcs11/libtpm2_pkcs11.so", + slot_id="1", + user_pin="greengrass_userpin", + ), +) + + +@pytest.mark.parametrize( + "_in, _expected", + [ + (f"thing/profile-stg-edge-{uuid.uuid1()}-Core", "profile-stg"), + (f"profile-dev-edge-{uuid.uuid1()}-Core", "profile-dev"), + ], +) +def test_get_profile_from_thing_name(_in: str, _expected: str): + assert get_profile_from_thing_name(_in) == _expected + + +# +# ------ greengrass v1 configuration ------ # +# +# NOTE: support for ggv1 tpm2.0 is not implemented. +@pytest.mark.parametrize( + "_raw_cfg, _expected", + [(GG_V1_CFG_RAW, CFG_FROM_GG_V1)], +) +def test_parse_v1_config(_raw_cfg: str, _expected: IoTSessionConfig): + assert parse_v1_config(_raw_cfg) == _expected + + +# +# ------ greengrass v2 configuration ------ # +# +@pytest.mark.parametrize( + "_raw_cfg, _expected", + [ + (GG_V2_CFG_RAW, CFG_FROM_GG_V2), + (GG_V2_TPM2_CFG_RAW, CFG_FROM_GG_V2_TPM2), + ], +) +def test_parse_v2_config(_raw_cfg: str, _expected: IoTSessionConfig): + assert parse_v2_config(_raw_cfg) == _expected + + +# +# ------ test parse_config entry point ------ # +# +@dataclass +class _ServerConfig: + GREENGRASS_V2_CONFIG: str + GREENGRASS_V1_CONFIG: str + + +class TestParseConfig: + def test_greengrass_v1_cfg_only(self, mocker: MockerFixture): + _server_cfg = _ServerConfig( + GREENGRASS_V1_CONFIG=str(GG_V1_CFG_FPATH), + GREENGRASS_V2_CONFIG="/path/not/exists", + ) + mocker.patch(f"{MODULE}.server_cfg", _server_cfg) + + assert parse_config() == CFG_FROM_GG_V1 + + def test_greengrass_v2_cfg_only(self, mocker: MockerFixture): + _server_cfg = _ServerConfig( + GREENGRASS_V1_CONFIG="/path/not/exists", + GREENGRASS_V2_CONFIG=str(GG_V2_CFG_FPATH), + ) + mocker.patch(f"{MODULE}.server_cfg", _server_cfg) + + assert parse_config() == CFG_FROM_GG_V2 + + def test_both_exist(self, mocker: MockerFixture): + """ + Greengrass V2 config should take priority. 
+ """ + _server_cfg = _ServerConfig( + GREENGRASS_V1_CONFIG=str(GG_V1_CFG_FPATH), + GREENGRASS_V2_CONFIG=str(GG_V2_CFG_FPATH), + ) + mocker.patch(f"{MODULE}.server_cfg", _server_cfg) + assert parse_config() == CFG_FROM_GG_V2 From 4e3a6d2803a6a0550d516a2c5d2d7791f43fd92a Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 02:16:25 +0000 Subject: [PATCH 047/128] add test_config, cleanup pyproject.toml --- pyproject.toml | 17 +++-- tests/test_configs.py | 148 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 156 insertions(+), 9 deletions(-) create mode 100644 tests/test_configs.py diff --git a/pyproject.toml b/pyproject.toml index 1fc2a0b..ea1eb4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ dev = [ "isort==5.13.2", "pytest==7.4.4", "pytest-asyncio==0.23.4", + "pytest-env==1.1.3", "pytest-mock==3.12.0", ] @@ -72,13 +73,6 @@ skip_empty = true type = "virtual" features = ["dev"] -[tool.hatch.envs.dev.env-vars] -AWS_PROFILE_INFO = "tests/data/aws_profile_info.yaml" -GREENGRASS_V1_CONFIG = "tests/data/gg_v1_cfg.json" -GREENGRASS_V2_CONFIG = "tests/data/gg_v2_cfg.yaml" -SERVER_LOGGING_LEVEL = "10" # debug -UPLOAD_INTERVAL = "6" - [tool.hatch.version] source = "vcs" @@ -101,10 +95,15 @@ skip_gitignore = true known_first_party = ["otaclient_iot_logging_server"] [tool.pytest.ini_options] +env = [ + "AWS_PROFILE_INFO=tests/data/aws_profile_info.yaml", + "GREENGRASS_V1_CONFIG=tests/data/gg_v1_cfg.json", + "GREENGRASS_V2_CONFIG=tests/data/gg_v2_cfg.yaml", +] asyncio_mode = "auto" log_auto_indent = true log_format = "%(asctime)s %(levelname)s %(filename)s %(funcName)s,%(lineno)d %(message)s" log_cli = true log_cli_level = "INFO" -pythonpath = ["otaclient_iot_logging_server"] -testpaths = ["./tests"] +pythonpath = ["src"] +testpaths = ["tests"] diff --git a/tests/test_configs.py b/tests/test_configs.py new file mode 100644 index 0000000..73e26ea --- /dev/null +++ b/tests/test_configs.py @@ -0,0 +1,148 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
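A quick aside on how the configuration loading exercised by the tests below works: pytest-env injects the env entries from pyproject.toml into os.environ before the test session starts, and a pydantic-settings BaseSettings model then resolves its fields from matching environment variable names, coercing the string values to the declared types. A minimal standalone sketch of that mechanism, assuming pydantic-settings v2; the field names and values are illustrative only, not the real ConfigurableLoggingServerConfig:

import os

from pydantic_settings import BaseSettings


class _SketchServerCfg(BaseSettings):
    # illustrative fields only, not the project's real settings model
    LISTEN_PORT: int = 8083
    UPLOAD_INTERVAL: int = 60


# a matching environment variable overrides the default and is coerced to int
os.environ["UPLOAD_INTERVAL"] = "30"
assert _SketchServerCfg().UPLOAD_INTERVAL == 30
assert _SketchServerCfg().LISTEN_PORT == 8083  # default used when no env var is set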
+ + +from __future__ import annotations + +import os +from typing import Any + +import pytest +from pytest_mock import MockerFixture + +from otaclient_iot_logging_server.configs import ( + ConfigurableLoggingServerConfig, + load_profile_info, +) + +from tests.conftest import TEST_DATA_DPATH + +AWS_PROFILE_INFO_FPATH = TEST_DATA_DPATH / "aws_profile_info.yaml" + + +@pytest.mark.parametrize( + "_mock_envs, _expected", + [ + # test#0: check default settings: + ( + {}, + { + "GREENGRASS_V1_CONFIG": "/greengrass/config/config.json", + "GREENGRASS_V2_CONFIG": "/greengrass/v2/init_config/config.yaml", + "AWS_PROFILE_INFO": "/opt/ota/iot_logger/aws_profile_info.yaml", + "LISTEN_ADDRESS": "127.0.0.1", + "LISTEN_PORT": 8083, + "UPLOAD_LOGGING_SERVER_LOGS": False, + "SERVER_LOGSTREAM_SUFFIX": "iot_logging_server", + "SERVER_LOGGING_LEVEL": "INFO", + "SERVER_LOGGING_LOG_FORMAT": "[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s", + "MAX_LOGS_BACKLOG": 4096, + "MAX_LOGS_PER_MERGE": 512, + "UPLOAD_INTERVAL": 60, + }, + ), + # test#1: frequently changed settings + ( + { + "LISTEN_ADDRESS": "172.16.1.1", + "SERVER_LOGGING_LEVEL": "ERROR", + "UPLOAD_INTERVAL": "30", + }, + { + "GREENGRASS_V1_CONFIG": "/greengrass/config/config.json", + "GREENGRASS_V2_CONFIG": "/greengrass/v2/init_config/config.yaml", + "AWS_PROFILE_INFO": "/opt/ota/iot_logger/aws_profile_info.yaml", + "LISTEN_ADDRESS": "172.16.1.1", + "LISTEN_PORT": 8083, + "UPLOAD_LOGGING_SERVER_LOGS": False, + "SERVER_LOGSTREAM_SUFFIX": "iot_logging_server", + "SERVER_LOGGING_LEVEL": "ERROR", + "SERVER_LOGGING_LOG_FORMAT": "[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s", + "MAX_LOGS_BACKLOG": 4096, + "MAX_LOGS_PER_MERGE": 512, + "UPLOAD_INTERVAL": 30, + }, + ), + # test#2: change everything + ( + { + "GREENGRASS_V1_CONFIG": "ggv1_cfg.json", + "GREENGRASS_V2_CONFIG": "ggv2_cfg.yaml", + "AWS_PROFILE_INFO": "aws_profile_info.yaml", + "LISTEN_ADDRESS": "172.16.1.1", + "LISTEN_PORT": "12345", + "UPLOAD_LOGGING_SERVER_LOGS": "true", + "SERVER_LOGSTREAM_SUFFIX": "test_logging_server", + "SERVER_LOGGING_LEVEL": "DEBUG", + "SERVER_LOGGING_LOG_FORMAT": "someformat", + "MAX_LOGS_BACKLOG": "1024", + "MAX_LOGS_PER_MERGE": "128", + "UPLOAD_INTERVAL": "10", + }, + { + "GREENGRASS_V1_CONFIG": "ggv1_cfg.json", + "GREENGRASS_V2_CONFIG": "ggv2_cfg.yaml", + "AWS_PROFILE_INFO": "aws_profile_info.yaml", + "LISTEN_ADDRESS": "172.16.1.1", + "LISTEN_PORT": 12345, + "UPLOAD_LOGGING_SERVER_LOGS": True, + "SERVER_LOGSTREAM_SUFFIX": "test_logging_server", + "SERVER_LOGGING_LEVEL": "DEBUG", + "SERVER_LOGGING_LOG_FORMAT": "someformat", + "MAX_LOGS_BACKLOG": 1024, + "MAX_LOGS_PER_MERGE": 128, + "UPLOAD_INTERVAL": 10, + }, + ), + ], +) +def test_server_config_loading( + _mock_envs: dict[str, str], + _expected: dict[str, Any], + mocker: MockerFixture, +): + # patch environmental variables while clearing all already + mocker.patch.dict(os.environ, _mock_envs, clear=True) + + # NOTE: compare by dict to prevent double import from env vars + assert _expected == ConfigurableLoggingServerConfig().model_dump() + + +@pytest.mark.parametrize( + "_in, _expected", + [ + ( + str(AWS_PROFILE_INFO_FPATH), + [ + { + "profile_name": "profile-dev", + "account_id": "012345678901", + "credential_endpoint": "abcdefghijk01.credentials.iot.region.amazonaws.com", + }, + { + "profile_name": "profile-stg", + "account_id": "012345678902", + "credential_endpoint": "abcdefghijk02.credentials.iot.region.amazonaws.com", + }, + { + "profile_name": "profile-prd", + 
"account_id": "012345678903", + "credential_endpoint": "abcdefghijk03.credentials.iot.region.amazonaws.com", + }, + ], + ), + ], +) +def test_load_profile_info(_in: str, _expected: dict[str, Any]): + assert load_profile_info(_in).model_dump() == _expected From ff9ff54aad88407f4923ec9bc8fc690c7d7a15aa Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 02:47:22 +0000 Subject: [PATCH 048/128] add test__main__.py --- tests/test__main__.py | 111 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 tests/test__main__.py diff --git a/tests/test__main__.py b/tests/test__main__.py new file mode 100644 index 0000000..1dd70ad --- /dev/null +++ b/tests/test__main__.py @@ -0,0 +1,111 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from queue import Queue + +import pytest +from pytest import LogCaptureFixture +from pytest_mock import MockerFixture + +import otaclient_iot_logging_server.__main__ as _main_module +from otaclient_iot_logging_server._common import LogMessage + +MODULE = _main_module.__name__ + +logger = logging.getLogger(__name__) + + +@dataclass +class _ServerCfg: + """A minimum set of configs used by main module.""" + + SERVER_LOGGING_LOG_FORMAT: str = "test_format" + SERVER_LOGGING_LEVEL: str = "DEBUG" + UPLOAD_LOGGING_SERVER_LOGS: bool = False + SERVER_LOGSTREAM_SUFFIX: str = "test_suffix" + LISTEN_ADDRESS: str = "172.16.1.1" + LISTEN_PORT: int = 1234 + MAX_LOGS_PER_MERGE: int = 123 + MAX_LOGS_BACKLOG: int = 1234 + UPLOAD_INTERVAL: int = 12 + + +@pytest.mark.parametrize("_in_server_cfg, _version", [(_ServerCfg(), "test_version")]) +def test_main( + _in_server_cfg: _ServerCfg, + _version: str, + mocker: MockerFixture, + caplog: LogCaptureFixture, +): + # ------ prepare patching ------ # + mocker.patch( + f"{MODULE}._config_logging", + _logger_mock := mocker.MagicMock(return_value=logger), + ) + mocker.patch( + f"{MODULE}.launch_server", + _launch_server_mock := mocker.MagicMock(), + ) + mocker.patch(f"{MODULE}.__version__", _version) + mocker.patch(f"{MODULE}.server_cfg", _in_server_cfg) + + # ------ execution ------ # + _main_module.main() + + # ------ check result ------ # + _logger_mock.assert_called_once_with( + mocker.ANY, + format=_in_server_cfg.SERVER_LOGGING_LOG_FORMAT, + level=_in_server_cfg.SERVER_LOGGING_LEVEL, + enable_server_log=_in_server_cfg.UPLOAD_LOGGING_SERVER_LOGS, + server_logstream_suffix=_in_server_cfg.SERVER_LOGSTREAM_SUFFIX, + ) + _launch_server_mock.assert_called_once_with( + mocker.ANY, + queue=mocker.ANY, + max_logs_per_merge=_in_server_cfg.MAX_LOGS_PER_MERGE, + interval=_in_server_cfg.UPLOAD_INTERVAL, + ) + + # check __main__.main source code for more details + assert ( + caplog.records[-2].msg + == f"launching iot_logging_server({_version}) at http://{_in_server_cfg.LISTEN_ADDRESS}:{_in_server_cfg.LISTEN_PORT}" + ) + assert (caplog.records[-1].msg) == 
f"iot_logging_server config: \n{_in_server_cfg}" + + +def test_server_logger(): + _queue: Queue[tuple[str, LogMessage]] = Queue() + suffix = "test_suffix" + + # ------ setup test ------ # + _handler = _main_module._LogTeeHandler(_queue, suffix) # type: ignore + logger.addHandler(_handler) + + # ------ execution ------ # + logger.info("emit one logging entry") + + # ------ clenaup ------ # + logger.removeHandler(_handler) + + # ------ check result ------ # + _log = _queue.get_nowait() + assert _log[0] == suffix + assert _log[1] From 3b9805c2ef819a0c12933864711678eec6edf065 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 03:07:16 +0000 Subject: [PATCH 049/128] minor update to pyproject.toml, add tests/conftest.py --- pyproject.toml | 4 +--- tests/conftest.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) create mode 100644 tests/conftest.py diff --git a/pyproject.toml b/pyproject.toml index ea1eb4b..52dbf2d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,19 +54,17 @@ line-length = 88 [tool.coverage.run] branch = false -include = ["otaclient_iot_logging_server/**/*.py"] +source = ["otaclient_iot_logging_server"] [tool.coverage.report] exclude_also = [ "def __repr__", - "if cfg.DEBUG_MODE", "if __name__ == .__main__.:", "if TYPE_CHECKING:", "class .*\\bProtocol\\):", "@(abc\\.)?abstractmethod", ] show_missing = true -skip_covered = true skip_empty = true [tool.hatch.envs.dev] diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..1de5753 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,20 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from pathlib import Path + +TEST_PACKAGE = Path(__file__).parent + +TEST_DATA_DPATH = TEST_PACKAGE / "data" From 3f2133ad924594e5acfad8ca76c2f9b37117f0cd Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 06:44:13 +0000 Subject: [PATCH 050/128] add test_boto3_session --- tests/test_boto3_session.py | 134 ++++++++++++++++++++++++++++++++++++ 1 file changed, 134 insertions(+) create mode 100644 tests/test_boto3_session.py diff --git a/tests/test_boto3_session.py b/tests/test_boto3_session.py new file mode 100644 index 0000000..f7caf61 --- /dev/null +++ b/tests/test_boto3_session.py @@ -0,0 +1,134 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import annotations +from typing import Any + +import pytest +from awsiot_credentialhelper.boto3_session import Boto3SessionProvider +from awsiot_credentialhelper.boto3_session import Pkcs11Config as aws_PKcs11Config +from pytest_mock import MockerFixture + + +import otaclient_iot_logging_server.boto3_session +from otaclient_iot_logging_server._utils import parse_pkcs11_uri +from otaclient_iot_logging_server.boto3_session import _convert_to_pem, get_session # type: ignore +from otaclient_iot_logging_server.greengrass_config import ( + IoTSessionConfig, + PKCS11Config, +) + +from tests.conftest import TEST_DATA_DPATH + +MODULE = otaclient_iot_logging_server.boto3_session.__name__ + +SAMPLE_CERT_PEM_FPATH = TEST_DATA_DPATH / "sample_cert.pem" +SAMPLE_CERT_DER_FPATH = TEST_DATA_DPATH / "sample_cert.der" + + +@pytest.mark.parametrize( + "_in, _expected", + [ + ( + pem_cert := SAMPLE_CERT_PEM_FPATH.read_bytes(), + pem_cert, + ), + (SAMPLE_CERT_DER_FPATH.read_bytes(), pem_cert), + ], +) +def test__convert_to_pem(_in: bytes, _expected: bytes): + assert _convert_to_pem(_in) == _expected + + +_MOCKED_CERT = b"mocked_certs" +_PKCS11_PRIVKEY_URI = "pkcs11:object=greengrass_privkey;type=private" +_PARSED_PKCS11_PRIVKEY_URI = parse_pkcs11_uri(_PKCS11_PRIVKEY_URI) + + +@pytest.mark.parametrize( + "_config, _expected_call", + [ + # test#1: boto3 session without pkcs11 + ( + test1_cfg := IoTSessionConfig( + account_id="test_account", + ca_path="test_capath", + private_key_path="test_privkey_path", + certificate_path="test_cert_path", + thing_name="test_thing_name", + profile="test_profile", + region="test_region", + aws_credential_provider_endpoint="test_cred_endpoint", + ), + { + "endpoint": test1_cfg.aws_credential_provider_endpoint, + "role_alias": test1_cfg.aws_role_alias, + "certificate": _MOCKED_CERT, + "private_key": test1_cfg.private_key_path, + "thing_name": test1_cfg.thing_name, + }, + ), + # test#2: boto3 session with pkcs11 + ( + test2_cfg := IoTSessionConfig( + account_id="test_account", + ca_path="test_capath", + private_key_path=_PKCS11_PRIVKEY_URI, + certificate_path="test_cert_path", + thing_name="test_thing_name", + profile="test_profile", + region="test_region", + aws_credential_provider_endpoint="test_cred_endpoint", + pkcs11_config=( + test2_pkcs11_cfg := PKCS11Config( + pkcs11_lib="tpm2-pkcs11_lib", + slot_id="1", + user_pin="userpin", + ) + ), + ), + { + "endpoint": test2_cfg.aws_credential_provider_endpoint, + "role_alias": test2_cfg.aws_role_alias, + "certificate": _MOCKED_CERT, + "thing_name": test2_cfg.thing_name, + "pkcs11": aws_PKcs11Config( + pkcs11_lib=test2_pkcs11_cfg.pkcs11_lib, + slot_id=int(test2_pkcs11_cfg.slot_id), + user_pin=test2_pkcs11_cfg.user_pin, + private_key_label=_PARSED_PKCS11_PRIVKEY_URI["object"], + ), + }, + ), + ], +) +def test_get_session( + _config: IoTSessionConfig, _expected_call: dict[str, Any], mocker: MockerFixture +): + """ + Confirm with specific input IoTSessionConfig, we get the expected Boto3Session being created. 
+ """ + # ------ setup test ------ # + _boto3_session_provider_mock = mocker.MagicMock(spec=Boto3SessionProvider) + mocker.patch(f"{MODULE}.Boto3SessionProvider", _boto3_session_provider_mock) + mocker.patch( + f"{MODULE}._load_certificate", mocker.MagicMock(return_value=_MOCKED_CERT) + ) + + # ------ execution ------ # + get_session(_config) + + # ------ check result ------ # + _boto3_session_provider_mock.assert_called_once_with(**_expected_call) From 589d60f434095eaed69a2a93829e65fe6d029d54 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 07:30:11 +0000 Subject: [PATCH 051/128] common: define package scope type LogsQueue --- src/otaclient_iot_logging_server/_common.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/otaclient_iot_logging_server/_common.py b/src/otaclient_iot_logging_server/_common.py index 8fac4d6..1aa3f40 100644 --- a/src/otaclient_iot_logging_server/_common.py +++ b/src/otaclient_iot_logging_server/_common.py @@ -15,10 +15,13 @@ from __future__ import annotations +from queue import Queue from typing import Literal, TypedDict from typing_extensions import NotRequired +LogsQueue = Queue[tuple[str, "LogMessage"]] + class LogMessage(TypedDict): timestamp: int # in milliseconds From d08923fc942c18796e11d293794954fc92e31b67 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 07:32:40 +0000 Subject: [PATCH 052/128] aws_iot_logger: start_sendind_msg_thread -> start_aws_iot_logger_thread, now this function takes charge of creating AWSIoTLogger object --- .../aws_iot_logger.py | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 72b1fce..5e728f7 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -19,14 +19,15 @@ import time from collections import defaultdict from datetime import datetime -from queue import Empty, Queue +from queue import Empty from threading import Thread from typing import Any from typing_extensions import NoReturn -from otaclient_iot_logging_server._common import LogMessage +from otaclient_iot_logging_server._common import LogMessage, LogsQueue from otaclient_iot_logging_server._utils import chain_query, retry +from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.boto3_session import get_session from otaclient_iot_logging_server.greengrass_config import IoTSessionConfig @@ -53,7 +54,7 @@ class AWSIoTLogger: def __init__( self, session_config: IoTSessionConfig, - queue: Queue[tuple[str, LogMessage]], + queue: LogsQueue, max_logs_per_merge: int, interval: int, ): @@ -64,7 +65,7 @@ def __init__( self._log_group_name = session_config.aws_cloudwatch_log_group self._sequence_tokens: dict[str, str | None] = {} self._interval = interval - self._queue: Queue[tuple[str, LogMessage]] = queue + self._queue: LogsQueue = queue # NOTE: add this limitation to ensure all of the log_streams in a merge # will definitely have entries less than MAX_LOGS_PER_PUT self._max_logs_per_merge = min(max_logs_per_merge, self.MAX_LOGS_PER_PUT) @@ -185,7 +186,16 @@ def thread_main(self) -> NoReturn: time.sleep(self._interval) -def start_sending_msg_thread(iot_logger: AWSIoTLogger) -> Thread: +def start_aws_iot_logger_thread( + queue: LogsQueue, session_config: IoTSessionConfig +) -> Thread: + iot_logger = AWSIoTLogger( + session_config=session_config, + queue=queue, + 
max_logs_per_merge=server_cfg.MAX_LOGS_PER_MERGE, + interval=server_cfg.UPLOAD_INTERVAL, + ) + _thread = Thread(target=iot_logger.thread_main, daemon=True) _thread.start() logger.debug("iot logger thread started") From db3e42b3f40925b56ca007d2085451195a692e40 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 07:33:42 +0000 Subject: [PATCH 053/128] common: fix LogsQueue as type alias --- src/otaclient_iot_logging_server/_common.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/otaclient_iot_logging_server/_common.py b/src/otaclient_iot_logging_server/_common.py index 1aa3f40..c9167d4 100644 --- a/src/otaclient_iot_logging_server/_common.py +++ b/src/otaclient_iot_logging_server/_common.py @@ -18,9 +18,9 @@ from queue import Queue from typing import Literal, TypedDict -from typing_extensions import NotRequired +from typing_extensions import NotRequired, TypeAlias -LogsQueue = Queue[tuple[str, "LogMessage"]] +LogsQueue: TypeAlias = "Queue[tuple[str, LogMessage]]" class LogMessage(TypedDict): From 0af6d68a8de62f1a1b5d9c12634527b22cdf1698 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 07:34:38 +0000 Subject: [PATCH 054/128] log_proxy_server: now launch_server only takes care of launching logging collecting server --- .../log_proxy_server.py | 21 +------------------ 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index f3d5f2c..8ede693 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -24,12 +24,7 @@ from aiohttp.web import Request from otaclient_iot_logging_server._common import LogMessage -from otaclient_iot_logging_server.aws_iot_logger import ( - AWSIoTLogger, - start_sending_msg_thread, -) from otaclient_iot_logging_server.configs import server_cfg -from otaclient_iot_logging_server.greengrass_config import IoTSessionConfig logger = logging.getLogger(__name__) @@ -63,21 +58,7 @@ async def logging_post_handler(self, request: Request): return web.Response(status=HTTPStatus.OK) -def launch_server( - session_config: IoTSessionConfig, - queue: Queue[tuple[str, LogMessage]], - max_logs_per_merge: int, - interval: int, -) -> None: - start_sending_msg_thread( - AWSIoTLogger( - session_config=session_config, - queue=queue, - max_logs_per_merge=max_logs_per_merge, - interval=interval, - ) - ) - +def launch_server(queue: Queue[tuple[str, LogMessage]]) -> None: handler = LoggingPostHandler(queue=queue) app = web.Application() app.add_routes([web.post(r"/{ecu_id}", handler.logging_post_handler)]) From 696f98d3e859b76ed930558e241c0823ed3fabc9 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 07:37:08 +0000 Subject: [PATCH 055/128] aws_iot_logger: IoTSessionConfig is only used by aws_iot_logger, compose session_config in this module only --- src/otaclient_iot_logging_server/aws_iot_logger.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 5e728f7..358bdb7 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -29,7 +29,10 @@ from otaclient_iot_logging_server._utils import chain_query, retry from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.boto3_session import get_session -from 
otaclient_iot_logging_server.greengrass_config import IoTSessionConfig +from otaclient_iot_logging_server.greengrass_config import ( + IoTSessionConfig, + parse_config, +) logger = logging.getLogger(__name__) @@ -186,11 +189,9 @@ def thread_main(self) -> NoReturn: time.sleep(self._interval) -def start_aws_iot_logger_thread( - queue: LogsQueue, session_config: IoTSessionConfig -) -> Thread: +def start_aws_iot_logger_thread(queue: LogsQueue) -> Thread: iot_logger = AWSIoTLogger( - session_config=session_config, + session_config=parse_config(), queue=queue, max_logs_per_merge=server_cfg.MAX_LOGS_PER_MERGE, interval=server_cfg.UPLOAD_INTERVAL, From 5f3518a3244c0c888f62c3a54633f7c971b22f8f Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 07:41:36 +0000 Subject: [PATCH 056/128] main: split logging related logics to a new module _log_setting --- .../_log_setting.py | 81 +++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 src/otaclient_iot_logging_server/_log_setting.py diff --git a/src/otaclient_iot_logging_server/_log_setting.py b/src/otaclient_iot_logging_server/_log_setting.py new file mode 100644 index 0000000..b87c2b1 --- /dev/null +++ b/src/otaclient_iot_logging_server/_log_setting.py @@ -0,0 +1,81 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import annotations + +import logging +import time +from queue import Queue + +from otaclient_iot_logging_server import package_name as root_package_name +from otaclient_iot_logging_server._common import LogMessage +from otaclient_iot_logging_server.configs import server_cfg + + +class _LogTeeHandler(logging.Handler): + """Implementation of uploading local server loggings to cloudwatch.""" + + def __init__( + self, + queue: Queue[tuple[str, LogMessage]], + logstream_suffix: str, + ) -> None: + super().__init__() + self._queue = queue + self._logstream_suffix = logstream_suffix + + def emit(self, record: logging.LogRecord) -> None: + try: + self._queue.put_nowait( + ( + self._logstream_suffix, + LogMessage( + timestamp=int(time.time()) * 1000, # milliseconds + message=self.format(record), + ), + ) + ) + except Exception: + pass + + +def config_logging( + queue: Queue[tuple[str, LogMessage]], + *, + format: str, + level: str, + enable_server_log: bool, + server_logstream_suffix: str, +): + # NOTE: for the root logger, set to CRITICAL to filter away logs from other + # external modules unless reached CRITICAL level. 
+ logging.basicConfig(level=logging.CRITICAL, format=format, force=True) + # NOTE: set the to the package root logger + root_logger = logging.getLogger(root_package_name) + root_logger.setLevel(level) + + if enable_server_log and server_logstream_suffix: + _tee_handler = _LogTeeHandler( + queue=queue, + logstream_suffix=server_logstream_suffix, + ) + _fmt = logging.Formatter(fmt=server_cfg.SERVER_LOGGING_LOG_FORMAT) + _tee_handler.setFormatter(_fmt) + + # attach the log tee handler to the root logger + root_logger.addHandler(_tee_handler) + root_logger.info(f"enable server logs upload with {server_logstream_suffix=}") + + return root_logger From b772eb14514160b39aefd6f8d994af207bc40063 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 07:42:04 +0000 Subject: [PATCH 057/128] cleanup __main__ --- src/otaclient_iot_logging_server/__main__.py | 80 +++----------------- 1 file changed, 10 insertions(+), 70 deletions(-) diff --git a/src/otaclient_iot_logging_server/__main__.py b/src/otaclient_iot_logging_server/__main__.py index f2c701e..6c6ffd6 100644 --- a/src/otaclient_iot_logging_server/__main__.py +++ b/src/otaclient_iot_logging_server/__main__.py @@ -15,79 +15,22 @@ from __future__ import annotations -import logging -import time from queue import Queue from otaclient_iot_logging_server import __version__ -from otaclient_iot_logging_server import package_name as root_package_name -from otaclient_iot_logging_server._common import LogMessage +from otaclient_iot_logging_server._common import LogsQueue +from otaclient_iot_logging_server._log_setting import config_logging +from otaclient_iot_logging_server.aws_iot_logger import start_aws_iot_logger_thread from otaclient_iot_logging_server.configs import server_cfg -from otaclient_iot_logging_server.greengrass_config import parse_config from otaclient_iot_logging_server.log_proxy_server import launch_server -class _LogTeeHandler(logging.Handler): - """Tee the local loggings to a queue.""" - - def __init__( - self, - queue: Queue[tuple[str, LogMessage]], - logstream_suffix: str, - ) -> None: - super().__init__() - self._queue = queue - self._logstream_suffix = logstream_suffix - - def emit(self, record: logging.LogRecord) -> None: - try: - self._queue.put_nowait( - ( - self._logstream_suffix, - LogMessage( - timestamp=int(time.time()) * 1000, # milliseconds - message=self.format(record), - ), - ) - ) - except Exception: - pass - - -def _config_logging( - queue: Queue[tuple[str, LogMessage]], - *, - format: str, - level: str, - enable_server_log: bool, - server_logstream_suffix: str, -): - # NOTE: for the root logger, set to CRITICAL to filter away logs from other - # external modules unless reached CRITICAL level. 
- logging.basicConfig(level=logging.CRITICAL, format=format, force=True) - # NOTE: set the to the package root logger - root_logger = logging.getLogger(root_package_name) - root_logger.setLevel(level) - - if enable_server_log and server_logstream_suffix: - _tee_handler = _LogTeeHandler( - queue=queue, - logstream_suffix=server_logstream_suffix, - ) - _fmt = logging.Formatter(fmt=server_cfg.SERVER_LOGGING_LOG_FORMAT) - _tee_handler.setFormatter(_fmt) - - # attach the log tee handler to the root logger - root_logger.addHandler(_tee_handler) - root_logger.info(f"enable server logs upload with {server_logstream_suffix=}") - - return root_logger - - def main() -> None: - queue: Queue[tuple[str, LogMessage]] = Queue(maxsize=server_cfg.MAX_LOGS_BACKLOG) + # server scope log entries pipe + queue: LogsQueue = Queue(maxsize=server_cfg.MAX_LOGS_BACKLOG) - root_logger = _config_logging( + # ------ configure local logging ------ # + root_logger = config_logging( queue, format=server_cfg.SERVER_LOGGING_LOG_FORMAT, level=server_cfg.SERVER_LOGGING_LEVEL, @@ -95,17 +38,14 @@ def main() -> None: server_logstream_suffix=server_cfg.SERVER_LOGSTREAM_SUFFIX, ) + # ------ start server ------ # root_logger.info( f"launching iot_logging_server({__version__}) at http://{server_cfg.LISTEN_ADDRESS}:{server_cfg.LISTEN_PORT}" ) root_logger.info(f"iot_logging_server config: \n{server_cfg}") - launch_server( - parse_config(), - queue=queue, - max_logs_per_merge=server_cfg.MAX_LOGS_PER_MERGE, - interval=server_cfg.UPLOAD_INTERVAL, - ) + start_aws_iot_logger_thread(queue) + launch_server(queue=queue) # NoReturn if __name__ == "__main__": From 7e76fb304f58a70a3916c96bfe84f4f72d4fb598 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 08:14:39 +0000 Subject: [PATCH 058/128] split test__main --- tests/test__log_setting.py | 47 ++++++++++++++++++++++++++++++++++++++ tests/test__main__.py | 22 ------------------ 2 files changed, 47 insertions(+), 22 deletions(-) create mode 100644 tests/test__log_setting.py diff --git a/tests/test__log_setting.py b/tests/test__log_setting.py new file mode 100644 index 0000000..0a513b5 --- /dev/null +++ b/tests/test__log_setting.py @@ -0,0 +1,47 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import annotations + +import logging +from queue import Queue + +import otaclient_iot_logging_server._log_setting +from otaclient_iot_logging_server._log_setting import _LogTeeHandler # type: ignore +from otaclient_iot_logging_server._common import LogsQueue + +MODULE = otaclient_iot_logging_server._log_setting.__name__ + +logger = logging.getLogger(__name__) + + +def test_server_logger(): + _queue: LogsQueue = Queue() + suffix = "test_suffix" + + # ------ setup test ------ # + _handler = _LogTeeHandler(_queue, suffix) # type: ignore + logger.addHandler(_handler) + + # ------ execution ------ # + logger.info("emit one logging entry") + + # ------ clenaup ------ # + logger.removeHandler(_handler) + + # ------ check result ------ # + _log = _queue.get_nowait() + assert _log[0] == suffix + assert _log[1] diff --git a/tests/test__main__.py b/tests/test__main__.py index 1dd70ad..f01e305 100644 --- a/tests/test__main__.py +++ b/tests/test__main__.py @@ -17,14 +17,12 @@ import logging from dataclasses import dataclass -from queue import Queue import pytest from pytest import LogCaptureFixture from pytest_mock import MockerFixture import otaclient_iot_logging_server.__main__ as _main_module -from otaclient_iot_logging_server._common import LogMessage MODULE = _main_module.__name__ @@ -89,23 +87,3 @@ def test_main( == f"launching iot_logging_server({_version}) at http://{_in_server_cfg.LISTEN_ADDRESS}:{_in_server_cfg.LISTEN_PORT}" ) assert (caplog.records[-1].msg) == f"iot_logging_server config: \n{_in_server_cfg}" - - -def test_server_logger(): - _queue: Queue[tuple[str, LogMessage]] = Queue() - suffix = "test_suffix" - - # ------ setup test ------ # - _handler = _main_module._LogTeeHandler(_queue, suffix) # type: ignore - logger.addHandler(_handler) - - # ------ execution ------ # - logger.info("emit one logging entry") - - # ------ clenaup ------ # - logger.removeHandler(_handler) - - # ------ check result ------ # - _log = _queue.get_nowait() - assert _log[0] == suffix - assert _log[1] From a80a566cd9f9e883ddfa2bdfb5d3b9ec46a38d66 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 09:30:24 +0000 Subject: [PATCH 059/128] log_proxy_server: minor type fix --- src/otaclient_iot_logging_server/log_proxy_server.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index 8ede693..c9cbfae 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -23,7 +23,7 @@ from aiohttp import web from aiohttp.web import Request -from otaclient_iot_logging_server._common import LogMessage +from otaclient_iot_logging_server._common import LogMessage, LogsQueue from otaclient_iot_logging_server.configs import server_cfg logger = logging.getLogger(__name__) @@ -32,7 +32,7 @@ class LoggingPostHandler: """A simple aiohttp server handler that receives logs from otaclient.""" - def __init__(self, queue: Queue[tuple[str, LogMessage]]) -> None: + def __init__(self, queue: LogsQueue) -> None: self._queue = queue # route: POST /{ecu_id} From 8b8cca19d451b32c461d6e9d574b56391aed7918 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 09:31:39 +0000 Subject: [PATCH 060/128] add test_log_proxy_server --- tests/test_log_proxy_server.py | 132 +++++++++++++++++++++++++++++++++ 1 file changed, 132 insertions(+) create mode 100644 tests/test_log_proxy_server.py diff --git 
a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py new file mode 100644 index 0000000..915d0e0 --- /dev/null +++ b/tests/test_log_proxy_server.py @@ -0,0 +1,132 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import annotations + +import logging +import os +import random +from dataclasses import dataclass +from queue import Queue +from typing import NamedTuple +from urllib.parse import urljoin + +import aiohttp +import pytest +from aiohttp import web + +import otaclient_iot_logging_server.log_proxy_server as log_server_module +from otaclient_iot_logging_server._common import LogsQueue +from otaclient_iot_logging_server.log_proxy_server import LoggingPostHandler + +logger = logging.getLogger(__name__) + +MODULE = log_server_module.__name__ + + +@dataclass +class _ServerConfig: + """Minimum set of server_config needed for this test.""" + + LISTEN_ADDRESS: str = "127.0.0.1" + LISTEN_PORT: int = 8083 + + +_test_server_cfg = _ServerConfig() + + +class MessageEntry(NamedTuple): + ecu_id: str + message: str + + +class TestLogProxyServer: + + SERVER_URL = ( + f"http://{_test_server_cfg.LISTEN_ADDRESS}:{_test_server_cfg.LISTEN_PORT}/" + ) + ECUS = ("main_ecu", "sub_ecu0", "sub_ecu1", "sub_ecu2") + MSG_LEN = 16 + TOTAL_MSG_NUM = 4096 + + @classmethod + def _generate_random_msg(cls) -> MessageEntry: + _ecu, *_ = random.sample(cls.ECUS, 1) + _msg = os.urandom(cls.MSG_LEN).hex() + return MessageEntry(_ecu, _msg) + + @pytest.fixture(autouse=True) + async def launch_server(self): + """ + See https://docs.aiohttp.org/en/stable/web_advanced.html#custom-resource-implementation + for more details. 
+ """ + queue: LogsQueue = Queue() + self._queue = queue + + handler = LoggingPostHandler(queue) + app = web.Application() + # mute the aiohttp server logging + aiohttp_server_logger = logging.getLogger("aiohttp") + aiohttp_server_logger.setLevel("ERROR") + + # add handler to the server + app.add_routes([web.post(r"/{ecu_id}", handler.logging_post_handler)]) + + # star the server + runner = web.AppRunner(app) + try: + await runner.setup() + site = web.TCPSite( + runner, _test_server_cfg.LISTEN_ADDRESS, _test_server_cfg.LISTEN_PORT + ) + await site.start() + logger.info(f"test log_proxy_server started at {self.SERVER_URL}") + yield + finally: + await runner.cleanup() + + @pytest.fixture(autouse=True) + async def client_sesion(self): + client_session = aiohttp.ClientSession( + raise_for_status=True, + timeout=aiohttp.ClientTimeout(total=0.2), # for speedup testing + ) + try: + yield client_session + finally: + await client_session.close() + + @pytest.fixture(autouse=True) + def prepare_test_data(self): + self._msgs: list[MessageEntry] = [] + for _ in range(self.TOTAL_MSG_NUM): + self._msgs.append(self._generate_random_msg()) + + async def test_server(self, client_sesion: aiohttp.ClientSession): + # ------ execution ------ # + for item in self._msgs: + _ecu_id, _msg = item.ecu_id, item.message + _log_upload_endpoint_url = urljoin(self.SERVER_URL, _ecu_id) + async with client_sesion.post(_log_upload_endpoint_url, data=_msg): + pass # raise_for_status is set on session + + # ------ check result ------ # + # ensure the all msgs are sent in order to the queue by the server. + for item in self._msgs: + _ecu_id, _log_msg = self._queue.get_nowait() + assert _ecu_id == item.ecu_id + assert _log_msg["message"] == item.message + assert self._queue.empty() From dce9b8fa3198ecfa93a37e12b91ac49323632780 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 09:35:02 +0000 Subject: [PATCH 061/128] fix test__main__ --- tests/test__main__.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test__main__.py b/tests/test__main__.py index f01e305..74816ff 100644 --- a/tests/test__main__.py +++ b/tests/test__main__.py @@ -53,9 +53,13 @@ def test_main( ): # ------ prepare patching ------ # mocker.patch( - f"{MODULE}._config_logging", + f"{MODULE}.config_logging", _logger_mock := mocker.MagicMock(return_value=logger), ) + mocker.patch( + f"{MODULE}.start_aws_iot_logger_thread", + _aws_iot_logger_mock := mocker.MagicMock(), + ) mocker.patch( f"{MODULE}.launch_server", _launch_server_mock := mocker.MagicMock(), @@ -74,12 +78,8 @@ def test_main( enable_server_log=_in_server_cfg.UPLOAD_LOGGING_SERVER_LOGS, server_logstream_suffix=_in_server_cfg.SERVER_LOGSTREAM_SUFFIX, ) - _launch_server_mock.assert_called_once_with( - mocker.ANY, - queue=mocker.ANY, - max_logs_per_merge=_in_server_cfg.MAX_LOGS_PER_MERGE, - interval=_in_server_cfg.UPLOAD_INTERVAL, - ) + _aws_iot_logger_mock.assert_called_once() + _launch_server_mock.assert_called_once() # check __main__.main source code for more details assert ( From 3c21a113380dbc4ad6222551b98c1e7d78203d38 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 09:37:07 +0000 Subject: [PATCH 062/128] minor update --- tests/test_log_proxy_server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py index 915d0e0..43fb906 100644 --- a/tests/test_log_proxy_server.py +++ b/tests/test_log_proxy_server.py @@ -117,6 +117,7 @@ def prepare_test_data(self): async def 
test_server(self, client_sesion: aiohttp.ClientSession): # ------ execution ------ # + logger.info(f"sending {self.TOTAL_MSG_NUM} msgs to {self.SERVER_URL}...") for item in self._msgs: _ecu_id, _msg = item.ecu_id, item.message _log_upload_endpoint_url = urljoin(self.SERVER_URL, _ecu_id) @@ -125,6 +126,7 @@ async def test_server(self, client_sesion: aiohttp.ClientSession): # ------ check result ------ # # ensure the all msgs are sent in order to the queue by the server. + logger.info("checking all the received messages...") for item in self._msgs: _ecu_id, _log_msg = self._queue.get_nowait() assert _ecu_id == item.ecu_id From d158f785e2a9fb2be46385a0bec0e1e566a63119 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 09:58:48 +0000 Subject: [PATCH 063/128] github_action: implement test CI --- .github/workflows/test.yaml | 75 +++++++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 .github/workflows/test.yaml diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml new file mode 100644 index 0000000..fbd83ae --- /dev/null +++ b/.github/workflows/test.yaml @@ -0,0 +1,75 @@ +name: test CI + +on: [pull_request] + +jobs: + pytest_with_coverage_on_supported_os: + strategy: + fail-fast: true + matrix: + # currently we only need to ensure it is running on the following OS + # with OS-shipped python interpreter. + os: ["ubuntu-20.04", "ubuntu-22.04"] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout commit + uses: actions/checkout@v4 + - name: Install package + run: | + python -m pip install -q -U pip + pip install -q .[dev] + - name: Execute pytest with coverage + run: | + coverage run -m pytest --junit-xml=test_result/pytest.xml + coverage xml -o test_result/coverage.xml + # export the coverage report to the comment! 
+ - name: Add coverage report to PR comment + continue-on-error: true + uses: MishaKav/pytest-coverage-comment@v1.1.51 + with: + pytest-xml-coverage-path: test_result/coverage.xml + junitxml-path: test_result/pytest.xml + + pytest_on_supported_python_vers: + runs-on: ubuntu-22.04 + strategy: + fail-fast: true + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + steps: + - name: Checkout commit + uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - name: Install package + run: | + python -m pip install -q -U pip + pip install -q .[dev] + - name: Execute pytest + run: pytest + + python_lint_check: + runs-on: ubuntu-22.04 + timeout-minutes: 3 + strategy: + fail-fast: true + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + steps: + - name: Checkout commit + uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install check dependencies + run: | + python -m pip install -q -U pip + pip install -q .[dev] + - name: Check code linting + run: | + black --check src + flake8 src From eaa6da230ddae0952d30428b4d55a00b82ea0fb9 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 10:01:47 +0000 Subject: [PATCH 064/128] test CI: minor updates --- .github/workflows/test.yaml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fbd83ae..d6ce5e2 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -14,14 +14,22 @@ jobs: steps: - name: Checkout commit uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + cache: "pip" + - name: Install package run: | python -m pip install -q -U pip pip install -q .[dev] + - name: Execute pytest with coverage run: | coverage run -m pytest --junit-xml=test_result/pytest.xml coverage xml -o test_result/coverage.xml + # export the coverage report to the comment! 
- name: Add coverage report to PR comment continue-on-error: true @@ -39,15 +47,18 @@ jobs: steps: - name: Checkout commit uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: "pip" + - name: Install package run: | python -m pip install -q -U pip pip install -q .[dev] + - name: Execute pytest run: pytest @@ -61,14 +72,17 @@ jobs: steps: - name: Checkout commit uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} + - name: Install check dependencies run: | python -m pip install -q -U pip pip install -q .[dev] + - name: Check code linting run: | black --check src From d6d285ff51569df42cea6e761d5a61319becceee Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 28 Feb 2024 10:04:23 +0000 Subject: [PATCH 065/128] minor update to release CI --- .github/workflows/on_release.yaml | 32 ------------------------------- .github/workflows/release.yaml | 29 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 32 deletions(-) delete mode 100644 .github/workflows/on_release.yaml create mode 100644 .github/workflows/release.yaml diff --git a/.github/workflows/on_release.yaml b/.github/workflows/on_release.yaml deleted file mode 100644 index 068c8c8..0000000 --- a/.github/workflows/on_release.yaml +++ /dev/null @@ -1,32 +0,0 @@ -name: on_release - -on: - release: - types: [published] - -permissions: - contents: write # upload artifacts requires this permission - -jobs: - build_wheel: - runs-on: ubuntu-22.04 - - steps: - - name: Checkout source code - uses: actions/checkout@v4 - - - name: Setup python environment - uses: actions/setup-python@v5 - with: - python-version: "3.8" - - - name: Build wheel - run: | - python3 -m pip install -U pip - python3 -m pip install hatch - hatch build -t wheel - - - name: Upload built wheel as release asset - uses: softprops/action-gh-release@v1 - with: - files: dist/*.whl \ No newline at end of file diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml new file mode 100644 index 0000000..5b78f6b --- /dev/null +++ b/.github/workflows/release.yaml @@ -0,0 +1,29 @@ +name: build wheel for release + +on: + release: + types: [published] + +permissions: + contents: write # upload artifacts requires this permission + +jobs: + build_wheel: + runs-on: ubuntu-22.04 + steps: + - name: Checkout source code + uses: actions/checkout@v4 + + - name: Setup python environment + uses: actions/setup-python@v5 + + - name: Build wheel + run: | + python3 -m pip install -q -U pip + pip install -q -U hatch + hatch build -t wheel + + - name: Upload built wheel as release asset + uses: softprops/action-gh-release@v1 + with: + files: dist/*.whl From fc6d8ff2d68b14e504aa1a21e0c4c9e15a251112 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 02:51:32 +0000 Subject: [PATCH 066/128] common: cleanup unused --- src/otaclient_iot_logging_server/_common.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/otaclient_iot_logging_server/_common.py b/src/otaclient_iot_logging_server/_common.py index c9167d4..438e418 100644 --- a/src/otaclient_iot_logging_server/_common.py +++ b/src/otaclient_iot_logging_server/_common.py @@ -35,13 +35,6 @@ class LogEvent(TypedDict): sequenceToken: NotRequired[str] -class Credentials(TypedDict): - access_key: str - secret_key: str - token: str - expiry_time: str - - PKCS11URI = TypedDict( "PKCS11URI", { 
From 18d67c726cf4425b63620070fbe3eb1ad91330bb Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 02:53:23 +0000 Subject: [PATCH 067/128] aws_iot_logger: minor fix typing --- src/otaclient_iot_logging_server/aws_iot_logger.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 358bdb7..2800840 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -21,11 +21,10 @@ from datetime import datetime from queue import Empty from threading import Thread -from typing import Any from typing_extensions import NoReturn -from otaclient_iot_logging_server._common import LogMessage, LogsQueue +from otaclient_iot_logging_server._common import LogEvent, LogMessage, LogsQueue from otaclient_iot_logging_server._utils import chain_query, retry from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.boto3_session import get_session @@ -116,11 +115,11 @@ def send_messages(self, log_stream_name: str, message_list: list[LogMessage]): Ref: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/logs/client/put_log_events.html """ - request: dict[str, Any] = { - "logGroupName": self._log_group_name, - "logStreamName": log_stream_name, - "logEvents": message_list, - } + request = LogEvent( + logGroupName=self._log_group_name, + logStreamName=log_stream_name, + logEvents=message_list, + ) if _seq_token := self._sequence_tokens.get(log_stream_name): request["sequenceToken"] = _seq_token From 67b75b57215102d749f2b23aaa440040329f845c Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 05:48:10 +0000 Subject: [PATCH 068/128] add test_aws_iot_logger, minor update to test_log_proxy_server --- tests/test_aws_iot_logger.py | 148 +++++++++++++++++++++++++++++++++ tests/test_log_proxy_server.py | 32 ++++--- 2 files changed, 167 insertions(+), 13 deletions(-) create mode 100644 tests/test_aws_iot_logger.py diff --git a/tests/test_aws_iot_logger.py b/tests/test_aws_iot_logger.py new file mode 100644 index 0000000..8dd9fe9 --- /dev/null +++ b/tests/test_aws_iot_logger.py @@ -0,0 +1,148 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
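For reference, the request shape that send_messages() now builds with the LogEvent TypedDict from the patch above, where sequenceToken is only attached when a cached token exists. This is a minimal sketch with assumed helper names, mirroring the boto3 put_log_events keyword arguments rather than calling the API:

from __future__ import annotations

from typing import Any


def _sketch_build_put_log_events_request(
    log_group_name: str,
    log_stream_name: str,
    log_events: list[dict[str, Any]],
    sequence_token: str | None,
) -> dict[str, Any]:
    # the three required LogEvent fields are always present
    request: dict[str, Any] = {
        "logGroupName": log_group_name,
        "logStreamName": log_stream_name,
        "logEvents": log_events,
    }
    if sequence_token:  # NotRequired field: only set when a token is cached
        request["sequenceToken"] = sequence_token
    return request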
+ + +from __future__ import annotations +from collections import defaultdict + +import os +from queue import Queue +import logging +import random +import time +from datetime import datetime +from uuid import uuid1 + +import pytest +from pytest_mock import MockerFixture + +import otaclient_iot_logging_server.aws_iot_logger +from otaclient_iot_logging_server._common import LogMessage, LogsQueue +from otaclient_iot_logging_server.aws_iot_logger import ( + get_log_stream_name, + AWSIoTLogger, +) + +logger = logging.getLogger(__name__) + +MODULE = otaclient_iot_logging_server.aws_iot_logger.__name__ + +_UNIX_EPOCH = datetime(1970, 1, 1, 0, 0) +_UNIX_EPOCH_FMT = "1970/01/01" + + +@pytest.mark.parametrize( + "_thing_name, _suffix, _expected", + [ + ( + "some_thingname", + "some_suffix", + f"{_UNIX_EPOCH_FMT}/some_thingname/some_suffix", + ), + ( + _thing_name := f"profile-dev-edge-{uuid1()}-Core", + _suffix := "some_ecu", + f"{_UNIX_EPOCH_FMT}/{_thing_name}/{_suffix}", + ), + ], +) +def test_get_log_stream_name( + _thing_name: str, _suffix: str, _expected: str, mocker: MockerFixture +): + _datetime_mock = mocker.MagicMock(spec=datetime) + _datetime_mock.utcnow.return_value = _UNIX_EPOCH + mocker.patch(f"{MODULE}.datetime", _datetime_mock) + assert get_log_stream_name(_thing_name, _suffix) == _expected + + +_mocked_ECUs_list = ("main_ecu", "sub_ecu0", "sub_ecu1", "sub_ecu2", "sub_ecu3") + + +def generate_random_msgs( + msg_len: int, + msg_num: int, + ecus_list: tuple[str, ...] = _mocked_ECUs_list, +) -> list[tuple[str, LogMessage]]: + _res: list[tuple[str, LogMessage]] = [] + for _ in range(msg_num): + _ecu, *_ = random.sample(ecus_list, 1) + _msg = os.urandom(msg_len).hex() + _timestamp = int(time.time()) * 1000 # milliseconds + _res.append((_ecu, LogMessage(timestamp=_timestamp, message=_msg))) + return _res + + +class TestAWSIoTLogger_thread_main: + MSG_LEN = 16 + MSG_NUM = 4096 + + class _TestFinished(Exception): + pass + + def _mocked_send_messages(self, _ecu_id: str, _logs: list[LogMessage]): + self._test_result[_ecu_id] = _logs + + @pytest.fixture + def prepare_test_data(self): + _msgs = generate_random_msgs(self.MSG_LEN, self.MSG_NUM) + + # prepare result for test_thread_main + _merged_msgs: dict[str, list[LogMessage]] = defaultdict(list) + for _ecu_id, _log_msg in _msgs: + _merged_msgs[_ecu_id].append(_log_msg) + self._merged_msgs = _merged_msgs + + # prepare the queue for test + _queue: LogsQueue = Queue() + for _item in _msgs: + _queue.put_nowait(_item) + self._queue = _queue + + @pytest.fixture(autouse=True) + def setup_test(self, prepare_test_data, mocker: MockerFixture): + _time_mocker = mocker.MagicMock(spec=time) + # NOTE: a hack here to interrupt the while loop + _time_mocker.sleep.side_effect = self._TestFinished + mocker.patch(f"{MODULE}.time", _time_mocker) + + # ------ prepare test self ------ # + # The following bound variables will be used in thread_main method. + # NOTE: another hack to let all entries being merged within one + # loop iteration. + self._max_logs_per_merge = float("inf") + self.send_messages = self._mocked_send_messages + self._interval = 6 # place holder + self._session_config = mocker.MagicMock() # place holder + + # for holding test results + # mocked_send_messages will record each calls in this dict + self._test_result: dict[str, list[LogMessage]] = {} + + # mock get_log_stream_name to let it returns the log_stream_suffix + # as it, make the test easier. 
+ # see get_log_stream_name signature for more details + get_log_stream_name_mock = mocker.MagicMock(wraps=lambda x, y: y) + mocker.patch(f"{MODULE}.get_log_stream_name", get_log_stream_name_mock) + + def test_thread_main(self): + func_to_test = AWSIoTLogger.thread_main + + # ------ execution ------ # + with pytest.raises(self._TestFinished): + func_to_test.__get__(self)() + logger.info("execution finished") + + # ------ check result ------ # + # confirm the send_messages mock receives the expecting calls. + assert self._merged_msgs == self._test_result diff --git a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py index 43fb906..fdcf0fe 100644 --- a/tests/test_log_proxy_server.py +++ b/tests/test_log_proxy_server.py @@ -20,7 +20,6 @@ import random from dataclasses import dataclass from queue import Queue -from typing import NamedTuple from urllib.parse import urljoin import aiohttp @@ -47,26 +46,35 @@ class _ServerConfig: _test_server_cfg = _ServerConfig() -class MessageEntry(NamedTuple): +@dataclass +class MessageEntry: ecu_id: str message: str +mocked_ECUs_list = ("main_ecu", "sub_ecu0", "sub_ecu1", "sub_ecu2") + + +def generate_random_msgs( + ecus_list: tuple[str, ...] = mocked_ECUs_list, + msg_len: int = 16, + msg_num: int = 4096, +) -> list[MessageEntry]: + _res: list[MessageEntry] = [] + for _ in range(msg_num): + _ecu, *_ = random.sample(ecus_list, 1) + _msg = os.urandom(msg_len).hex() + _res.append(MessageEntry(_ecu, _msg)) + return _res + + class TestLogProxyServer: SERVER_URL = ( f"http://{_test_server_cfg.LISTEN_ADDRESS}:{_test_server_cfg.LISTEN_PORT}/" ) - ECUS = ("main_ecu", "sub_ecu0", "sub_ecu1", "sub_ecu2") - MSG_LEN = 16 TOTAL_MSG_NUM = 4096 - @classmethod - def _generate_random_msg(cls) -> MessageEntry: - _ecu, *_ = random.sample(cls.ECUS, 1) - _msg = os.urandom(cls.MSG_LEN).hex() - return MessageEntry(_ecu, _msg) - @pytest.fixture(autouse=True) async def launch_server(self): """ @@ -111,9 +119,7 @@ async def client_sesion(self): @pytest.fixture(autouse=True) def prepare_test_data(self): - self._msgs: list[MessageEntry] = [] - for _ in range(self.TOTAL_MSG_NUM): - self._msgs.append(self._generate_random_msg()) + self._msgs = generate_random_msgs(msg_num=self.TOTAL_MSG_NUM) async def test_server(self, client_sesion: aiohttp.ClientSession): # ------ execution ------ # From 35f96be7928fa8e7ff39984207e5d0f417887b1d Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 06:04:46 +0000 Subject: [PATCH 069/128] minor change to release CI --- .github/workflows/release.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 5b78f6b..2dab9fd 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -16,6 +16,10 @@ jobs: - name: Setup python environment uses: actions/setup-python@v5 + with: + # use the minimum py ver we support to + # generate the wheel. 
+ python-version: "3.8" - name: Build wheel run: | From f4718060dfc49f926180171ecbd05fe883437d1f Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 06:58:20 +0000 Subject: [PATCH 070/128] aws_iot_logger: fix main loop breaks on failed upload --- .../aws_iot_logger.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 2800840..5b34e47 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -153,12 +153,14 @@ def send_messages(self, log_stream_name: str, message_list: list[LogMessage]): self._create_log_stream(log_stream_name) raise except Exception as e: + # NOTE: for unhandled exception, we just log it and ignore, + # leave for the developer to properly handle it + # in the future! logger.error( f"put_log_events failure: {e!r}\n" f"log_group_name={self._log_group_name}, \n" f"log_stream_name={log_stream_name}" ) - raise def thread_main(self) -> NoReturn: """Main entry for running this iot_logger in a thread.""" @@ -179,12 +181,15 @@ def thread_main(self) -> NoReturn: break for log_stream_suffix, logs in message_dict.items(): - self.send_messages( - get_log_stream_name( - self._session_config.thing_name, log_stream_suffix - ), - logs, - ) + try: + self.send_messages( + get_log_stream_name( + self._session_config.thing_name, log_stream_suffix + ), + logs, + ) + except Exception: + pass # don't let the exception breaks the main loop time.sleep(self._interval) From 9a558e4e70014ab2e9fd649620d54c70059ad497 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 07:00:32 +0000 Subject: [PATCH 071/128] log_proxy_server: filter out empty request --- src/otaclient_iot_logging_server/log_proxy_server.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index c9cbfae..806df7f 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -43,6 +43,10 @@ async def logging_post_handler(self, request: Request): """ _ecu_id = request.match_info["ecu_id"] _raw_logging = await request.text() + # don't allow empty request + if not _raw_logging: + return web.Response(status=HTTPStatus.BAD_REQUEST) + _logging_msg = LogMessage( timestamp=int(time.time()) * 1000, # milliseconds message=_raw_logging, From b02f80b5d6d1fbc0b7e5caa552641f5a73f7e34d Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 07:15:58 +0000 Subject: [PATCH 072/128] configs: add new options ALLOWED_ECUS --- src/otaclient_iot_logging_server/configs.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index a7217a6..aa6e26d 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -27,6 +27,10 @@ _LoggingLevelName = Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] +def _csv_to_list(_in: str) -> list[str]: + return list(map(str.strip, _in.split(","))) + + class ConfigurableLoggingServerConfig(BaseSettings): model_config = SettingsConfigDict(frozen=True, validate_default=True) # the default location of greengrass configuration files. 
@@ -50,6 +54,12 @@ class ConfigurableLoggingServerConfig(BaseSettings): MAX_LOGS_PER_MERGE: int = 512 UPLOAD_INTERVAL: int = 60 # in seconds + ALLOWED_ECUS: Annotated[ + List[str], + BeforeValidator(_csv_to_list), + ] = ["autoware"] + """Comma separated list of allowed ECU ids.""" + class _AWSProfile(BaseModel): model_config = SettingsConfigDict(frozen=True) From 601aa2f38bf478dc9fab3e06368e322904f1b65f Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 07:18:14 +0000 Subject: [PATCH 073/128] silently ignore requests from unknowned ECUs --- src/otaclient_iot_logging_server/configs.py | 12 ++++++------ src/otaclient_iot_logging_server/log_proxy_server.py | 4 ++-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index aa6e26d..7031597 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -17,7 +17,7 @@ from __future__ import annotations from pathlib import Path -from typing import List, Literal +from typing import List, Literal, Set import yaml from pydantic import BaseModel, BeforeValidator, Field, RootModel @@ -27,8 +27,8 @@ _LoggingLevelName = Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] -def _csv_to_list(_in: str) -> list[str]: - return list(map(str.strip, _in.split(","))) +def _csv_to_set(_in: str) -> set[str]: + return set(map(str.strip, _in.split(","))) class ConfigurableLoggingServerConfig(BaseSettings): @@ -55,9 +55,9 @@ class ConfigurableLoggingServerConfig(BaseSettings): UPLOAD_INTERVAL: int = 60 # in seconds ALLOWED_ECUS: Annotated[ - List[str], - BeforeValidator(_csv_to_list), - ] = ["autoware"] + Set[str], + BeforeValidator(_csv_to_set), + ] = {"autoware"} """Comma separated list of allowed ECU ids.""" diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index 806df7f..4c824dd 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -43,8 +43,8 @@ async def logging_post_handler(self, request: Request): """ _ecu_id = request.match_info["ecu_id"] _raw_logging = await request.text() - # don't allow empty request - if not _raw_logging: + # don't allow empty request or unknowned ECUs + if not _raw_logging or _ecu_id not in server_cfg.ALLOWED_ECUS: return web.Response(status=HTTPStatus.BAD_REQUEST) _logging_msg = LogMessage( From 9ef2bfc83b83f81490211bf4cc3e0314a29431a8 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 07:26:24 +0000 Subject: [PATCH 074/128] fix configs ALLOWED_ECUS field parsing --- src/otaclient_iot_logging_server/configs.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 7031597..1ca00bb 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -27,10 +27,6 @@ _LoggingLevelName = Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] -def _csv_to_set(_in: str) -> set[str]: - return set(map(str.strip, _in.split(","))) - - class ConfigurableLoggingServerConfig(BaseSettings): model_config = SettingsConfigDict(frozen=True, validate_default=True) # the default location of greengrass configuration files. 
@@ -54,10 +50,7 @@ class ConfigurableLoggingServerConfig(BaseSettings): MAX_LOGS_PER_MERGE: int = 512 UPLOAD_INTERVAL: int = 60 # in seconds - ALLOWED_ECUS: Annotated[ - Set[str], - BeforeValidator(_csv_to_set), - ] = {"autoware"} + ALLOWED_ECUS: Set[str] = {"autoware"} """Comma separated list of allowed ECU ids.""" From 4a78b3571ff550df4b9482a26f25398b9dea70fd Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 07:37:09 +0000 Subject: [PATCH 075/128] update README.md --- README.md | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 725b86c..e364466 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,25 @@ # OTAClient AWS IoT logging server -A logging server that uploads logs sent from otaclient to AWS cloudwatch. \ No newline at end of file +A logging server that uploads logs sent from otaclient to AWS cloudwatch. + +## Usage + +### Environmental variables + +The behaviors of the iot_logging_server can be configured with the following environmental variables: + +| Environmental variables | Default value | Description | +| ---- | ---- | --- | +| GREENGRASS_V1_CONFIG | `/greengrass/config/config.json` | | +| GREENGRASS_V2_CONFIG | `/greengrass/v2/init_config/config.yaml` | | +| AWS_PROFILE_INFO | `/opt/ota/iot_logger/aws_profile_info.yaml` | | +| LISTEN_ADDRESS | `127.0.0.1` | | +| LISTEN_PORT | `8083` | | +| UPLOAD_LOGGING_SERVER_LOGS | `false` | Whether to upload the logs from server itself to cloudwatchlogs | +| SERVER_LOGSTREAM_SUFFIX | `iot_logging_server` | log_stream_suffix to use for local server logs upload | +| SERVER_LOGGING_LEVEL | `INFO` | | +| SERVER_LOGGING_LOG_FORMAT | `[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s` | | +| MAX_LOGS_BACKLOG | `4096` | Max pending log entries | +| MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group | +| UPLOAD_INTERVAL | `60` | Interval of uploading log batches to cloud | +| ALLOWED_ECUS | `["autoware"]` | An JSON array contains the allowed ECUs'ID, only logs from allowed ECUs will be processed | From 60f7a6abb54fa056869f03a63831200aace2767b Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 29 Feb 2024 07:55:07 +0000 Subject: [PATCH 076/128] fix test --- tests/test_aws_iot_logger.py | 6 +++--- tests/test_configs.py | 5 +++++ tests/test_log_proxy_server.py | 31 +++++++++++++++++++++++++++++-- 3 files changed, 37 insertions(+), 5 deletions(-) diff --git a/tests/test_aws_iot_logger.py b/tests/test_aws_iot_logger.py index 8dd9fe9..29dfc47 100644 --- a/tests/test_aws_iot_logger.py +++ b/tests/test_aws_iot_logger.py @@ -14,14 +14,14 @@ from __future__ import annotations -from collections import defaultdict -import os -from queue import Queue import logging +import os import random import time +from collections import defaultdict from datetime import datetime +from queue import Queue from uuid import uuid1 import pytest diff --git a/tests/test_configs.py b/tests/test_configs.py index 73e26ea..9d12f29 100644 --- a/tests/test_configs.py +++ b/tests/test_configs.py @@ -50,6 +50,7 @@ "MAX_LOGS_BACKLOG": 4096, "MAX_LOGS_PER_MERGE": 512, "UPLOAD_INTERVAL": 60, + "ALLOWED_ECUS": {"autoware"}, }, ), # test#1: frequently changed settings @@ -58,6 +59,7 @@ "LISTEN_ADDRESS": "172.16.1.1", "SERVER_LOGGING_LEVEL": "ERROR", "UPLOAD_INTERVAL": "30", + "ALLOWED_ECUS": '["main_ecu", "sub_ecu"]', }, { "GREENGRASS_V1_CONFIG": "/greengrass/config/config.json", @@ -72,6 +74,7 @@ "MAX_LOGS_BACKLOG": 4096, "MAX_LOGS_PER_MERGE": 512, "UPLOAD_INTERVAL": 
30, + "ALLOWED_ECUS": {"main_ecu", "sub_ecu"}, }, ), # test#2: change everything @@ -89,6 +92,7 @@ "MAX_LOGS_BACKLOG": "1024", "MAX_LOGS_PER_MERGE": "128", "UPLOAD_INTERVAL": "10", + "ALLOWED_ECUS": '["main_ecu", "sub_ecu"]', }, { "GREENGRASS_V1_CONFIG": "ggv1_cfg.json", @@ -103,6 +107,7 @@ "MAX_LOGS_BACKLOG": 1024, "MAX_LOGS_PER_MERGE": 128, "UPLOAD_INTERVAL": 10, + "ALLOWED_ECUS": {"main_ecu", "sub_ecu"}, }, ), ], diff --git a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py index fdcf0fe..a15a550 100644 --- a/tests/test_log_proxy_server.py +++ b/tests/test_log_proxy_server.py @@ -18,12 +18,16 @@ import logging import os import random -from dataclasses import dataclass +from dataclasses import dataclass, field +from http import HTTPStatus from queue import Queue +from typing import Set from urllib.parse import urljoin import aiohttp +import aiohttp.client_exceptions import pytest +from pytest_mock import MockerFixture from aiohttp import web import otaclient_iot_logging_server.log_proxy_server as log_server_module @@ -41,6 +45,9 @@ class _ServerConfig: LISTEN_ADDRESS: str = "127.0.0.1" LISTEN_PORT: int = 8083 + ALLOWED_ECUS: Set[str] = field( + default_factory=lambda: {"main_ecu", "sub_ecu0", "sub_ecu1", "sub_ecu2"} + ) _test_server_cfg = _ServerConfig() @@ -76,11 +83,13 @@ class TestLogProxyServer: TOTAL_MSG_NUM = 4096 @pytest.fixture(autouse=True) - async def launch_server(self): + async def launch_server(self, mocker: MockerFixture): """ See https://docs.aiohttp.org/en/stable/web_advanced.html#custom-resource-implementation for more details. """ + mocker.patch(f"{MODULE}.server_cfg", _test_server_cfg) + queue: LogsQueue = Queue() self._queue = queue @@ -138,3 +147,21 @@ async def test_server(self, client_sesion: aiohttp.ClientSession): assert _ecu_id == item.ecu_id assert _log_msg["message"] == item.message assert self._queue.empty() + + @pytest.mark.parametrize( + "_ecu_id, _data", + [ + # unknowned ECU's request will be dropped + ("bad_ecu_id", "valid_msg"), + # empty message will be dropped + ("main_ecu", ""), + ], + ) + async def test_reject_invalid_request( + self, _ecu_id: str, _data: str, client_sesion: aiohttp.ClientSession + ): + with pytest.raises(aiohttp.client_exceptions.ClientResponseError) as exc_info: + _log_upload_endpoint_url = urljoin(self.SERVER_URL, _ecu_id) + async with client_sesion.post(_log_upload_endpoint_url, data=_data): + pass # raise_for_status is set on session + assert exc_info.value.status == HTTPStatus.BAD_REQUEST From 82c22daa653ec5db95b2ddd021d851b75f340a07 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 4 Mar 2024 09:21:27 +0000 Subject: [PATCH 077/128] aws_iot_logger: add logic to detect failure caused by failed mtls connection --- .../aws_iot_logger.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 5b34e47..ad1d759 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -22,6 +22,7 @@ from queue import Empty from threading import Thread +import awscrt.exceptions from typing_extensions import NoReturn from otaclient_iot_logging_server._common import LogEvent, LogMessage, LogsQueue @@ -87,6 +88,14 @@ def _create_log_group(self): logger.debug( f"{log_group_name=} already existed, skip creating: {e.response}" ) + except ValueError as e: + if e.__cause__ and isinstance(e.__cause__, awscrt.exceptions.AwsCrtError): + logger.error( + 
(f"failed to create mtls connection to remote: {e.__cause__}") + ) + raise e.__cause__ + logger.error(f"failed to create {log_group_name=}: {e!r}") + raise except Exception as e: logger.error(f"failed to create {log_group_name=}: {e!r}") raise @@ -105,6 +114,14 @@ def _create_log_stream(self, log_stream_name: str): logger.debug( f"{log_stream_name=}@{log_group_name} already existed, skip creating: {e.response}" ) + except ValueError as e: + if e.__cause__ and isinstance(e.__cause__, awscrt.exceptions.AwsCrtError): + logger.error( + (f"failed to create mtls connection to remote: {e.__cause__}") + ) + raise e.__cause__ + logger.error(f"failed to create {log_stream_name=}@{log_group_name}: {e!r}") + raise except Exception as e: logger.error(f"failed to create {log_stream_name=}@{log_group_name}: {e!r}") raise From 5b07231b54018d00ec73641203ef66c8ce5f6872 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 4 Mar 2024 09:22:45 +0000 Subject: [PATCH 078/128] aws_iot_logger: move create_log_group from __init__ to thread_main --- src/otaclient_iot_logging_server/aws_iot_logger.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index ad1d759..d3f8c97 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -73,9 +73,6 @@ def __init__( # will definitely have entries less than MAX_LOGS_PER_PUT self._max_logs_per_merge = min(max_logs_per_merge, self.MAX_LOGS_PER_PUT) - # unconditionally create log_group and log_stream, do nothing if existed. - self._create_log_group() - @retry(max_retry=16, backoff_factor=2, backoff_max=32) def _create_log_group(self): # TODO: (20240214) should we let the edge side iot_logging_server @@ -181,6 +178,9 @@ def send_messages(self, log_stream_name: str, message_list: list[LogMessage]): def thread_main(self) -> NoReturn: """Main entry for running this iot_logger in a thread.""" + # unconditionally create log_group and log_stream, do nothing if existed. + self._create_log_group() + while True: # merge LogMessages into the same source, identified by # log_stream_suffix. 
From 6609e5042dcaabe31f72535093816bda6486fd26 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 14 Mar 2024 07:17:23 +0000 Subject: [PATCH 079/128] configs: AWS_PROFILE_INFO, iot_logger -> iot-logger --- src/otaclient_iot_logging_server/configs.py | 2 +- tests/test_aws_iot_logger.py | 6 +++++- tests/test_configs.py | 4 ++-- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 1ca00bb..6a0a486 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -34,7 +34,7 @@ class ConfigurableLoggingServerConfig(BaseSettings): GREENGRASS_V1_CONFIG: str = "/greengrass/config/config.json" GREENGRASS_V2_CONFIG: str = "/greengrass/v2/init_config/config.yaml" - AWS_PROFILE_INFO: str = "/opt/ota/iot_logger/aws_profile_info.yaml" + AWS_PROFILE_INFO: str = "/opt/ota/iot-logger/aws_profile_info.yaml" """The path to aws_profile_info.yaml.""" LISTEN_ADDRESS: str = "127.0.0.1" diff --git a/tests/test_aws_iot_logger.py b/tests/test_aws_iot_logger.py index 29dfc47..b3f0957 100644 --- a/tests/test_aws_iot_logger.py +++ b/tests/test_aws_iot_logger.py @@ -135,8 +135,11 @@ def setup_test(self, prepare_test_data, mocker: MockerFixture): get_log_stream_name_mock = mocker.MagicMock(wraps=lambda x, y: y) mocker.patch(f"{MODULE}.get_log_stream_name", get_log_stream_name_mock) - def test_thread_main(self): + def test_thread_main(self, mocker: MockerFixture): func_to_test = AWSIoTLogger.thread_main + self._create_log_group = mocked__create_log_group = mocker.MagicMock( + spec=AWSIoTLogger._create_log_group + ) # ------ execution ------ # with pytest.raises(self._TestFinished): @@ -144,5 +147,6 @@ def test_thread_main(self): logger.info("execution finished") # ------ check result ------ # + mocked__create_log_group.assert_called_once() # confirm the send_messages mock receives the expecting calls. 
assert self._merged_msgs == self._test_result diff --git a/tests/test_configs.py b/tests/test_configs.py index 9d12f29..dec1a43 100644 --- a/tests/test_configs.py +++ b/tests/test_configs.py @@ -40,7 +40,7 @@ { "GREENGRASS_V1_CONFIG": "/greengrass/config/config.json", "GREENGRASS_V2_CONFIG": "/greengrass/v2/init_config/config.yaml", - "AWS_PROFILE_INFO": "/opt/ota/iot_logger/aws_profile_info.yaml", + "AWS_PROFILE_INFO": "/opt/ota/iot-logger/aws_profile_info.yaml", "LISTEN_ADDRESS": "127.0.0.1", "LISTEN_PORT": 8083, "UPLOAD_LOGGING_SERVER_LOGS": False, @@ -64,7 +64,7 @@ { "GREENGRASS_V1_CONFIG": "/greengrass/config/config.json", "GREENGRASS_V2_CONFIG": "/greengrass/v2/init_config/config.yaml", - "AWS_PROFILE_INFO": "/opt/ota/iot_logger/aws_profile_info.yaml", + "AWS_PROFILE_INFO": "/opt/ota/iot-logger/aws_profile_info.yaml", "LISTEN_ADDRESS": "172.16.1.1", "LISTEN_PORT": 8083, "UPLOAD_LOGGING_SERVER_LOGS": False, From c0d478c6adb0d81e5e685e10682357b868b107dc Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 14 Mar 2024 07:28:55 +0000 Subject: [PATCH 080/128] configs: ALLOWED_ECUS now is optional, only enforce checking if ALLOWED_ECUS is configure --- src/otaclient_iot_logging_server/configs.py | 4 ++-- src/otaclient_iot_logging_server/log_proxy_server.py | 8 +++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 6a0a486..34cd83c 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -17,7 +17,7 @@ from __future__ import annotations from pathlib import Path -from typing import List, Literal, Set +from typing import List, Literal, Optional, Set import yaml from pydantic import BaseModel, BeforeValidator, Field, RootModel @@ -50,7 +50,7 @@ class ConfigurableLoggingServerConfig(BaseSettings): MAX_LOGS_PER_MERGE: int = 512 UPLOAD_INTERVAL: int = 60 # in seconds - ALLOWED_ECUS: Set[str] = {"autoware"} + ALLOWED_ECUS: Optional[Set[str]] = None """Comma separated list of allowed ECU ids.""" diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index 4c824dd..bd5d8d4 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -19,6 +19,7 @@ import time from http import HTTPStatus from queue import Full, Queue +from typing import Optional from aiohttp import web from aiohttp.web import Request @@ -34,6 +35,7 @@ class LoggingPostHandler: def __init__(self, queue: LogsQueue) -> None: self._queue = queue + self._allowed_ecus: Optional[set[str]] = server_cfg.ALLOWED_ECUS # route: POST /{ecu_id} async def logging_post_handler(self, request: Request): @@ -43,8 +45,12 @@ async def logging_post_handler(self, request: Request): """ _ecu_id = request.match_info["ecu_id"] _raw_logging = await request.text() + _allowed_ecus = self._allowed_ecus + # don't allow empty request or unknowned ECUs - if not _raw_logging or _ecu_id not in server_cfg.ALLOWED_ECUS: + # NOTE(20240314): if ALLOWED_ECUS is not configured, we don't enforce + # the check against incoming ECU id. 
+ if not _raw_logging or (_allowed_ecus and _ecu_id not in _allowed_ecus): return web.Response(status=HTTPStatus.BAD_REQUEST) _logging_msg = LogMessage( From 30484b6715938f0a08983edda56448f3912fb844 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 14 Mar 2024 07:30:03 +0000 Subject: [PATCH 081/128] fix test_configs --- tests/test_configs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_configs.py b/tests/test_configs.py index dec1a43..949f7e2 100644 --- a/tests/test_configs.py +++ b/tests/test_configs.py @@ -50,7 +50,7 @@ "MAX_LOGS_BACKLOG": 4096, "MAX_LOGS_PER_MERGE": 512, "UPLOAD_INTERVAL": 60, - "ALLOWED_ECUS": {"autoware"}, + "ALLOWED_ECUS": None, }, ), # test#1: frequently changed settings From 5cd6950f0575602e8e4f82b54ad46ea31eb4c211 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 14 Mar 2024 07:31:05 +0000 Subject: [PATCH 082/128] update README.md about ALLOWED_ECUS --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e364466..f822c0b 100644 --- a/README.md +++ b/README.md @@ -22,4 +22,4 @@ The behaviors of the iot_logging_server can be configured with the following env | MAX_LOGS_BACKLOG | `4096` | Max pending log entries | | MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group | | UPLOAD_INTERVAL | `60` | Interval of uploading log batches to cloud | -| ALLOWED_ECUS | `["autoware"]` | An JSON array contains the allowed ECUs'ID, only logs from allowed ECUs will be processed | +| ALLOWED_ECUS | | An JSON array contains the allowed ECUs'ID, when set, only logs from allowed ECUs will be processed | From 19c1c215ba95e5f46a8371c2ad4e63118e523e38 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 9 Apr 2024 09:22:33 +0000 Subject: [PATCH 083/128] update gitignore to ignore local vscode settings --- .gitignore | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 7ef6cf9..d6f5fa9 100644 --- a/.gitignore +++ b/.gitignore @@ -159,5 +159,6 @@ cython_debug/ # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ -# generated sversion file -src/otaclient_iot_logging_server/_version.py \ No newline at end of file +# local vscode settings +.devcontainer +.vscode From a17a4e0821eb1a32c7dcd1cd7eb5205037847848 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 9 Apr 2024 09:24:54 +0000 Subject: [PATCH 084/128] add gitignore --- src/otaclient_iot_logging_server/.gitignore | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 src/otaclient_iot_logging_server/.gitignore diff --git a/src/otaclient_iot_logging_server/.gitignore b/src/otaclient_iot_logging_server/.gitignore new file mode 100644 index 0000000..0cee351 --- /dev/null +++ b/src/otaclient_iot_logging_server/.gitignore @@ -0,0 +1,2 @@ +# ignore generated version file +_version.py \ No newline at end of file From bc7c2579632b4160f53719dabf03cdcbfbf3bb03 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 9 Apr 2024 09:29:25 +0000 Subject: [PATCH 085/128] ci: calculate checksum over built package --- .github/workflows/release.yaml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 2dab9fd..a09f57f 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -21,13 +21,20 @@ jobs: # generate the wheel. 
python-version: "3.8" - - name: Build wheel + - name: Build wheel and calculate checksum run: | python3 -m pip install -q -U pip pip install -q -U hatch hatch build -t wheel + for WHL in dist/*.whl; \ + do \ + sha256sum ${WHL} | sed -E "s@(\w+)\s+.*@sha256:\1@" > \ + ${WHL}.checksum; \ + done - name: Upload built wheel as release asset uses: softprops/action-gh-release@v1 with: - files: dist/*.whl + files: | + dist/*.whl + dist/*.checksum From 2b5ce1eb25372779aa8543b45c65d5bbefb906c5 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 15 Apr 2024 09:35:59 +0000 Subject: [PATCH 086/128] deps: bump versions --- pyproject.toml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 52dbf2d..5f161b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,12 +18,12 @@ classifiers = [ ] dependencies = [ "aiohttp>=3.9.2, <3.10.0", - "awsiot_credentialhelper==0.6.0", - "boto3==1.34.35", - "botocore==1.34.35", + "awsiot_credentialhelper>=0.6.0, <0.7.0", + "boto3>=1.34.35, <1.35.0", + "botocore==1.34.35, <1.35.0", "pyopenssl==24.0.0", - "pydantic==2.6.0", - "pydantic-settings==2.1.0", + "pydantic==2.7.0", + "pydantic-settings==2.2.1", "pyyaml==6.0.1", "typing_extensions>=4.0", ] From e2991f48628165f842f178788a508e963f91d3e2 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 15 Apr 2024 09:36:34 +0000 Subject: [PATCH 087/128] now we parse ecu_info.yaml to only accept logs from known ECUs --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f822c0b..95c4910 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,8 @@ The behaviors of the iot_logging_server can be configured with the following env | GREENGRASS_V1_CONFIG | `/greengrass/config/config.json` | | | GREENGRASS_V2_CONFIG | `/greengrass/v2/init_config/config.yaml` | | | AWS_PROFILE_INFO | `/opt/ota/iot_logger/aws_profile_info.yaml` | | -| LISTEN_ADDRESS | `127.0.0.1` | | +| ECU_INFO_YAML | `/boot/ota/ecu_info.yaml` | The location of ecu_info.yaml config file. iot-logger server will parse the config file and only process logs sending from known ECUs.| +| LISTEN_ADDRESS | `0.0.0.0` | The IP address iot-logger server listen on. | | LISTEN_PORT | `8083` | | | UPLOAD_LOGGING_SERVER_LOGS | `false` | Whether to upload the logs from server itself to cloudwatchlogs | | SERVER_LOGSTREAM_SUFFIX | `iot_logging_server` | log_stream_suffix to use for local server logs upload | From 8398d259dd7bd5a09236607d35c045c1260f5dfd Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 15 Apr 2024 10:20:54 +0000 Subject: [PATCH 088/128] add stripped version of ecu_info.py --- README.md | 8 ++- src/otaclient_iot_logging_server/ecu_info.py | 67 ++++++++++++++++++++ 2 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 src/otaclient_iot_logging_server/ecu_info.py diff --git a/README.md b/README.md index 95c4910..a30a0dd 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ A logging server that uploads logs sent from otaclient to AWS cloudwatch. +This iot-logger is expected to be installed on the main ECU, with greengrass certificates and otaclient config file(ecu_info.yaml) installed. 
+ ## Usage ### Environmental variables @@ -23,4 +25,8 @@ The behaviors of the iot_logging_server can be configured with the following env | MAX_LOGS_BACKLOG | `4096` | Max pending log entries | | MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group | | UPLOAD_INTERVAL | `60` | Interval of uploading log batches to cloud | -| ALLOWED_ECUS | | An JSON array contains the allowed ECUs'ID, when set, only logs from allowed ECUs will be processed | + +### ecu_info.yaml + +If `ecu_info.yaml` presented and valid, iot-logger will only accept logs from known ECU ids. +Currently only ECU id will be checked, IP checking is not performed as sub ECU otaclient might send logging from different IPs if ECU has multiple interfaces. \ No newline at end of file diff --git a/src/otaclient_iot_logging_server/ecu_info.py b/src/otaclient_iot_logging_server/ecu_info.py new file mode 100644 index 0000000..a9415ef --- /dev/null +++ b/src/otaclient_iot_logging_server/ecu_info.py @@ -0,0 +1,67 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""ECU metadatas definition and parsing logic. + +Basically the one copied from otaclient, with only parsing fields we care about. +""" + + +from __future__ import annotations +import logging +from functools import cached_property +from pathlib import Path +from typing import List, Optional + +import yaml +from pydantic import BaseModel, ConfigDict, Field, IPvAnyAddress + + +logger = logging.getLogger(__name__) + + +class BaseFixedConfig(BaseModel): + model_config = ConfigDict(frozen=True) + + +class ECUContact(BaseFixedConfig): + ecu_id: str + ip_addr: IPvAnyAddress + port: int = 50051 + + +class ECUInfo(BaseFixedConfig): + """ECU info configuration. + + We only need to parse ecu_id and secondaries fields. 
+ """ + + format_version: int = 1 + ecu_id: str + secondaries: List[ECUContact] = Field(default_factory=list) + + @cached_property + def ecu_id_list(self) -> set[str]: + res = [ecu_contact.ecu_id for ecu_contact in self.secondaries] + res.append(self.ecu_id) + return set(res) + + +def parse_ecu_info(ecu_info_file: Path | str) -> Optional[ECUInfo]: + try: + _raw_yaml_str = Path(ecu_info_file).read_text() + loaded_ecu_info = yaml.safe_load(_raw_yaml_str) + assert isinstance(loaded_ecu_info, dict), "not a valid yaml file" + return ECUInfo.model_validate(loaded_ecu_info, strict=True) + except Exception as e: + logger.info(f"{ecu_info_file=} is invalid or missing: {e!r}") From c08df8853669512defb1a37883cf8444ff2d6292 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 15 Apr 2024 10:25:51 +0000 Subject: [PATCH 089/128] integrate stripped ecu_info into log_proxy_server --- src/otaclient_iot_logging_server/configs.py | 5 ++--- src/otaclient_iot_logging_server/ecu_info.py | 2 +- src/otaclient_iot_logging_server/log_proxy_server.py | 11 +++++++---- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 34cd83c..1ce4afa 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -17,7 +17,7 @@ from __future__ import annotations from pathlib import Path -from typing import List, Literal, Optional, Set +from typing import List, Literal import yaml from pydantic import BaseModel, BeforeValidator, Field, RootModel @@ -50,8 +50,7 @@ class ConfigurableLoggingServerConfig(BaseSettings): MAX_LOGS_PER_MERGE: int = 512 UPLOAD_INTERVAL: int = 60 # in seconds - ALLOWED_ECUS: Optional[Set[str]] = None - """Comma separated list of allowed ECU ids.""" + ECU_INFO_YAML = "/boot/ota/ecu_info.yaml" class _AWSProfile(BaseModel): diff --git a/src/otaclient_iot_logging_server/ecu_info.py b/src/otaclient_iot_logging_server/ecu_info.py index a9415ef..6969229 100644 --- a/src/otaclient_iot_logging_server/ecu_info.py +++ b/src/otaclient_iot_logging_server/ecu_info.py @@ -51,7 +51,7 @@ class ECUInfo(BaseFixedConfig): secondaries: List[ECUContact] = Field(default_factory=list) @cached_property - def ecu_id_list(self) -> set[str]: + def ecu_id_set(self) -> set[str]: res = [ecu_contact.ecu_id for ecu_contact in self.secondaries] res.append(self.ecu_id) return set(res) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index bd5d8d4..4921f66 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -19,13 +19,13 @@ import time from http import HTTPStatus from queue import Full, Queue -from typing import Optional from aiohttp import web from aiohttp.web import Request from otaclient_iot_logging_server._common import LogMessage, LogsQueue from otaclient_iot_logging_server.configs import server_cfg +from otaclient_iot_logging_server.ecu_info import parse_ecu_info logger = logging.getLogger(__name__) @@ -35,7 +35,11 @@ class LoggingPostHandler: def __init__(self, queue: LogsQueue) -> None: self._queue = queue - self._allowed_ecus: Optional[set[str]] = server_cfg.ALLOWED_ECUS + self._allowed_ecus = None + + stripped_ecu_info = parse_ecu_info(server_cfg.ECU_INFO_YAML) + if stripped_ecu_info: + self._allowed_ecus = stripped_ecu_info.ecu_id_set # route: POST /{ecu_id} async def logging_post_handler(self, request: Request): @@ -48,8 +52,7 @@ async 
def logging_post_handler(self, request: Request): _allowed_ecus = self._allowed_ecus # don't allow empty request or unknowned ECUs - # NOTE(20240314): if ALLOWED_ECUS is not configured, we don't enforce - # the check against incoming ECU id. + # if ECU id is unknown(not listed in ecu_info.yaml), drop this log. if not _raw_logging or (_allowed_ecus and _ecu_id not in _allowed_ecus): return web.Response(status=HTTPStatus.BAD_REQUEST) From b0913e7de0dd76856231b091839f205eb96ece4c Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 15 Apr 2024 10:33:01 +0000 Subject: [PATCH 090/128] add test --- tests/data/ecu_info.yaml | 15 +++++++++++++++ tests/test_ecu_info.py | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 tests/data/ecu_info.yaml create mode 100644 tests/test_ecu_info.py diff --git a/tests/data/ecu_info.yaml b/tests/data/ecu_info.yaml new file mode 100644 index 0000000..344595c --- /dev/null +++ b/tests/data/ecu_info.yaml @@ -0,0 +1,15 @@ +format_version: 1 +ecu_id: "main" +bootloader: "grub" +secondaries: + - ecu_id: "sub1" + ip_addr: "192.168.10.21" + - ecu_id: "sub2" + ip_addr: "192.168.10.22" + - ecu_id: "sub3" + ip_addr: "192.168.10.23" +available_ecu_ids: + - "main" + - "sub1" + - "sub2" + - "sub3" diff --git a/tests/test_ecu_info.py b/tests/test_ecu_info.py new file mode 100644 index 0000000..711e2f9 --- /dev/null +++ b/tests/test_ecu_info.py @@ -0,0 +1,38 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import annotations +from pathlib import Path + +import pytest + +from otaclient_iot_logging_server.ecu_info import parse_ecu_info + +TESTS_DIR = Path(__file__).parent / "data" + + +@pytest.mark.parametrize( + ["ecu_info_fname", "expected_ecu_id_set"], + ( + ( + "ecu_info.yaml", + set(["sub1", "sub2", "sub3", "main"]), + ), + ), +) +def test_ecu_info(ecu_info_fname: str, expected_ecu_id_set: set[str]): + ecu_info_fpath = TESTS_DIR / ecu_info_fname + assert (ecu_info_cfg := parse_ecu_info(ecu_info_fpath)) + assert ecu_info_cfg.ecu_id_set == expected_ecu_id_set From bcb417c00fc7a8e99abaf18314dfee31677ac6ef Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 15 Apr 2024 10:37:51 +0000 Subject: [PATCH 091/128] fix tests --- src/otaclient_iot_logging_server/configs.py | 2 +- tests/test_configs.py | 9 ++++----- tests/test_log_proxy_server.py | 14 +++++++------- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 1ce4afa..4c48b64 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -50,7 +50,7 @@ class ConfigurableLoggingServerConfig(BaseSettings): MAX_LOGS_PER_MERGE: int = 512 UPLOAD_INTERVAL: int = 60 # in seconds - ECU_INFO_YAML = "/boot/ota/ecu_info.yaml" + ECU_INFO_YAML: str = "/boot/ota/ecu_info.yaml" class _AWSProfile(BaseModel): diff --git a/tests/test_configs.py b/tests/test_configs.py index 949f7e2..1fc0a79 100644 --- a/tests/test_configs.py +++ b/tests/test_configs.py @@ -50,7 +50,7 @@ "MAX_LOGS_BACKLOG": 4096, "MAX_LOGS_PER_MERGE": 512, "UPLOAD_INTERVAL": 60, - "ALLOWED_ECUS": None, + "ECU_INFO_YAML": "/boot/ota/ecu_info.yaml", }, ), # test#1: frequently changed settings @@ -59,7 +59,6 @@ "LISTEN_ADDRESS": "172.16.1.1", "SERVER_LOGGING_LEVEL": "ERROR", "UPLOAD_INTERVAL": "30", - "ALLOWED_ECUS": '["main_ecu", "sub_ecu"]', }, { "GREENGRASS_V1_CONFIG": "/greengrass/config/config.json", @@ -74,7 +73,7 @@ "MAX_LOGS_BACKLOG": 4096, "MAX_LOGS_PER_MERGE": 512, "UPLOAD_INTERVAL": 30, - "ALLOWED_ECUS": {"main_ecu", "sub_ecu"}, + "ECU_INFO_YAML": "/boot/ota/ecu_info.yaml", }, ), # test#2: change everything @@ -92,7 +91,7 @@ "MAX_LOGS_BACKLOG": "1024", "MAX_LOGS_PER_MERGE": "128", "UPLOAD_INTERVAL": "10", - "ALLOWED_ECUS": '["main_ecu", "sub_ecu"]', + "ECU_INFO_YAML": "/some/where/ecu_info.yaml", }, { "GREENGRASS_V1_CONFIG": "ggv1_cfg.json", @@ -107,7 +106,7 @@ "MAX_LOGS_BACKLOG": 1024, "MAX_LOGS_PER_MERGE": 128, "UPLOAD_INTERVAL": 10, - "ALLOWED_ECUS": {"main_ecu", "sub_ecu"}, + "ECU_INFO_YAML": "/some/where/ecu_info.yaml", }, ), ], diff --git a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py index a15a550..fc0cc6a 100644 --- a/tests/test_log_proxy_server.py +++ b/tests/test_log_proxy_server.py @@ -17,11 +17,11 @@ import logging import os +from pathlib import Path import random -from dataclasses import dataclass, field +from dataclasses import dataclass from http import HTTPStatus from queue import Queue -from typing import Set from urllib.parse import urljoin import aiohttp @@ -37,6 +37,7 @@ logger = logging.getLogger(__name__) MODULE = log_server_module.__name__ +TEST_DIR = Path(__file__).parent / "data" @dataclass @@ -45,9 +46,7 @@ class _ServerConfig: LISTEN_ADDRESS: str = "127.0.0.1" LISTEN_PORT: int = 8083 - ALLOWED_ECUS: Set[str] = field( - default_factory=lambda: {"main_ecu", "sub_ecu0", "sub_ecu1", "sub_ecu2"} - ) + ECU_INFO_YAML: Path = TEST_DIR / "ecu_info.yaml" _test_server_cfg = 
_ServerConfig() @@ -59,7 +58,8 @@ class MessageEntry: message: str -mocked_ECUs_list = ("main_ecu", "sub_ecu0", "sub_ecu1", "sub_ecu2") +# see data/ecu_info.yaml +mocked_ECUs_list = ("main", "sub1", "sub2", "sub3") def generate_random_msgs( @@ -154,7 +154,7 @@ async def test_server(self, client_sesion: aiohttp.ClientSession): # unknowned ECU's request will be dropped ("bad_ecu_id", "valid_msg"), # empty message will be dropped - ("main_ecu", ""), + ("main", ""), ], ) async def test_reject_invalid_request( From 27635b8879ff8ad8eb6727564d9c0d28f8da7801 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 15 Apr 2024 11:28:57 +0000 Subject: [PATCH 092/128] log_proxy_server: log loaded allowed_ecu_set --- src/otaclient_iot_logging_server/log_proxy_server.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index 4921f66..979c178 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -40,6 +40,9 @@ def __init__(self, queue: LogsQueue) -> None: stripped_ecu_info = parse_ecu_info(server_cfg.ECU_INFO_YAML) if stripped_ecu_info: self._allowed_ecus = stripped_ecu_info.ecu_id_set + logger.info( + f"setup allowed_ecu_id from ecu_info.yaml: {stripped_ecu_info.ecu_id_set}" + ) # route: POST /{ecu_id} async def logging_post_handler(self, request: Request): From 1f9c046c3f69474a78d90b19dec098ff8705cfb8 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 24 Apr 2024 02:27:41 +0000 Subject: [PATCH 093/128] update README.md --- README.md | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index a30a0dd..83672c1 100644 --- a/README.md +++ b/README.md @@ -13,20 +13,21 @@ The behaviors of the iot_logging_server can be configured with the following env | Environmental variables | Default value | Description | | ---- | ---- | --- | | GREENGRASS_V1_CONFIG | `/greengrass/config/config.json` | | -| GREENGRASS_V2_CONFIG | `/greengrass/v2/init_config/config.yaml` | | -| AWS_PROFILE_INFO | `/opt/ota/iot_logger/aws_profile_info.yaml` | | +| GREENGRASS_V2_CONFIG | `/greengrass/v2/init_config/config.yaml` | If both v1 and v2 config file exist, v2 will be used in prior. | +| AWS_PROFILE_INFO | `/opt/ota/iot_logger/aws_profile_info.yaml` | The location of AWS profile info mapping files. | | ECU_INFO_YAML | `/boot/ota/ecu_info.yaml` | The location of ecu_info.yaml config file. iot-logger server will parse the config file and only process logs sending from known ECUs.| -| LISTEN_ADDRESS | `0.0.0.0` | The IP address iot-logger server listen on. | +| LISTEN_ADDRESS | `127.0.0.1` | The IP address iot-logger server listen on. By default only receive logs from local machine. | | LISTEN_PORT | `8083` | | -| UPLOAD_LOGGING_SERVER_LOGS | `false` | Whether to upload the logs from server itself to cloudwatchlogs | -| SERVER_LOGSTREAM_SUFFIX | `iot_logging_server` | log_stream_suffix to use for local server logs upload | -| SERVER_LOGGING_LEVEL | `INFO` | | +| UPLOAD_LOGGING_SERVER_LOGS | `false` | Whether to upload the logs from server itself to cloudwatchlogs. | +| SERVER_LOGSTREAM_SUFFIX | `iot_logging_server` | log_stream_suffix to use for local server logs upload. | +| SERVER_LOGGING_LEVEL | `INFO` | The logging level of the server itself. 
| | SERVER_LOGGING_LOG_FORMAT | `[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s` | | -| MAX_LOGS_BACKLOG | `4096` | Max pending log entries | -| MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group | -| UPLOAD_INTERVAL | `60` | Interval of uploading log batches to cloud | +| MAX_LOGS_BACKLOG | `4096` | Max pending log entries. | +| MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group. | +| UPLOAD_INTERVAL | `60` | Interval of uploading log batches to cloud. Note that if the logger is restarted before next upload occurs, the pending loggings will be dropped. | ### ecu_info.yaml If `ecu_info.yaml` presented and valid, iot-logger will only accept logs from known ECU ids. +The known ECU ids are retrieved from parsing `ecu_info.secondaries` field. Currently only ECU id will be checked, IP checking is not performed as sub ECU otaclient might send logging from different IPs if ECU has multiple interfaces. \ No newline at end of file From befbe9eb194b2ee675351448b521022913633d83 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 02:11:15 +0000 Subject: [PATCH 094/128] update according to sonarcloud recommends --- src/otaclient_iot_logging_server/aws_iot_logger.py | 4 ++-- tests/test__utils.py | 6 +++--- tests/test_aws_iot_logger.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index d3f8c97..92b8dc3 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -18,7 +18,7 @@ import logging import time from collections import defaultdict -from datetime import datetime +from datetime import datetime, timezone from queue import Empty from threading import Thread @@ -42,7 +42,7 @@ def get_log_stream_name(thing_name: str, log_stream_sufix: str) -> str: Schema: YYYY/MM/DD// """ - fmt = "{strftime:%Y/%m/%d}".format(strftime=datetime.utcnow()) + fmt = "{strftime:%Y/%m/%d}".format(strftime=datetime.now(timezone.utc)) return f"{fmt}/{thing_name}/{log_stream_sufix}" diff --git a/tests/test__utils.py b/tests/test__utils.py index 174190f..3d0e3d2 100644 --- a/tests/test__utils.py +++ b/tests/test__utils.py @@ -202,9 +202,9 @@ def test_retry_session_timecost(self): retry_on_exceptions=(self.HandledException,), )() - time_cost = time.time() - _start_time - logger.info(f"{time_cost=}") - assert time_cost <= expected_retry_session_timecost + time_cost = time.time() - _start_time + logger.info(f"{time_cost=}") + assert time_cost <= expected_retry_session_timecost @pytest.mark.parametrize( diff --git a/tests/test_aws_iot_logger.py b/tests/test_aws_iot_logger.py index b3f0957..005ef32 100644 --- a/tests/test_aws_iot_logger.py +++ b/tests/test_aws_iot_logger.py @@ -83,7 +83,7 @@ def generate_random_msgs( return _res -class TestAWSIoTLogger_thread_main: +class TestAWSIoTLogger: MSG_LEN = 16 MSG_NUM = 4096 From 2d9c44afa9459034ff19c551ea7c1a1811118249 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 02:20:33 +0000 Subject: [PATCH 095/128] greengrass_config: check thing_name length before processing the regex match --- src/otaclient_iot_logging_server/greengrass_config.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py index bcb2b32..8ab3473 100644 --- a/src/otaclient_iot_logging_server/greengrass_config.py +++ 
b/src/otaclient_iot_logging_server/greengrass_config.py @@ -33,14 +33,17 @@ logger = logging.getLogger(__name__) +THINGNAME_PA = re.compile(r"^(thing[/:])?(?P[\w-]+)-edge-(?P[\w-]+)-.*$") +THINGNAME_MAXLENGH = 128 + len("thing/") +"""ThingName's max length is 128. See https://docs.aws.amazon.com/iot/latest/apireference/API_ThingDocument.html.""" + + def get_profile_from_thing_name(_in: str) -> str: """Get profile from specific thing_name naming scheme. Schema: thing/-edge--Core """ - THINGNAME_PA = re.compile( - r"^(thing[/:])?(?P[\w-]+)-edge-(?P[\w-]+)-.*$" - ) + assert len(_in) <= THINGNAME_MAXLENGH, f"invalid thing_name: {_in}" _ma = THINGNAME_PA.match(_in) assert _ma, f"invalid resource id: {_in}" From 211fb25c7e81025ccdbe7886a940ff110b022fa6 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 02:24:11 +0000 Subject: [PATCH 096/128] dockerfile: only copy src and pyproject.toml --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 7456fd5..6e27a08 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,7 +10,7 @@ FROM python:${PYTHON_VERSION}-${PYTHON_BASE_VER} as venv_builder ARG PYTHON_VENV -COPY . /source_code +COPY ./src ./pyproject.toml /source_code # ------ install build deps ------ # RUN set -eux; \ From 71d04c819e5b560aa7ccd2bc5a2b05f89be87bf9 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 02:30:56 +0000 Subject: [PATCH 097/128] add sonarcloud.properties --- .sonarcloud.properties | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .sonarcloud.properties diff --git a/.sonarcloud.properties b/.sonarcloud.properties new file mode 100644 index 0000000..b268850 --- /dev/null +++ b/.sonarcloud.properties @@ -0,0 +1,5 @@ +sonar.sources=./src +sonar.coverage.exclusions=./tests/**/* +sonar.exclusions=./tests/**/* +sonar.test.exclusions=./tests/**/* +sonar.sourceEncoding=UTF-8 \ No newline at end of file From 2c19c32e3c3ba23c431fd4e14cf9a3134882e092 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 02:46:18 +0000 Subject: [PATCH 098/128] enable sonarcloud scanning on ci --- .github/workflows/test.yaml | 5 +++++ .sonarcloud.properties | 7 ++++--- pyproject.toml | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index d6ce5e2..cc7599a 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -30,6 +30,11 @@ jobs: coverage run -m pytest --junit-xml=test_result/pytest.xml coverage xml -o test_result/coverage.xml + - name: SonarCloud Scan + uses: SonarSource/sonarcloud-github-action@master + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + # export the coverage report to the comment! 
- name: Add coverage report to PR comment continue-on-error: true diff --git a/.sonarcloud.properties b/.sonarcloud.properties index b268850..8395964 100644 --- a/.sonarcloud.properties +++ b/.sonarcloud.properties @@ -1,5 +1,6 @@ +sonar.organization=tier4 +sonar.projectKey=otaclient-iot-logging-server +sonar.python.coverage.reportPaths=test_result/pytest.xml sonar.sources=./src -sonar.coverage.exclusions=./tests/**/* -sonar.exclusions=./tests/**/* -sonar.test.exclusions=./tests/**/* +sonar.tests=tests sonar.sourceEncoding=UTF-8 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 5f161b2..4c40f30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ Source = "https://github.com/tier4/otaclient-iot-logging-server" line-length = 88 [tool.coverage.run] -branch = false +branch = true source = ["otaclient_iot_logging_server"] [tool.coverage.report] From 0fe6a8956aef05f61ba131b7815e535c851999d6 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 02:48:13 +0000 Subject: [PATCH 099/128] sonarcloud: limit the python version --- .sonarcloud.properties | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.sonarcloud.properties b/.sonarcloud.properties index 8395964..b95d39b 100644 --- a/.sonarcloud.properties +++ b/.sonarcloud.properties @@ -3,4 +3,5 @@ sonar.projectKey=otaclient-iot-logging-server sonar.python.coverage.reportPaths=test_result/pytest.xml sonar.sources=./src sonar.tests=tests -sonar.sourceEncoding=UTF-8 \ No newline at end of file +sonar.sourceEncoding=UTF-8 +sonar.python.version=3.8,3.9,3.10,3.11 \ No newline at end of file From 36de4b133d8266709262deb90ba8eb4bbec78d37 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 02:51:16 +0000 Subject: [PATCH 100/128] fix test --- tests/test_aws_iot_logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_aws_iot_logger.py b/tests/test_aws_iot_logger.py index 005ef32..bcb40dd 100644 --- a/tests/test_aws_iot_logger.py +++ b/tests/test_aws_iot_logger.py @@ -61,7 +61,7 @@ def test_get_log_stream_name( _thing_name: str, _suffix: str, _expected: str, mocker: MockerFixture ): _datetime_mock = mocker.MagicMock(spec=datetime) - _datetime_mock.utcnow.return_value = _UNIX_EPOCH + _datetime_mock.now.return_value = _UNIX_EPOCH mocker.patch(f"{MODULE}.datetime", _datetime_mock) assert get_log_stream_name(_thing_name, _suffix) == _expected From ff55bcc60c7d105cae1bdbac356edcdb0f3ab107 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 03:36:28 +0000 Subject: [PATCH 101/128] fix sonarcloud config --- sonar-project.properties | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 sonar-project.properties diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000..b95d39b --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,7 @@ +sonar.organization=tier4 +sonar.projectKey=otaclient-iot-logging-server +sonar.python.coverage.reportPaths=test_result/pytest.xml +sonar.sources=./src +sonar.tests=tests +sonar.sourceEncoding=UTF-8 +sonar.python.version=3.8,3.9,3.10,3.11 \ No newline at end of file From 692dca6c31de91ae11f03aea9a85e98c3731ed86 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 03:52:21 +0000 Subject: [PATCH 102/128] fix sonarcloud projectkey --- sonar-project.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sonar-project.properties b/sonar-project.properties index b95d39b..fcd34ec 100644 --- a/sonar-project.properties +++ 
b/sonar-project.properties @@ -1,5 +1,5 @@ sonar.organization=tier4 -sonar.projectKey=otaclient-iot-logging-server +sonar.projectKey=tier4_otaclient-iot-logging-server sonar.python.coverage.reportPaths=test_result/pytest.xml sonar.sources=./src sonar.tests=tests From 10f74b305c3c0488842b78baf4988a7608b9456d Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 04:27:27 +0000 Subject: [PATCH 103/128] test_ci: mark sonarcloud scan as optional for now --- .github/workflows/test.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index cc7599a..fbbb287 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -32,6 +32,7 @@ jobs: - name: SonarCloud Scan uses: SonarSource/sonarcloud-github-action@master + continue-on-error: true env: SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} From f6c9fe5de0afa9072a9b35cbc789e264f5e8518b Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 06:36:41 +0000 Subject: [PATCH 104/128] pyproject: update deps --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4c40f30..d6af124 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,11 +17,11 @@ classifiers = [ "Programming Language :: Python :: 3.11", ] dependencies = [ - "aiohttp>=3.9.2, <3.10.0", + "aiohttp>=3.9.5, <3.10.0", "awsiot_credentialhelper>=0.6.0, <0.7.0", "boto3>=1.34.35, <1.35.0", "botocore==1.34.35, <1.35.0", - "pyopenssl==24.0.0", + "pyopenssl>=24.1.0, <25.0.0", "pydantic==2.7.0", "pydantic-settings==2.2.1", "pyyaml==6.0.1", From 86822011d8c47fa5f399ccdc73a9b12dcf19998f Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 06:36:51 +0000 Subject: [PATCH 105/128] add pre-commit config files --- .pre-commit-config.yaml | 50 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..bbef31e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,50 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: detect-private-key + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.10.0 + hooks: + - id: python-check-mock-methods + - id: python-use-type-annotations + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort + - repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + additional_dependencies: + - flake8-bugbear==24.2.6 + - flake8-comprehensions + - flake8-simplify + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.10.0 + hooks: + - id: mypy + files: ^src/ + args: [] + additional_dependencies: + - aiohttp>=3.9.5, <3.10.0 + - awsiot_credentialhelper>=0.6.0, <0.7.0 + - boto3>=1.34.35, <1.35.0 + - pyopenssl>=24.1.0, <25.0.0 + - packaging >= 22.0 + - pydantic==2.7.0 + - pydantic-settings==2.2.1 + - pyyaml==6.0.1 + - pytest + - typing_extensions>=4.0 + - tomli >= 0.2.6, <2.0.0 + - repo: https://github.com/tox-dev/pyproject-fmt + rev: "1.8.0" + hooks: + - id: pyproject-fmt + # https://pyproject-fmt.readthedocs.io/en/latest/#calculating-max-supported-python-version + additional_dependencies: ["tox>=4.9"] +ci: + autoupdate_schedule: monthly From 284b664ff37a637ad2c16ecd259c4372a132a27a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 8 May 2024 
06:37:35 +0000 Subject: [PATCH 106/128] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .dockerignore | 2 +- .flake8 | 2 +- .sonarcloud.properties | 2 +- Dockerfile | 2 +- README.md | 6 +- examples/aws_profile_info.yaml | 2 +- examples/otaclient-logger.service | 2 +- pyproject.toml | 77 ++++++++++--------- sonar-project.properties | 2 +- src/otaclient_iot_logging_server/.gitignore | 2 +- .../aws_iot_logger.py | 3 +- src/otaclient_iot_logging_server/ecu_info.py | 2 +- tests/data/aws_profile_info.yaml | 2 +- tests/data/gg_v1_cfg.json | 2 +- tests/data/gg_v2_cfg.yaml_tpm2.0 | 2 +- tests/test__log_setting.py | 6 +- tests/test__utils.py | 7 +- tests/test_aws_iot_logger.py | 9 +-- tests/test_boto3_session.py | 10 +-- tests/test_configs.py | 2 - tests/test_ecu_info.py | 1 + tests/test_greengrass_config.py | 13 +--- tests/test_log_proxy_server.py | 9 +-- 23 files changed, 71 insertions(+), 96 deletions(-) diff --git a/.dockerignore b/.dockerignore index fbd2eed..7c1a04e 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,3 +1,3 @@ .devcontainer/ build/ -dist/ \ No newline at end of file +dist/ diff --git a/.flake8 b/.flake8 index 1387491..ef8aeb2 100644 --- a/.flake8 +++ b/.flake8 @@ -1,4 +1,4 @@ [flake8] extend-ignore = E266, E501, E203 max-line-length = 88 -max-complexity = 16 \ No newline at end of file +max-complexity = 16 diff --git a/.sonarcloud.properties b/.sonarcloud.properties index b95d39b..3d4e469 100644 --- a/.sonarcloud.properties +++ b/.sonarcloud.properties @@ -4,4 +4,4 @@ sonar.python.coverage.reportPaths=test_result/pytest.xml sonar.sources=./src sonar.tests=tests sonar.sourceEncoding=UTF-8 -sonar.python.version=3.8,3.9,3.10,3.11 \ No newline at end of file +sonar.python.version=3.8,3.9,3.10,3.11 diff --git a/Dockerfile b/Dockerfile index 6e27a08..e959cee 100644 --- a/Dockerfile +++ b/Dockerfile @@ -66,4 +66,4 @@ RUN mkdir -p /opt /greengrass ENV PATH="${PYTHON_VENV}/bin:${PATH}" -CMD ["iot_logging_server"] \ No newline at end of file +CMD ["iot_logging_server"] diff --git a/README.md b/README.md index 83672c1..6665951 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ A logging server that uploads logs sent from otaclient to AWS cloudwatch. -This iot-logger is expected to be installed on the main ECU, with greengrass certificates and otaclient config file(ecu_info.yaml) installed. +This iot-logger is expected to be installed on the main ECU, with greengrass certificates and otaclient config file(ecu_info.yaml) installed. ## Usage @@ -28,6 +28,6 @@ The behaviors of the iot_logging_server can be configured with the following env ### ecu_info.yaml -If `ecu_info.yaml` presented and valid, iot-logger will only accept logs from known ECU ids. +If `ecu_info.yaml` presented and valid, iot-logger will only accept logs from known ECU ids. The known ECU ids are retrieved from parsing `ecu_info.secondaries` field. -Currently only ECU id will be checked, IP checking is not performed as sub ECU otaclient might send logging from different IPs if ECU has multiple interfaces. \ No newline at end of file +Currently only ECU id will be checked, IP checking is not performed as sub ECU otaclient might send logging from different IPs if ECU has multiple interfaces. 
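The filtering described in the README text above boils down to a set-membership test on the ECU id. For illustration, a minimal self-contained sketch of that idea (this is not the project's actual implementation; the helper names and the YAML handling below are illustrative assumptions only):

from __future__ import annotations

import yaml


def load_allowed_ecu_ids(ecu_info_yaml: str) -> set[str] | None:
    """Return the known ECU ids, or None when ecu_info.yaml is missing/invalid."""
    try:
        with open(ecu_info_yaml) as f:
            ecu_info = yaml.safe_load(f)
        allowed = {ecu_info["ecu_id"]}  # the main ECU itself
        allowed.update(sub["ecu_id"] for sub in ecu_info.get("secondaries", []))
        return allowed
    except Exception:
        return None  # no valid ecu_info.yaml: filtering is disabled


def accept_log(ecu_id: str, allowed: set[str] | None) -> bool:
    # only the ECU id is checked; the sender IP is deliberately not verified
    return allowed is None or ecu_id in allowed
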
diff --git a/examples/aws_profile_info.yaml b/examples/aws_profile_info.yaml index abd9974..16a374e 100644 --- a/examples/aws_profile_info.yaml +++ b/examples/aws_profile_info.yaml @@ -6,4 +6,4 @@ credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" - profile_name: "profile-prd" account_id: "012345678903" - credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file + credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" diff --git a/examples/otaclient-logger.service b/examples/otaclient-logger.service index 270cca2..294ca9b 100644 --- a/examples/otaclient-logger.service +++ b/examples/otaclient-logger.service @@ -14,4 +14,4 @@ RestartSec=10 Type=simple [Install] -WantedBy=multi-user.target \ No newline at end of file +WantedBy=multi-user.target diff --git a/pyproject.toml b/pyproject.toml index d6af124..8e8acb7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,16 @@ [build-system] -requires = ["hatchling>=1.20.0", "hatch-vcs"] build-backend = "hatchling.build" +requires = [ + "hatch-vcs", + "hatchling>=1.20", +] [project] -name = "otaclient_iot_logging_server" +name = "otaclient-iot-logging-server" +description = "A logging server that uploads logs sent from otaclient to AWS cloudwatch." readme = "README.md" -requires-python = ">=3.8" license = { text = "LICENSE.md" } +requires-python = ">=3.8" classifiers = [ "License :: OSI Approved :: Apache Software License", "Operating System :: Unix", @@ -15,57 +19,38 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] +dynamic = [ + "version", ] dependencies = [ - "aiohttp>=3.9.5, <3.10.0", - "awsiot_credentialhelper>=0.6.0, <0.7.0", - "boto3>=1.34.35, <1.35.0", - "botocore==1.34.35, <1.35.0", - "pyopenssl>=24.1.0, <25.0.0", - "pydantic==2.7.0", + "aiohttp<3.10.0,>=3.9.5", + "awsiot_credentialhelper<0.7.0,>=0.6", + "boto3<1.35.0,>=1.34.35", + "botocore<1.35.0,==1.34.35", + "pydantic==2.7", "pydantic-settings==2.2.1", + "pyopenssl<25.0.0,>=24.1", "pyyaml==6.0.1", - "typing_extensions>=4.0", + "typing_extensions>=4", ] -dynamic = ["version"] -description = "A logging server that uploads logs sent from otaclient to AWS cloudwatch." 
- [project.optional-dependencies] dev = [ "black==24.1.1", "coverage==7.4.1", - "flake8==6.1.0", + "flake8==6.1", "isort==5.13.2", "pytest==7.4.4", "pytest-asyncio==0.23.4", "pytest-env==1.1.3", - "pytest-mock==3.12.0", + "pytest-mock==3.12", ] - -[project.scripts] -iot_logging_server = "otaclient_iot_logging_server.__main__:main" - [project.urls] Homepage = "https://github.com/tier4/otaclient-iot-logging-server" Source = "https://github.com/tier4/otaclient-iot-logging-server" - -[tool.black] -line-length = 88 - -[tool.coverage.run] -branch = true -source = ["otaclient_iot_logging_server"] - -[tool.coverage.report] -exclude_also = [ - "def __repr__", - "if __name__ == .__main__.:", - "if TYPE_CHECKING:", - "class .*\\bProtocol\\):", - "@(abc\\.)?abstractmethod", -] -show_missing = true -skip_empty = true +[project.scripts] +iot_logging_server = "otaclient_iot_logging_server.__main__:main" [tool.hatch.envs.dev] type = "virtual" @@ -84,6 +69,9 @@ exclude = ["/.github", "/docs"] only-include = ["src"] sources = ["src"] +[tool.black] +line-length = 88 + [tool.isort] atomic = true profile = "black" @@ -105,3 +93,18 @@ log_cli = true log_cli_level = "INFO" pythonpath = ["src"] testpaths = ["tests"] + +[tool.coverage.run] +branch = true +source = ["otaclient_iot_logging_server"] + +[tool.coverage.report] +exclude_also = [ + "def __repr__", + "if __name__ == .__main__.:", + "if TYPE_CHECKING:", + "class .*\\bProtocol\\):", + "@(abc\\.)?abstractmethod", +] +show_missing = true +skip_empty = true diff --git a/sonar-project.properties b/sonar-project.properties index fcd34ec..b45821e 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -4,4 +4,4 @@ sonar.python.coverage.reportPaths=test_result/pytest.xml sonar.sources=./src sonar.tests=tests sonar.sourceEncoding=UTF-8 -sonar.python.version=3.8,3.9,3.10,3.11 \ No newline at end of file +sonar.python.version=3.8,3.9,3.10,3.11 diff --git a/src/otaclient_iot_logging_server/.gitignore b/src/otaclient_iot_logging_server/.gitignore index 0cee351..6a8d699 100644 --- a/src/otaclient_iot_logging_server/.gitignore +++ b/src/otaclient_iot_logging_server/.gitignore @@ -1,2 +1,2 @@ # ignore generated version file -_version.py \ No newline at end of file +_version.py diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 92b8dc3..a8b4925 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -27,8 +27,8 @@ from otaclient_iot_logging_server._common import LogEvent, LogMessage, LogsQueue from otaclient_iot_logging_server._utils import chain_query, retry -from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.boto3_session import get_session +from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.greengrass_config import ( IoTSessionConfig, parse_config, @@ -50,7 +50,6 @@ class AWSIoTLogger: """ Ref: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/logs.html """ - # this upper bound is defined by boto3, check doc for more details. 
MAX_LOGS_PER_PUT = 10_000 diff --git a/src/otaclient_iot_logging_server/ecu_info.py b/src/otaclient_iot_logging_server/ecu_info.py index 6969229..c401763 100644 --- a/src/otaclient_iot_logging_server/ecu_info.py +++ b/src/otaclient_iot_logging_server/ecu_info.py @@ -18,6 +18,7 @@ from __future__ import annotations + import logging from functools import cached_property from pathlib import Path @@ -26,7 +27,6 @@ import yaml from pydantic import BaseModel, ConfigDict, Field, IPvAnyAddress - logger = logging.getLogger(__name__) diff --git a/tests/data/aws_profile_info.yaml b/tests/data/aws_profile_info.yaml index abd9974..16a374e 100644 --- a/tests/data/aws_profile_info.yaml +++ b/tests/data/aws_profile_info.yaml @@ -6,4 +6,4 @@ credential_endpoint: "abcdefghijk02.credentials.iot.region.amazonaws.com" - profile_name: "profile-prd" account_id: "012345678903" - credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" \ No newline at end of file + credential_endpoint: "abcdefghijk03.credentials.iot.region.amazonaws.com" diff --git a/tests/data/gg_v1_cfg.json b/tests/data/gg_v1_cfg.json index 7abde7e..3299fd3 100644 --- a/tests/data/gg_v1_cfg.json +++ b/tests/data/gg_v1_cfg.json @@ -26,4 +26,4 @@ } } } -} \ No newline at end of file +} diff --git a/tests/data/gg_v2_cfg.yaml_tpm2.0 b/tests/data/gg_v2_cfg.yaml_tpm2.0 index 9af1e2a..815d091 100644 --- a/tests/data/gg_v2_cfg.yaml_tpm2.0 +++ b/tests/data/gg_v2_cfg.yaml_tpm2.0 @@ -18,4 +18,4 @@ services: library: "/usr/lib/x86_64-linux-gnu/pkcs11/libtpm2_pkcs11.so" name: "tpm2_pkcs11" slot: 1 - userPin: "greengrass_userpin" \ No newline at end of file + userPin: "greengrass_userpin" diff --git a/tests/test__log_setting.py b/tests/test__log_setting.py index 0a513b5..67a4550 100644 --- a/tests/test__log_setting.py +++ b/tests/test__log_setting.py @@ -19,8 +19,8 @@ from queue import Queue import otaclient_iot_logging_server._log_setting -from otaclient_iot_logging_server._log_setting import _LogTeeHandler # type: ignore from otaclient_iot_logging_server._common import LogsQueue +from otaclient_iot_logging_server._log_setting import _LogTeeHandler # type: ignore MODULE = otaclient_iot_logging_server._log_setting.__name__ @@ -30,17 +30,13 @@ def test_server_logger(): _queue: LogsQueue = Queue() suffix = "test_suffix" - # ------ setup test ------ # _handler = _LogTeeHandler(_queue, suffix) # type: ignore logger.addHandler(_handler) - # ------ execution ------ # logger.info("emit one logging entry") - # ------ clenaup ------ # logger.removeHandler(_handler) - # ------ check result ------ # _log = _queue.get_nowait() assert _log[0] == suffix diff --git a/tests/test__utils.py b/tests/test__utils.py index 3d0e3d2..c0d0ae1 100644 --- a/tests/test__utils.py +++ b/tests/test__utils.py @@ -15,9 +15,9 @@ from __future__ import annotations -import time import logging import random +import time from typing import Any import pytest @@ -25,9 +25,9 @@ from otaclient_iot_logging_server._utils import ( NestedDict, chain_query, - retry, - remove_prefix, parse_pkcs11_uri, + remove_prefix, + retry, ) logger = logging.getLogger(__name__) @@ -178,7 +178,6 @@ def test_retry_session_timecost(self): """ max_retries, actual_retries = 8, 9 backoff_factor, backoff_max = 0.1, 1 - # NOTE: add some overhead for function execution expected_retry_session_timecost = ( sum(min(backoff_max, backoff_factor * 2**i) for i in range(max_retries)) diff --git a/tests/test_aws_iot_logger.py b/tests/test_aws_iot_logger.py index bcb40dd..779f29c 100644 --- a/tests/test_aws_iot_logger.py 
+++ b/tests/test_aws_iot_logger.py @@ -30,8 +30,8 @@ import otaclient_iot_logging_server.aws_iot_logger from otaclient_iot_logging_server._common import LogMessage, LogsQueue from otaclient_iot_logging_server.aws_iot_logger import ( - get_log_stream_name, AWSIoTLogger, + get_log_stream_name, ) logger = logging.getLogger(__name__) @@ -96,13 +96,11 @@ def _mocked_send_messages(self, _ecu_id: str, _logs: list[LogMessage]): @pytest.fixture def prepare_test_data(self): _msgs = generate_random_msgs(self.MSG_LEN, self.MSG_NUM) - # prepare result for test_thread_main _merged_msgs: dict[str, list[LogMessage]] = defaultdict(list) for _ecu_id, _log_msg in _msgs: _merged_msgs[_ecu_id].append(_log_msg) self._merged_msgs = _merged_msgs - # prepare the queue for test _queue: LogsQueue = Queue() for _item in _msgs: @@ -115,7 +113,6 @@ def setup_test(self, prepare_test_data, mocker: MockerFixture): # NOTE: a hack here to interrupt the while loop _time_mocker.sleep.side_effect = self._TestFinished mocker.patch(f"{MODULE}.time", _time_mocker) - # ------ prepare test self ------ # # The following bound variables will be used in thread_main method. # NOTE: another hack to let all entries being merged within one @@ -124,11 +121,9 @@ def setup_test(self, prepare_test_data, mocker: MockerFixture): self.send_messages = self._mocked_send_messages self._interval = 6 # place holder self._session_config = mocker.MagicMock() # place holder - # for holding test results # mocked_send_messages will record each calls in this dict self._test_result: dict[str, list[LogMessage]] = {} - # mock get_log_stream_name to let it returns the log_stream_suffix # as it, make the test easier. # see get_log_stream_name signature for more details @@ -140,12 +135,10 @@ def test_thread_main(self, mocker: MockerFixture): self._create_log_group = mocked__create_log_group = mocker.MagicMock( spec=AWSIoTLogger._create_log_group ) - # ------ execution ------ # with pytest.raises(self._TestFinished): func_to_test.__get__(self)() logger.info("execution finished") - # ------ check result ------ # mocked__create_log_group.assert_called_once() # confirm the send_messages mock receives the expecting calls. 
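One detail of the test hunk above that is easy to miss: `func_to_test.__get__(self)()` uses the descriptor protocol to bind the plain function `AWSIoTLogger.thread_main` to the test instance, so the mocked attributes prepared on the test class stand in for a real logger object. A tiny self-contained illustration of that binding trick (the class and attribute names below are made up for demonstration):

class Worker:
    def run(self):
        # only relies on attributes reachable through `self`
        return f"processed {self.payload}"


class FakeWorker:
    payload = "test-data"  # provide just the attributes run() touches


# plain functions are descriptors: __get__ returns a method bound to any object
bound_run = Worker.run.__get__(FakeWorker())
assert bound_run() == "processed test-data"
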
diff --git a/tests/test_boto3_session.py b/tests/test_boto3_session.py index f7caf61..a49ab58 100644 --- a/tests/test_boto3_session.py +++ b/tests/test_boto3_session.py @@ -14,6 +14,7 @@ from __future__ import annotations + from typing import Any import pytest @@ -21,15 +22,16 @@ from awsiot_credentialhelper.boto3_session import Pkcs11Config as aws_PKcs11Config from pytest_mock import MockerFixture - import otaclient_iot_logging_server.boto3_session from otaclient_iot_logging_server._utils import parse_pkcs11_uri -from otaclient_iot_logging_server.boto3_session import _convert_to_pem, get_session # type: ignore +from otaclient_iot_logging_server.boto3_session import ( # type: ignore + _convert_to_pem, + get_session, +) from otaclient_iot_logging_server.greengrass_config import ( IoTSessionConfig, PKCS11Config, ) - from tests.conftest import TEST_DATA_DPATH MODULE = otaclient_iot_logging_server.boto3_session.__name__ @@ -126,9 +128,7 @@ def test_get_session( mocker.patch( f"{MODULE}._load_certificate", mocker.MagicMock(return_value=_MOCKED_CERT) ) - # ------ execution ------ # get_session(_config) - # ------ check result ------ # _boto3_session_provider_mock.assert_called_once_with(**_expected_call) diff --git a/tests/test_configs.py b/tests/test_configs.py index 1fc0a79..a9edbee 100644 --- a/tests/test_configs.py +++ b/tests/test_configs.py @@ -25,7 +25,6 @@ ConfigurableLoggingServerConfig, load_profile_info, ) - from tests.conftest import TEST_DATA_DPATH AWS_PROFILE_INFO_FPATH = TEST_DATA_DPATH / "aws_profile_info.yaml" @@ -118,7 +117,6 @@ def test_server_config_loading( ): # patch environmental variables while clearing all already mocker.patch.dict(os.environ, _mock_envs, clear=True) - # NOTE: compare by dict to prevent double import from env vars assert _expected == ConfigurableLoggingServerConfig().model_dump() diff --git a/tests/test_ecu_info.py b/tests/test_ecu_info.py index 711e2f9..c89cb59 100644 --- a/tests/test_ecu_info.py +++ b/tests/test_ecu_info.py @@ -14,6 +14,7 @@ from __future__ import annotations + from pathlib import Path import pytest diff --git a/tests/test_greengrass_config.py b/tests/test_greengrass_config.py index 856fac7..05434a8 100644 --- a/tests/test_greengrass_config.py +++ b/tests/test_greengrass_config.py @@ -24,24 +24,21 @@ import otaclient_iot_logging_server.greengrass_config from otaclient_iot_logging_server.greengrass_config import ( - get_profile_from_thing_name, IoTSessionConfig, PKCS11Config, + get_profile_from_thing_name, + parse_config, parse_v1_config, parse_v2_config, - parse_config, ) - from tests.conftest import TEST_DATA_DPATH logger = logging.getLogger(__name__) MODULE = otaclient_iot_logging_server.greengrass_config.__name__ - # NOTE: AWS_PROFILE_INFO, GREENGRASS_V1_CONFIG and GREENGRASS_V2_CONFIG # environmental variables are properly set in pyproject.toml. # profile_info in configs.py is populated with aws_profile_info.yaml in tests/data. - # NOTE: gg_v1_cfg and gg_v2_cfg is the same, besides the thing_name, # this will be used as evidence to check which config is used. 
GG_V1_CFG_FPATH = TEST_DATA_DPATH / "gg_v1_cfg.json" @@ -98,8 +95,6 @@ ) def test_get_profile_from_thing_name(_in: str, _expected: str): assert get_profile_from_thing_name(_in) == _expected - - # # ------ greengrass v1 configuration ------ # # @@ -110,8 +105,6 @@ def test_get_profile_from_thing_name(_in: str, _expected: str): ) def test_parse_v1_config(_raw_cfg: str, _expected: IoTSessionConfig): assert parse_v1_config(_raw_cfg) == _expected - - # # ------ greengrass v2 configuration ------ # # @@ -124,8 +117,6 @@ def test_parse_v1_config(_raw_cfg: str, _expected: IoTSessionConfig): ) def test_parse_v2_config(_raw_cfg: str, _expected: IoTSessionConfig): assert parse_v2_config(_raw_cfg) == _expected - - # # ------ test parse_config entry point ------ # # diff --git a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py index fc0cc6a..18e2462 100644 --- a/tests/test_log_proxy_server.py +++ b/tests/test_log_proxy_server.py @@ -17,18 +17,18 @@ import logging import os -from pathlib import Path import random from dataclasses import dataclass from http import HTTPStatus +from pathlib import Path from queue import Queue from urllib.parse import urljoin import aiohttp import aiohttp.client_exceptions import pytest -from pytest_mock import MockerFixture from aiohttp import web +from pytest_mock import MockerFixture import otaclient_iot_logging_server.log_proxy_server as log_server_module from otaclient_iot_logging_server._common import LogsQueue @@ -56,8 +56,6 @@ class _ServerConfig: class MessageEntry: ecu_id: str message: str - - # see data/ecu_info.yaml mocked_ECUs_list = ("main", "sub1", "sub2", "sub3") @@ -98,10 +96,8 @@ async def launch_server(self, mocker: MockerFixture): # mute the aiohttp server logging aiohttp_server_logger = logging.getLogger("aiohttp") aiohttp_server_logger.setLevel("ERROR") - # add handler to the server app.add_routes([web.post(r"/{ecu_id}", handler.logging_post_handler)]) - # star the server runner = web.AppRunner(app) try: @@ -138,7 +134,6 @@ async def test_server(self, client_sesion: aiohttp.ClientSession): _log_upload_endpoint_url = urljoin(self.SERVER_URL, _ecu_id) async with client_sesion.post(_log_upload_endpoint_url, data=_msg): pass # raise_for_status is set on session - # ------ check result ------ # # ensure the all msgs are sent in order to the queue by the server. 
logger.info("checking all the received messages...") From c113c0740d2cb8008c054ce14392139c66b5d8ed Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 07:21:51 +0000 Subject: [PATCH 107/128] pre-commit: remove mypy, add black --- .pre-commit-config.yaml | 28 ++++++++++------------------ 1 file changed, 10 insertions(+), 18 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bbef31e..b6789ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,6 +10,16 @@ repos: hooks: - id: python-check-mock-methods - id: python-use-type-annotations + # Using this mirror lets us use mypyc-compiled black, which is about 2x faster + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 24.4.2 + hooks: + - id: black + # It is recommended to specify the latest version of Python + # supported by your project here, or alternatively use + # pre-commit's default_language_version, see + # https://pre-commit.com/#top_level-default_language_version + language_version: python3.11 - repo: https://github.com/pycqa/isort rev: 5.13.2 hooks: @@ -22,24 +32,6 @@ repos: - flake8-bugbear==24.2.6 - flake8-comprehensions - flake8-simplify - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.10.0 - hooks: - - id: mypy - files: ^src/ - args: [] - additional_dependencies: - - aiohttp>=3.9.5, <3.10.0 - - awsiot_credentialhelper>=0.6.0, <0.7.0 - - boto3>=1.34.35, <1.35.0 - - pyopenssl>=24.1.0, <25.0.0 - - packaging >= 22.0 - - pydantic==2.7.0 - - pydantic-settings==2.2.1 - - pyyaml==6.0.1 - - pytest - - typing_extensions>=4.0 - - tomli >= 0.2.6, <2.0.0 - repo: https://github.com/tox-dev/pyproject-fmt rev: "1.8.0" hooks: From f5063cb623653cfcb9a2b0cf986cb927607bd90a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 8 May 2024 07:22:40 +0000 Subject: [PATCH 108/128] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/otaclient_iot_logging_server/aws_iot_logger.py | 1 + tests/test_greengrass_config.py | 6 ++++++ tests/test_log_proxy_server.py | 2 ++ 3 files changed, 9 insertions(+) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index a8b4925..8f50caf 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -50,6 +50,7 @@ class AWSIoTLogger: """ Ref: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/logs.html """ + # this upper bound is defined by boto3, check doc for more details. 
MAX_LOGS_PER_PUT = 10_000 diff --git a/tests/test_greengrass_config.py b/tests/test_greengrass_config.py index 05434a8..46beac3 100644 --- a/tests/test_greengrass_config.py +++ b/tests/test_greengrass_config.py @@ -95,6 +95,8 @@ ) def test_get_profile_from_thing_name(_in: str, _expected: str): assert get_profile_from_thing_name(_in) == _expected + + # # ------ greengrass v1 configuration ------ # # @@ -105,6 +107,8 @@ def test_get_profile_from_thing_name(_in: str, _expected: str): ) def test_parse_v1_config(_raw_cfg: str, _expected: IoTSessionConfig): assert parse_v1_config(_raw_cfg) == _expected + + # # ------ greengrass v2 configuration ------ # # @@ -117,6 +121,8 @@ def test_parse_v1_config(_raw_cfg: str, _expected: IoTSessionConfig): ) def test_parse_v2_config(_raw_cfg: str, _expected: IoTSessionConfig): assert parse_v2_config(_raw_cfg) == _expected + + # # ------ test parse_config entry point ------ # # diff --git a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py index 18e2462..2ba2cc2 100644 --- a/tests/test_log_proxy_server.py +++ b/tests/test_log_proxy_server.py @@ -56,6 +56,8 @@ class _ServerConfig: class MessageEntry: ecu_id: str message: str + + # see data/ecu_info.yaml mocked_ECUs_list = ("main", "sub1", "sub2", "sub3") From c130ae0973290bb987d3202e920ae66ee25d41fe Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 07:31:17 +0000 Subject: [PATCH 109/128] fix according to pre-commit result --- src/otaclient_iot_logging_server/_log_setting.py | 5 ++--- src/otaclient_iot_logging_server/aws_iot_logger.py | 6 +++--- tests/test_ecu_info.py | 2 +- 3 files changed, 6 insertions(+), 7 deletions(-) diff --git a/src/otaclient_iot_logging_server/_log_setting.py b/src/otaclient_iot_logging_server/_log_setting.py index b87c2b1..e18c19f 100644 --- a/src/otaclient_iot_logging_server/_log_setting.py +++ b/src/otaclient_iot_logging_server/_log_setting.py @@ -15,6 +15,7 @@ from __future__ import annotations +import contextlib import logging import time from queue import Queue @@ -37,7 +38,7 @@ def __init__( self._logstream_suffix = logstream_suffix def emit(self, record: logging.LogRecord) -> None: - try: + with contextlib.suppress(Exception): self._queue.put_nowait( ( self._logstream_suffix, @@ -47,8 +48,6 @@ def emit(self, record: logging.LogRecord) -> None: ), ) ) - except Exception: - pass def config_logging( diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 8f50caf..8c0644e 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -15,6 +15,7 @@ from __future__ import annotations +import contextlib import logging import time from collections import defaultdict @@ -198,15 +199,14 @@ def thread_main(self) -> NoReturn: break for log_stream_suffix, logs in message_dict.items(): - try: + with contextlib.suppress(Exception): self.send_messages( get_log_stream_name( self._session_config.thing_name, log_stream_suffix ), logs, ) - except Exception: - pass # don't let the exception breaks the main loop + # don't let the exception breaks the main loop time.sleep(self._interval) diff --git a/tests/test_ecu_info.py b/tests/test_ecu_info.py index c89cb59..ac64e9a 100644 --- a/tests/test_ecu_info.py +++ b/tests/test_ecu_info.py @@ -29,7 +29,7 @@ ( ( "ecu_info.yaml", - set(["sub1", "sub2", "sub3", "main"]), + {"sub1", "sub2", "sub3", "main"}, ), ), ) From 05984bcf2245138e249c12eed338dd1d4e30bb04 Mon Sep 17 00:00:00 2001 From: Bodong Yang 
Date: Wed, 8 May 2024 08:50:08 +0000 Subject: [PATCH 110/128] fix sonarcloud settings --- .sonarcloud.properties | 7 ------- sonar-project.properties | 2 +- 2 files changed, 1 insertion(+), 8 deletions(-) delete mode 100644 .sonarcloud.properties diff --git a/.sonarcloud.properties b/.sonarcloud.properties deleted file mode 100644 index 3d4e469..0000000 --- a/.sonarcloud.properties +++ /dev/null @@ -1,7 +0,0 @@ -sonar.organization=tier4 -sonar.projectKey=otaclient-iot-logging-server -sonar.python.coverage.reportPaths=test_result/pytest.xml -sonar.sources=./src -sonar.tests=tests -sonar.sourceEncoding=UTF-8 -sonar.python.version=3.8,3.9,3.10,3.11 diff --git a/sonar-project.properties b/sonar-project.properties index b45821e..943211c 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -1,6 +1,6 @@ sonar.organization=tier4 sonar.projectKey=tier4_otaclient-iot-logging-server -sonar.python.coverage.reportPaths=test_result/pytest.xml +sonar.python.coverage.reportPaths=test_result/coverage.xml sonar.sources=./src sonar.tests=tests sonar.sourceEncoding=UTF-8 From ce5bad5f28579c4d7d351818a522c072572dee21 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 12:29:45 +0000 Subject: [PATCH 111/128] test_ci: fetch all git branches, this is needed for sonarcloud --- .github/workflows/test.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fbbb287..d7a57dc 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -14,6 +14,9 @@ jobs: steps: - name: Checkout commit uses: actions/checkout@v4 + with: + # sonarcloud needs main branch's ref + fetch-depth: 0 - name: Set up Python uses: actions/setup-python@v5 @@ -32,7 +35,6 @@ jobs: - name: SonarCloud Scan uses: SonarSource/sonarcloud-github-action@master - continue-on-error: true env: SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} From 80dd0145d6b4fb268c674322b0d2c05fd4a4a755 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 12:30:08 +0000 Subject: [PATCH 112/128] pyproject: set coverage.run.relative_file to true --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 8e8acb7..5ab2d7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,6 +97,7 @@ testpaths = ["tests"] [tool.coverage.run] branch = true source = ["otaclient_iot_logging_server"] +relative_files = true [tool.coverage.report] exclude_also = [ From adc4893647ac14181f0876914ca6d32345f30fde Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 12:46:57 +0000 Subject: [PATCH 113/128] update test ci --- .github/workflows/test.yaml | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index d7a57dc..1d30abf 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -10,6 +10,11 @@ jobs: # currently we only need to ensure it is running on the following OS # with OS-shipped python interpreter. 
os: ["ubuntu-20.04", "ubuntu-22.04"] + include: + - os: ubuntu-22.04 + python_version: 3.10 + - os: ubuntu-20.04 + python_version: 3.8 runs-on: ${{ matrix.os }} steps: - name: Checkout commit @@ -21,6 +26,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: + python-version: ${{ matrix.python_version }} cache: "pip" - name: Install package @@ -33,11 +39,6 @@ jobs: coverage run -m pytest --junit-xml=test_result/pytest.xml coverage xml -o test_result/coverage.xml - - name: SonarCloud Scan - uses: SonarSource/sonarcloud-github-action@master - env: - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - # export the coverage report to the comment! - name: Add coverage report to PR comment continue-on-error: true @@ -46,6 +47,11 @@ jobs: pytest-xml-coverage-path: test_result/coverage.xml junitxml-path: test_result/pytest.xml + - name: SonarCloud Scan + uses: SonarSource/sonarcloud-github-action@master + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + pytest_on_supported_python_vers: runs-on: ubuntu-22.04 strategy: From 38aeaa0dbab8dca0c8cb91ecdba4e3696ab8717f Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Wed, 8 May 2024 12:50:09 +0000 Subject: [PATCH 114/128] test ci: fix python version --- .github/workflows/test.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 1d30abf..fde4898 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -12,9 +12,9 @@ jobs: os: ["ubuntu-20.04", "ubuntu-22.04"] include: - os: ubuntu-22.04 - python_version: 3.10 + python_version: "3.10" - os: ubuntu-20.04 - python_version: 3.8 + python_version: "3.8" runs-on: ${{ matrix.os }} steps: - name: Checkout commit From 62813bd4b1e36ca71928af528216b3be73d3779f Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 9 May 2024 02:35:01 +0000 Subject: [PATCH 115/128] test_ci: only trigger test CI on pr or push to main branch with src or tests changed, allow manually trigger --- .github/workflows/test.yaml | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fde4898..099661b 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -1,6 +1,24 @@ name: test CI -on: [pull_request] +on: + pull_request: + branches: + - main + # only trigger unit test CI when src or tests changed + paths: + - "src/**" + - "tests/**" + - ".github/workflows/test.yaml" + push: + branches: + - main + # only trigger unit test CI when src or tests changed + paths: + - "src/**" + - "tests/**" + - ".github/workflows/test.yaml" + # allow the test CI to be manually triggerred + workflow_dispatch: jobs: pytest_with_coverage_on_supported_os: From 5c2c792d205d0c5b53ef180cd445f7c572c09483 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 13 May 2024 01:55:53 +0000 Subject: [PATCH 116/128] pre-commit: enable markdownlint --- .markdownlint.yaml | 4 ++++ .pre-commit-config.yaml | 6 +++++- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 .markdownlint.yaml diff --git a/.markdownlint.yaml b/.markdownlint.yaml new file mode 100644 index 0000000..44fcb49 --- /dev/null +++ b/.markdownlint.yaml @@ -0,0 +1,4 @@ +"MD013": false +"MD041": false +"MD024": + "siblings_only": true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b6789ed..23a9b4d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: 
- - id: detect-private-key - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/pre-commit/pygrep-hooks @@ -38,5 +37,10 @@ repos: - id: pyproject-fmt # https://pyproject-fmt.readthedocs.io/en/latest/#calculating-max-supported-python-version additional_dependencies: ["tox>=4.9"] + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.40.0 + hooks: + - id: markdownlint + args: ["-c", ".markdownlint.yaml", "--fix"] ci: autoupdate_schedule: monthly From a2613d5003804298aeac0ae6f32206d22f577c98 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 13 May 2024 02:34:51 +0000 Subject: [PATCH 117/128] aws_iot_logger: put_log_events now doesn't need sequenceToken anymore --- .../aws_iot_logger.py | 51 +++++++------------ 1 file changed, 17 insertions(+), 34 deletions(-) diff --git a/src/otaclient_iot_logging_server/aws_iot_logger.py b/src/otaclient_iot_logging_server/aws_iot_logger.py index 8c0644e..b017855 100644 --- a/src/otaclient_iot_logging_server/aws_iot_logger.py +++ b/src/otaclient_iot_logging_server/aws_iot_logger.py @@ -27,7 +27,7 @@ from typing_extensions import NoReturn from otaclient_iot_logging_server._common import LogEvent, LogMessage, LogsQueue -from otaclient_iot_logging_server._utils import chain_query, retry +from otaclient_iot_logging_server._utils import retry from otaclient_iot_logging_server.boto3_session import get_session from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.greengrass_config import ( @@ -63,11 +63,11 @@ def __init__( interval: int, ): _boto3_session = get_session(session_config) - self._client = _boto3_session.client(service_name="logs") # type: ignore + self._client = client = _boto3_session.client(service_name="logs") + self._exc_types = client.exceptions self._session_config = session_config self._log_group_name = session_config.aws_cloudwatch_log_group - self._sequence_tokens: dict[str, str | None] = {} self._interval = interval self._queue: LogsQueue = queue # NOTE: add this limitation to ensure all of the log_streams in a merge @@ -79,10 +79,11 @@ def _create_log_group(self): # TODO: (20240214) should we let the edge side iot_logging_server # create the log group? 
log_group_name, client = self._log_group_name, self._client + exc_types = self._exc_types try: client.create_log_group(logGroupName=log_group_name) logger.info(f"{log_group_name=} has been created") - except client.exceptions.ResourceAlreadyExistsException as e: + except exc_types.ResourceAlreadyExistsException as e: logger.debug( f"{log_group_name=} already existed, skip creating: {e.response}" ) @@ -101,14 +102,14 @@ def _create_log_group(self): @retry(max_retry=16, backoff_factor=2, backoff_max=32) def _create_log_stream(self, log_stream_name: str): log_group_name, client = self._log_group_name, self._client + exc_types = self._exc_types try: client.create_log_stream( logGroupName=log_group_name, logStreamName=log_stream_name, ) logger.info(f"{log_stream_name=}@{log_group_name} has been created") - self._sequence_tokens = {} # clear sequence token on new stream created - except client.exceptions.ResourceAlreadyExistsException as e: + except exc_types.ResourceAlreadyExistsException as e: logger.debug( f"{log_stream_name=}@{log_group_name} already existed, skip creating: {e.response}" ) @@ -125,45 +126,27 @@ def _create_log_stream(self, log_stream_name: str): raise @retry(backoff_factor=2) - def send_messages(self, log_stream_name: str, message_list: list[LogMessage]): + def put_log_events(self, log_stream_name: str, message_list: list[LogMessage]): """ Ref: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/logs/client/put_log_events.html + + NOTE: sequence token is not needed and ignored by PutLogEvents action now. See the documentation for more details. + NOTE: The sequenceToken parameter is now ignored in PutLogEvents actions. PutLogEvents actions are now accepted + and never return InvalidSequenceTokenException or DataAlreadyAcceptedException even if the sequence token is not valid. + See the documentation for more details. """ request = LogEvent( logGroupName=self._log_group_name, logStreamName=log_stream_name, logEvents=message_list, ) - if _seq_token := self._sequence_tokens.get(log_stream_name): - request["sequenceToken"] = _seq_token - exceptions, client = self._client.exceptions, self._client + exc_types, client = self._exc_types, self._client try: - response = client.put_log_events(**request) - # NOTE: the sequenceToken is deprecated, put_log_events will always - # be accepted with/without a sequenceToken. - # see docs for more details. 
- if _sequence_token := response.get("nextSequenceToken"): - self._sequence_tokens[log_stream_name] = _sequence_token + client.put_log_events(**request) # logger.debug(f"successfully uploaded: {response}") - except exceptions.DataAlreadyAcceptedException: - pass - except exceptions.InvalidSequenceTokenException as e: - response = e.response - logger.debug(f"invalid sequence token: {response}") - - _resp_err_msg: str = chain_query(e.response, "Error", "Message", default="") - # null as the next sequenceToken means don't include any - # sequenceToken at all, not that the token should be set to "null" - next_expected_token = _resp_err_msg.rsplit(" ", 1)[-1] - if next_expected_token == "null": - self._sequence_tokens.pop(log_stream_name, None) - else: - self._sequence_tokens[log_stream_name] = next_expected_token - raise # let the retry do the logging upload again - except exceptions.ResourceNotFoundException as e: - response = e.response + except exc_types.ResourceNotFoundException as e: logger.debug(f"{log_stream_name=} not found: {e!r}") self._create_log_stream(log_stream_name) raise @@ -200,7 +183,7 @@ def thread_main(self) -> NoReturn: for log_stream_suffix, logs in message_dict.items(): with contextlib.suppress(Exception): - self.send_messages( + self.put_log_events( get_log_stream_name( self._session_config.thing_name, log_stream_suffix ), From 4914fe6459e49fdfe1c556a8befb33a5ab5eb2e1 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 13 May 2024 02:50:45 +0000 Subject: [PATCH 118/128] fix test --- tests/test_aws_iot_logger.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/test_aws_iot_logger.py b/tests/test_aws_iot_logger.py index 779f29c..378efc5 100644 --- a/tests/test_aws_iot_logger.py +++ b/tests/test_aws_iot_logger.py @@ -90,7 +90,7 @@ class TestAWSIoTLogger: class _TestFinished(Exception): pass - def _mocked_send_messages(self, _ecu_id: str, _logs: list[LogMessage]): + def _mocked_put_log_events(self, _ecu_id: str, _logs: list[LogMessage]): self._test_result[_ecu_id] = _logs @pytest.fixture @@ -118,7 +118,7 @@ def setup_test(self, prepare_test_data, mocker: MockerFixture): # NOTE: another hack to let all entries being merged within one # loop iteration. self._max_logs_per_merge = float("inf") - self.send_messages = self._mocked_send_messages + self.put_log_events = self._mocked_put_log_events self._interval = 6 # place holder self._session_config = mocker.MagicMock() # place holder # for holding test results @@ -135,10 +135,12 @@ def test_thread_main(self, mocker: MockerFixture): self._create_log_group = mocked__create_log_group = mocker.MagicMock( spec=AWSIoTLogger._create_log_group ) + # ------ execution ------ # with pytest.raises(self._TestFinished): func_to_test.__get__(self)() logger.info("execution finished") + # ------ check result ------ # mocked__create_log_group.assert_called_once() # confirm the send_messages mock receives the expecting calls. 
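For context on patch 117 above: since CloudWatch Logs now ignores sequence tokens, a PutLogEvents call needs nothing beyond the log group, the log stream, and the batched events. A minimal standalone sketch with boto3 (the region, log group, and log stream names below are placeholders, and the stream is assumed to already exist):

import time

import boto3

client = boto3.client("logs", region_name="us-east-1")  # placeholder region

client.put_log_events(
    logGroupName="/example/iot-logging-server",  # placeholder log group
    logStreamName="example-thing-example-suffix",  # placeholder log stream
    logEvents=[
        # timestamps are epoch milliseconds, messages are plain strings
        {"timestamp": int(time.time() * 1000), "message": "hello from iot-logger"},
    ],
)
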
From 4811680fed199239b9a11c4c39f5e212144d04ad Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 13 May 2024 03:06:12 +0000 Subject: [PATCH 119/128] log_proxy_server: issue a warning when no ecu_info.yaml is presented and filtering is disabled --- src/otaclient_iot_logging_server/log_proxy_server.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index 979c178..a8cc9a7 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -43,6 +43,10 @@ def __init__(self, queue: LogsQueue) -> None: logger.info( f"setup allowed_ecu_id from ecu_info.yaml: {stripped_ecu_info.ecu_id_set}" ) + else: + logger.warning( + "no ecu_info.yaml presented, logging upload filtering is DISABLED" + ) # route: POST /{ecu_id} async def logging_post_handler(self, request: Request): From 7c4400e4f021efd30420c585866558531dfce150 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 13 May 2024 03:06:40 +0000 Subject: [PATCH 120/128] configs: set the default value of UPLOAD_INTERVAL to a reasonable 3 seconds --- src/otaclient_iot_logging_server/configs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 4c48b64..97fe796 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -48,7 +48,7 @@ class ConfigurableLoggingServerConfig(BaseSettings): MAX_LOGS_BACKLOG: int = 4096 MAX_LOGS_PER_MERGE: int = 512 - UPLOAD_INTERVAL: int = 60 # in seconds + UPLOAD_INTERVAL: int = 3 # in seconds ECU_INFO_YAML: str = "/boot/ota/ecu_info.yaml" From 58001ced1a3f7a4683cea7b5a2b253747edcbaf7 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Mon, 13 May 2024 03:06:57 +0000 Subject: [PATCH 121/128] update README.md --- README.md | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 6665951..e235e80 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,18 @@ A logging server that uploads logs sent from otaclient to AWS cloudwatch. This iot-logger is expected to be installed on the main ECU, with greengrass certificates and otaclient config file(ecu_info.yaml) installed. +## TPM support + +If greengrass is configured to use TPM with pkcs11(priv-key sealed by TPM, with or without cert also stored in tpm-pkcs11 database), iot-logger will automatically enable TPM support when parsing the greengrass configuration file. + +## Filter uploaded logs + +If `ecu_info.yaml` presented and valid, iot-logger will only accept logs from known ECU ids. +The known ECU ids are retrieved from parsing `ecu_info.secondaries` field. +Currently only ECU id will be checked, IP checking is not performed as sub ECU otaclient might send logging from different IPs if ECU has multiple interfaces. + +NOTE that if `ecu_info.yaml` file is not presented, the filtering will be DISABLED. + ## Usage ### Environmental variables @@ -19,15 +31,9 @@ The behaviors of the iot_logging_server can be configured with the following env | LISTEN_ADDRESS | `127.0.0.1` | The IP address iot-logger server listen on. By default only receive logs from local machine. | | LISTEN_PORT | `8083` | | | UPLOAD_LOGGING_SERVER_LOGS | `false` | Whether to upload the logs from server itself to cloudwatchlogs. 
| -| SERVER_LOGSTREAM_SUFFIX | `iot_logging_server` | log_stream_suffix to use for local server logs upload. | +| SERVER_LOGSTREAM_SUFFIX | `iot_logging_server` | log_stream suffix for local server logs on cloudwatchlogs if uploaded. | | SERVER_LOGGING_LEVEL | `INFO` | The logging level of the server itself. | | SERVER_LOGGING_LOG_FORMAT | `[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s` | | | MAX_LOGS_BACKLOG | `4096` | Max pending log entries. | | MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group. | -| UPLOAD_INTERVAL | `60` | Interval of uploading log batches to cloud. Note that if the logger is restarted before next upload occurs, the pending loggings will be dropped. | - -### ecu_info.yaml - -If `ecu_info.yaml` presented and valid, iot-logger will only accept logs from known ECU ids. -The known ECU ids are retrieved from parsing `ecu_info.secondaries` field. -Currently only ECU id will be checked, IP checking is not performed as sub ECU otaclient might send logging from different IPs if ECU has multiple interfaces. +| UPLOAD_INTERVAL | `3` | Interval of uploading log batches to cloud. **Note that if the logger is restarted before next upload occurs, the pending loggings will be dropped.** | \ No newline at end of file From 03a3e4d2fbcea30ac82015c42c96e8d2929f67c9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 13 May 2024 03:07:08 +0000 Subject: [PATCH 122/128] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e235e80..4ffe738 100644 --- a/README.md +++ b/README.md @@ -36,4 +36,4 @@ The behaviors of the iot_logging_server can be configured with the following env | SERVER_LOGGING_LOG_FORMAT | `[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s` | | | MAX_LOGS_BACKLOG | `4096` | Max pending log entries. | | MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group. | -| UPLOAD_INTERVAL | `3` | Interval of uploading log batches to cloud. **Note that if the logger is restarted before next upload occurs, the pending loggings will be dropped.** | \ No newline at end of file +| UPLOAD_INTERVAL | `3` | Interval of uploading log batches to cloud. **Note that if the logger is restarted before next upload occurs, the pending loggings will be dropped.** | From 29f3f29fc73e97a26a7df3b55c4d33c28192c942 Mon Sep 17 00:00:00 2001 From: Bodong Yang <86948717+Bodong-Yang@users.noreply.github.com> Date: Tue, 14 May 2024 11:13:09 +0900 Subject: [PATCH 123/128] feat: implement config_file_monitor (#2) This PR introduces the functionality of killing the server on config files changed. Currently the in use ecu_info.yaml, aws_profile_info.yaml and greengrass configuration files are monitored. This feature is expected to be used together with systemd.service Restart policy to achieve automatically restart on config files changed. 
--- README.md | 6 ++ src/otaclient_iot_logging_server/__main__.py | 9 ++- .../config_file_monitor.py | 79 +++++++++++++++++++ src/otaclient_iot_logging_server/configs.py | 6 ++ src/otaclient_iot_logging_server/ecu_info.py | 8 ++ .../greengrass_config.py | 13 +-- .../log_proxy_server.py | 9 +-- 7 files changed, 112 insertions(+), 18 deletions(-) create mode 100644 src/otaclient_iot_logging_server/config_file_monitor.py diff --git a/README.md b/README.md index 4ffe738..c20f9f5 100644 --- a/README.md +++ b/README.md @@ -16,6 +16,11 @@ Currently only ECU id will be checked, IP checking is not performed as sub ECU o NOTE that if `ecu_info.yaml` file is not presented, the filtering will be DISABLED. +## Auto restart on config files changed + +By default, the `EXIT_ON_CONFIG_FILE_CHANGED` is enabled. +Together with systemd.service `Restart` policy configured, automatically restart iot-logger server on config files changed can be achieved. + ## Usage ### Environmental variables @@ -37,3 +42,4 @@ The behaviors of the iot_logging_server can be configured with the following env | MAX_LOGS_BACKLOG | `4096` | Max pending log entries. | | MAX_LOGS_PER_MERGE | `512` | Max log entries in a merge group. | | UPLOAD_INTERVAL | `3` | Interval of uploading log batches to cloud. **Note that if the logger is restarted before next upload occurs, the pending loggings will be dropped.** | +| EXIT_ON_CONFIG_FILE_CHANGED | `true` | Whether to kill the server on config files changed. **Note that this feature is expected to be used together with systemd.service Restart.** | diff --git a/src/otaclient_iot_logging_server/__main__.py b/src/otaclient_iot_logging_server/__main__.py index 6c6ffd6..0e4e49b 100644 --- a/src/otaclient_iot_logging_server/__main__.py +++ b/src/otaclient_iot_logging_server/__main__.py @@ -21,6 +21,7 @@ from otaclient_iot_logging_server._common import LogsQueue from otaclient_iot_logging_server._log_setting import config_logging from otaclient_iot_logging_server.aws_iot_logger import start_aws_iot_logger_thread +from otaclient_iot_logging_server.config_file_monitor import config_file_monitor_thread from otaclient_iot_logging_server.configs import server_cfg from otaclient_iot_logging_server.log_proxy_server import launch_server @@ -28,7 +29,6 @@ def main() -> None: # server scope log entries pipe queue: LogsQueue = Queue(maxsize=server_cfg.MAX_LOGS_BACKLOG) - # ------ configure local logging ------ # root_logger = config_logging( queue, @@ -38,13 +38,16 @@ def main() -> None: server_logstream_suffix=server_cfg.SERVER_LOGSTREAM_SUFFIX, ) - # ------ start server ------ # root_logger.info( f"launching iot_logging_server({__version__}) at http://{server_cfg.LISTEN_ADDRESS}:{server_cfg.LISTEN_PORT}" ) root_logger.info(f"iot_logging_server config: \n{server_cfg}") - + # ------ launch aws cloudwatch client ------ # start_aws_iot_logger_thread(queue) + # ------ launch config file monitor ------ # + if server_cfg.EXIT_ON_CONFIG_FILE_CHANGED: + config_file_monitor_thread() + # ------ start server ------ # launch_server(queue=queue) # NoReturn diff --git a/src/otaclient_iot_logging_server/config_file_monitor.py b/src/otaclient_iot_logging_server/config_file_monitor.py new file mode 100644 index 0000000..15268af --- /dev/null +++ b/src/otaclient_iot_logging_server/config_file_monitor.py @@ -0,0 +1,79 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Monitor the used config files. + +Monitor the files listed in , kill the server +if any of the files are changed. + +This is expected to be used together with systemd.unit Restart policy +to achieve automatically restart on configuration files changed. +""" + + +from __future__ import annotations + +import logging +import os +import signal +import threading +import time +from os import stat_result +from pathlib import Path +from typing import NamedTuple, NoReturn + +logger = logging.getLogger(__name__) + +_CHECK_INTERVAL = 3 # second + +monitored_config_files: set[str] = set() +_monitored_files_stat: dict[str, _MCTime] = {} + + +class _MCTime(NamedTuple): + mtime: int + ctime: int + + def file_changed(self, new_mctime: _MCTime) -> bool: + # if create time is newer in , it means the file is recreated. + # if modified time is newer in , it means the file is modified. + return self.ctime < new_mctime.ctime or self.mtime < new_mctime.mtime + + @classmethod + def from_stat(cls, stat: stat_result) -> _MCTime: + return cls(int(stat.st_mtime), int(stat.st_ctime)) + + +def _config_file_monitor() -> NoReturn: + # initialize, record the original status + logger.info(f"start to monitor the changes of {monitored_config_files}") + while True: + for entry in monitored_config_files: + new_f_mctime = _MCTime.from_stat(Path(entry).stat()) + if entry not in _monitored_files_stat: + _monitored_files_stat[entry] = new_f_mctime + continue + + f_mctime = _monitored_files_stat[entry] + if f_mctime.file_changed(new_f_mctime): + logger.warning(f"detect change on config file {entry}, exit") + # NOTE: sys.exit is not working in thread + os.kill(os.getpid(), signal.SIGINT) + + time.sleep(_CHECK_INTERVAL) + + +def config_file_monitor_thread() -> threading.Thread: + t = threading.Thread(target=_config_file_monitor, daemon=True) + t.start() + return t diff --git a/src/otaclient_iot_logging_server/configs.py b/src/otaclient_iot_logging_server/configs.py index 97fe796..f2d6796 100644 --- a/src/otaclient_iot_logging_server/configs.py +++ b/src/otaclient_iot_logging_server/configs.py @@ -24,6 +24,8 @@ from pydantic_settings import BaseSettings, SettingsConfigDict from typing_extensions import Annotated +from otaclient_iot_logging_server.config_file_monitor import monitored_config_files + _LoggingLevelName = Literal["INFO", "DEBUG", "CRITICAL", "ERROR", "WARNING"] @@ -52,6 +54,9 @@ class ConfigurableLoggingServerConfig(BaseSettings): ECU_INFO_YAML: str = "/boot/ota/ecu_info.yaml" + EXIT_ON_CONFIG_FILE_CHANGED: bool = True + """Kill the server when any config files changed.""" + class _AWSProfile(BaseModel): model_config = SettingsConfigDict(frozen=True) @@ -76,3 +81,4 @@ def load_profile_info(_cfg_fpath: str) -> AWSProfileInfo: server_cfg = ConfigurableLoggingServerConfig() profile_info = load_profile_info(server_cfg.AWS_PROFILE_INFO) +monitored_config_files.add(server_cfg.AWS_PROFILE_INFO) diff --git a/src/otaclient_iot_logging_server/ecu_info.py b/src/otaclient_iot_logging_server/ecu_info.py index c401763..90ba3b6 100644 --- a/src/otaclient_iot_logging_server/ecu_info.py +++ 
b/src/otaclient_iot_logging_server/ecu_info.py @@ -27,6 +27,9 @@ import yaml from pydantic import BaseModel, ConfigDict, Field, IPvAnyAddress +from otaclient_iot_logging_server.config_file_monitor import monitored_config_files +from otaclient_iot_logging_server.configs import server_cfg + logger = logging.getLogger(__name__) @@ -65,3 +68,8 @@ def parse_ecu_info(ecu_info_file: Path | str) -> Optional[ECUInfo]: return ECUInfo.model_validate(loaded_ecu_info, strict=True) except Exception as e: logger.info(f"{ecu_info_file=} is invalid or missing: {e!r}") + + +ecu_info = parse_ecu_info(server_cfg.ECU_INFO_YAML) +if ecu_info: + monitored_config_files.add(server_cfg.ECU_INFO_YAML) diff --git a/src/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py index 8ab3473..2e37b55 100644 --- a/src/otaclient_iot_logging_server/greengrass_config.py +++ b/src/otaclient_iot_logging_server/greengrass_config.py @@ -28,6 +28,7 @@ from pydantic import computed_field from otaclient_iot_logging_server._utils import FixedConfig, chain_query, remove_prefix +from otaclient_iot_logging_server.config_file_monitor import monitored_config_files from otaclient_iot_logging_server.configs import profile_info, server_cfg logger = logging.getLogger(__name__) @@ -76,8 +77,6 @@ def profile(self) -> str: @property def thing_name(self) -> str: return remove_prefix(self.resource_id, "thing/") - - # # ------ v1 configuration parse ------ # # @@ -117,8 +116,6 @@ def parse_v1_config(_raw_cfg: str) -> IoTSessionConfig: region=thing_arn.region, aws_credential_provider_endpoint=str(this_profile_info.credential_endpoint), ) - - # # ------ v2 configuration parse ------ # # @@ -137,7 +134,6 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: this_profile_info = profile_info.get_profile_info( get_profile_from_thing_name(thing_name) ) - # NOTE(20240207): use credential endpoint defined in the config.yml in prior, # only when this information is not available, we use the # <_AWS_CREDENTIAL_PROVIDER_ENDPOINT_MAPPING> to get endpoint. @@ -153,7 +149,6 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: cred_endpoint = _cred_endpoint else: cred_endpoint = this_profile_info.credential_endpoint - # ------ parse pkcs11 config if any ------ # _raw_pkcs11_cfg: dict[str, str] pkcs11_cfg = None @@ -188,13 +183,9 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: aws_credential_provider_endpoint=cred_endpoint, pkcs11_config=pkcs11_cfg, ) - - # # ------ main config parser ------ # # - - class PKCS11Config(FixedConfig): """ See services.aws.greengrass.crypto.Pkcs11Provider section for more details. 
@@ -259,10 +250,12 @@ def parse_config() -> IoTSessionConfig: if (_v2_cfg_f := Path(server_cfg.GREENGRASS_V2_CONFIG)).is_file(): _v2_cfg = parse_v2_config(_v2_cfg_f.read_text()) logger.debug(f"gg config v2 is in used: {_v2_cfg}") + monitored_config_files.add(server_cfg.GREENGRASS_V2_CONFIG) return _v2_cfg _v1_cfg = parse_v1_config(Path(server_cfg.GREENGRASS_V1_CONFIG).read_text()) logger.debug(f"gg config v1 is in used: {_v1_cfg}") + monitored_config_files.add(server_cfg.GREENGRASS_V1_CONFIG) return _v1_cfg except Exception as e: _msg = f"failed to parse config: {e!r}" diff --git a/src/otaclient_iot_logging_server/log_proxy_server.py b/src/otaclient_iot_logging_server/log_proxy_server.py index a8cc9a7..586301e 100644 --- a/src/otaclient_iot_logging_server/log_proxy_server.py +++ b/src/otaclient_iot_logging_server/log_proxy_server.py @@ -25,7 +25,7 @@ from otaclient_iot_logging_server._common import LogMessage, LogsQueue from otaclient_iot_logging_server.configs import server_cfg -from otaclient_iot_logging_server.ecu_info import parse_ecu_info +from otaclient_iot_logging_server.ecu_info import ecu_info logger = logging.getLogger(__name__) @@ -37,11 +37,10 @@ def __init__(self, queue: LogsQueue) -> None: self._queue = queue self._allowed_ecus = None - stripped_ecu_info = parse_ecu_info(server_cfg.ECU_INFO_YAML) - if stripped_ecu_info: - self._allowed_ecus = stripped_ecu_info.ecu_id_set + if ecu_info: + self._allowed_ecus = ecu_info.ecu_id_set logger.info( - f"setup allowed_ecu_id from ecu_info.yaml: {stripped_ecu_info.ecu_id_set}" + f"setup allowed_ecu_id from ecu_info.yaml: {ecu_info.ecu_id_set}" ) else: logger.warning( From 3561785951b0b34bfff19120bd367543aff166a2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 14 May 2024 02:13:17 +0000 Subject: [PATCH 124/128] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/otaclient_iot_logging_server/greengrass_config.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/otaclient_iot_logging_server/greengrass_config.py b/src/otaclient_iot_logging_server/greengrass_config.py index 2e37b55..11536c0 100644 --- a/src/otaclient_iot_logging_server/greengrass_config.py +++ b/src/otaclient_iot_logging_server/greengrass_config.py @@ -77,6 +77,8 @@ def profile(self) -> str: @property def thing_name(self) -> str: return remove_prefix(self.resource_id, "thing/") + + # # ------ v1 configuration parse ------ # # @@ -116,6 +118,8 @@ def parse_v1_config(_raw_cfg: str) -> IoTSessionConfig: region=thing_arn.region, aws_credential_provider_endpoint=str(this_profile_info.credential_endpoint), ) + + # # ------ v2 configuration parse ------ # # @@ -183,6 +187,8 @@ def parse_v2_config(_raw_cfg: str) -> IoTSessionConfig: aws_credential_provider_endpoint=cred_endpoint, pkcs11_config=pkcs11_cfg, ) + + # # ------ main config parser ------ # # From 994c60afca494b1fdeb9daa75276ebd923e56318 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 14 May 2024 02:40:19 +0000 Subject: [PATCH 125/128] add tests for test_config_file_monitor, fix up other tests --- tests/test__main__.py | 1 + tests/test_config_file_monitor.py | 53 +++++++++++++++++++++++++++++++ tests/test_configs.py | 6 +++- 3 files changed, 59 insertions(+), 1 deletion(-) create mode 100644 tests/test_config_file_monitor.py diff --git a/tests/test__main__.py b/tests/test__main__.py index 74816ff..5c7d42b 100644 --- a/tests/test__main__.py +++ b/tests/test__main__.py @@ -42,6 +42,7 
@@ class _ServerCfg: MAX_LOGS_PER_MERGE: int = 123 MAX_LOGS_BACKLOG: int = 1234 UPLOAD_INTERVAL: int = 12 + EXIT_ON_CONFIG_FILE_CHANGED: bool = True @pytest.mark.parametrize("_in_server_cfg, _version", [(_ServerCfg(), "test_version")]) diff --git a/tests/test_config_file_monitor.py b/tests/test_config_file_monitor.py new file mode 100644 index 0000000..dc6dcc3 --- /dev/null +++ b/tests/test_config_file_monitor.py @@ -0,0 +1,53 @@ +# Copyright 2022 TIER IV, INC. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import annotations + +from pathlib import Path + +import pytest +from pytest_mock import MockerFixture + +from otaclient_iot_logging_server import config_file_monitor + + +class _SuccessExit(Exception): + """config file monitor successfully kills the server.""" + + +class TestConfigFileMonitor: + + @pytest.fixture(autouse=True) + def setup_set(self, tmp_path: Path, mocker: MockerFixture): + self.config_file = config_file = tmp_path / "config_file" + config_file.write_text("config_file") + config_file_monitor.monitored_config_files.add(str(config_file)) + + # hack time.sleep to modify the config_file + def _modify_config_file(*args, **kwargs): + config_file.write_text("another config_file") + + mocker.patch.object( + config_file_monitor.time, + "sleep", + mocker.MagicMock(wraps=_modify_config_file), + ) + + # mock os.kill to raise SuccessExit exception + mocker.patch("os.kill", mocker.MagicMock(side_effect=_SuccessExit)) + + def test_config_file_monitor(self): + with pytest.raises(_SuccessExit): + config_file_monitor._config_file_monitor() diff --git a/tests/test_configs.py b/tests/test_configs.py index a9edbee..cc91a31 100644 --- a/tests/test_configs.py +++ b/tests/test_configs.py @@ -48,8 +48,9 @@ "SERVER_LOGGING_LOG_FORMAT": "[%(asctime)s][%(levelname)s]-%(name)s:%(funcName)s:%(lineno)d,%(message)s", "MAX_LOGS_BACKLOG": 4096, "MAX_LOGS_PER_MERGE": 512, - "UPLOAD_INTERVAL": 60, + "UPLOAD_INTERVAL": 3, "ECU_INFO_YAML": "/boot/ota/ecu_info.yaml", + "EXIT_ON_CONFIG_FILE_CHANGED": True, }, ), # test#1: frequently changed settings @@ -73,6 +74,7 @@ "MAX_LOGS_PER_MERGE": 512, "UPLOAD_INTERVAL": 30, "ECU_INFO_YAML": "/boot/ota/ecu_info.yaml", + "EXIT_ON_CONFIG_FILE_CHANGED": True, }, ), # test#2: change everything @@ -91,6 +93,7 @@ "MAX_LOGS_PER_MERGE": "128", "UPLOAD_INTERVAL": "10", "ECU_INFO_YAML": "/some/where/ecu_info.yaml", + "EXIT_ON_CONFIG_FILE_CHANGED": "false", }, { "GREENGRASS_V1_CONFIG": "ggv1_cfg.json", @@ -106,6 +109,7 @@ "MAX_LOGS_PER_MERGE": 128, "UPLOAD_INTERVAL": 10, "ECU_INFO_YAML": "/some/where/ecu_info.yaml", + "EXIT_ON_CONFIG_FILE_CHANGED": False, }, ), ], From 12414722391a06408993fe7f34d4bec5deff3215 Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 14 May 2024 03:40:49 +0000 Subject: [PATCH 126/128] fix test_log_proxy --- tests/test_log_proxy_server.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_log_proxy_server.py b/tests/test_log_proxy_server.py index 2ba2cc2..9113cfa 100644 --- 
a/tests/test_log_proxy_server.py +++ b/tests/test_log_proxy_server.py @@ -32,6 +32,7 @@ import otaclient_iot_logging_server.log_proxy_server as log_server_module from otaclient_iot_logging_server._common import LogsQueue +from otaclient_iot_logging_server.ecu_info import parse_ecu_info from otaclient_iot_logging_server.log_proxy_server import LoggingPostHandler logger = logging.getLogger(__name__) @@ -47,6 +48,8 @@ class _ServerConfig: LISTEN_ADDRESS: str = "127.0.0.1" LISTEN_PORT: int = 8083 ECU_INFO_YAML: Path = TEST_DIR / "ecu_info.yaml" + # remember to disable config file monitor + EXIT_ON_CONFIG_FILE_CHANGED: bool = False _test_server_cfg = _ServerConfig() @@ -83,7 +86,12 @@ class TestLogProxyServer: TOTAL_MSG_NUM = 4096 @pytest.fixture(autouse=True) - async def launch_server(self, mocker: MockerFixture): + def mock_ecu_info(self, mocker: MockerFixture): + ecu_info = parse_ecu_info(TEST_DIR / "ecu_info.yaml") + mocker.patch(f"{MODULE}.ecu_info", ecu_info) + + @pytest.fixture(autouse=True) + async def launch_server(self, mocker: MockerFixture, mock_ecu_info): """ See https://docs.aiohttp.org/en/stable/web_advanced.html#custom-resource-implementation for more details. From de75613f8cef2b78a60f3b893dc28a8e84556cec Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Tue, 14 May 2024 03:56:25 +0000 Subject: [PATCH 127/128] test_main: disable config_file_monitor --- tests/test__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test__main__.py b/tests/test__main__.py index 5c7d42b..8ad8004 100644 --- a/tests/test__main__.py +++ b/tests/test__main__.py @@ -42,7 +42,7 @@ class _ServerCfg: MAX_LOGS_PER_MERGE: int = 123 MAX_LOGS_BACKLOG: int = 1234 UPLOAD_INTERVAL: int = 12 - EXIT_ON_CONFIG_FILE_CHANGED: bool = True + EXIT_ON_CONFIG_FILE_CHANGED: bool = False @pytest.mark.parametrize("_in_server_cfg, _version", [(_ServerCfg(), "test_version")]) From a1cb170fe770dc7878763af078da7eec8a9eaeaa Mon Sep 17 00:00:00 2001 From: Bodong Yang Date: Thu, 16 May 2024 05:30:13 +0000 Subject: [PATCH 128/128] config_file_monitor: if config_file is being deleted, just skip it --- src/otaclient_iot_logging_server/config_file_monitor.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/otaclient_iot_logging_server/config_file_monitor.py b/src/otaclient_iot_logging_server/config_file_monitor.py index 15268af..62504b4 100644 --- a/src/otaclient_iot_logging_server/config_file_monitor.py +++ b/src/otaclient_iot_logging_server/config_file_monitor.py @@ -59,7 +59,13 @@ def _config_file_monitor() -> NoReturn: logger.info(f"start to monitor the changes of {monitored_config_files}") while True: for entry in monitored_config_files: - new_f_mctime = _MCTime.from_stat(Path(entry).stat()) + try: + f_stat = Path(entry).stat() + except Exception as e: + logger.debug(f"cannot query stat from {entry}, skip: {e!r}") + continue + + new_f_mctime = _MCTime.from_stat(f_stat) if entry not in _monitored_files_stat: _monitored_files_stat[entry] = new_f_mctime continue
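
The change in patch 128 boils down to this: Path.stat() can raise if a monitored config file is deleted (or swapped out) between polls, so the loop now skips that entry for the round instead of crashing the monitor thread. Below is a minimal stand-alone sketch of that polling pattern. The names _poll_once and _monitor_loop, the plain (mtime, ctime) tuple standing in for the module's _MCTime helper, the SIGTERM choice, and the 3-second interval are all illustrative assumptions, not the project's exact implementation.

import logging
import os
import signal
import time
from pathlib import Path

logger = logging.getLogger(__name__)

# illustrative stand-ins for the module's monitored_config_files / _monitored_files_stat
monitored_config_files: set[str] = set()
_monitored_files_stat: dict[str, tuple[int, int]] = {}


def _poll_once() -> bool:
    """Return True if any monitored config file changed since the last poll."""
    for entry in monitored_config_files:
        try:
            f_stat = Path(entry).stat()
        except Exception as e:
            # file deleted or otherwise unreadable: skip it this round (patch 128 behavior)
            logger.debug(f"cannot query stat from {entry}, skip: {e!r}")
            continue

        mctime = (int(f_stat.st_mtime), int(f_stat.st_ctime))
        if entry not in _monitored_files_stat:
            # first sighting: record the baseline and keep going
            _monitored_files_stat[entry] = mctime
            continue
        if _monitored_files_stat[entry] != mctime:
            return True
    return False


def _monitor_loop(interval: float = 3) -> None:
    # hypothetical driver loop; the real module's exit mechanism may differ
    while True:
        if _poll_once():
            logger.warning("config file changed, requesting server exit")
            os.kill(os.getpid(), signal.SIGTERM)
            return
        time.sleep(interval)

The tests added in patch 125 exercise exactly this seam: they patch time.sleep to rewrite the monitored file and os.kill to raise a sentinel exception, so a single pass through the loop is enough to observe the exit path without actually killing the test process.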