From b69c0d80692ede31dee55275a7bb35192c12dacf Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Tue, 3 Sep 2024 15:25:55 +0200 Subject: [PATCH 01/12] handle ingress regardless of coordinator status --- requirements.txt | 6 +++--- src/charm.py | 16 +++++++++------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/requirements.txt b/requirements.txt index 3834f32..52621b3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,8 +3,8 @@ importlib-metadata~=6.0.0 ops crossplane jsonschema==4.17.0 -lightkube==0.11.0 -lightkube-models==1.24.1.4 +lightkube>=0.15.4 +lightkube-models>=1.24.1.4 tenacity==8.2.3 # crossplane is a package from nginxinc to interact with the Nginx config crossplane @@ -19,4 +19,4 @@ cryptography # lib/charms/tempo_k8s/v1/tracing.py pydantic>=2 # lib/charms/prometheus_k8s/v0/prometheus_scrape.py -cosl +cosl==0.0.26 diff --git a/src/charm.py b/src/charm.py index 3dfb10b..16be734 100755 --- a/src/charm.py +++ b/src/charm.py @@ -85,6 +85,15 @@ def __init__(self, *args): self.tracing = TracingEndpointProvider(self, external_url=self._external_url) + # ingress + # handle ingress regardless of the coordinator's status. reason is, if we miss these events now, we need to + # 'remember' to publish ingress data as soon as our coordinator is ready to handle events + # (and that point is hard to catch) + ingress = self.on["ingress"] + self.framework.observe(ingress.relation_created, self._on_ingress_relation_created) + self.framework.observe(ingress.relation_joined, self._on_ingress_relation_joined) + self.framework.observe(self.ingress.on.ready, self._on_ingress_ready) + # refuse to handle any other event as we can't possibly know what to do. if not self.coordinator.can_handle_events: # logging will be handled by `self.coordinator` for each of the above circumstances. @@ -94,12 +103,6 @@ def __init__(self, *args): self.framework.observe(self.on.leader_elected, self._on_leader_elected) self.framework.observe(self.on.list_receivers_action, self._on_list_receivers_action) - # ingress - ingress = self.on["ingress"] - self.framework.observe(ingress.relation_created, self._on_ingress_relation_created) - self.framework.observe(ingress.relation_joined, self._on_ingress_relation_joined) - self.framework.observe(self.ingress.on.ready, self._on_ingress_ready) - # tracing self.framework.observe(self.tracing.on.request, self._on_tracing_request) self.framework.observe(self.tracing.on.broken, self._on_tracing_broken) @@ -182,7 +185,6 @@ def _on_tracing_broken(self, _): self._update_tracing_relations() def _on_cert_handler_changed(self, e: ops.RelationChangedEvent): - # tls readiness change means config change. 
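
A note on the pattern this patch establishes, since the same ordering comes up again later in the series: the ingress observers are registered unconditionally, and only the remaining observers are gated on coordinator readiness. A minimal sketch of that shape (the charm class and readiness check are illustrative stand-ins, not code from this PR; it assumes an `ingress` relation declared in charm metadata):

```
import ops


class SketchCoordinatorCharm(ops.CharmBase):
    """Illustrative only: mirrors the observer ordering introduced above."""

    def __init__(self, *args):
        super().__init__(*args)

        # Registered before the readiness gate: if these events fired while
        # the coordinator was not ready, we would have to "remember" to
        # publish ingress data later, and that point is hard to catch.
        ingress = self.on["ingress"]
        self.framework.observe(ingress.relation_created, self._on_ingress_changed)
        self.framework.observe(ingress.relation_joined, self._on_ingress_changed)

        if not self._coordinator_ready():  # stand-in for coordinator.can_handle_events
            # every observer below this line is skipped while not ready
            return

        self.framework.observe(self.on.leader_elected, self._on_leader_elected)

    def _coordinator_ready(self) -> bool:
        return True  # placeholder for the Coordinator's consistency checks

    def _on_ingress_changed(self, event: ops.RelationEvent):
        """Publish ingress data regardless of coordinator status."""

    def _on_leader_elected(self, event: ops.LeaderElectedEvent):
        """Only handled once the coordinator can process events."""
```
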
# sync scheme change with traefik and related consumers self._configure_ingress() From 0d4baaa3d2e819e756c4a7dd1627b7cfb33b71d9 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Wed, 4 Sep 2024 12:53:36 +0200 Subject: [PATCH 02/12] certhandler update --- .../observability_libs/v1/cert_handler.py | 42 +++++++- lib/charms/tempo_k8s/v1/charm_tracing.py | 100 ++++++++++++------ lib/charms/traefik_k8s/v2/ingress.py | 27 +++-- src/charm.py | 70 +++--------- 4 files changed, 142 insertions(+), 97 deletions(-) diff --git a/lib/charms/observability_libs/v1/cert_handler.py b/lib/charms/observability_libs/v1/cert_handler.py index 3b87ad4..6e693ff 100644 --- a/lib/charms/observability_libs/v1/cert_handler.py +++ b/lib/charms/observability_libs/v1/cert_handler.py @@ -26,7 +26,7 @@ self.framework.observe(self.cert_handler.on.cert_changed, self._on_server_cert_changed) container.push(keypath, self.cert_handler.private_key) -container.push(certpath, self.cert_handler.servert_cert) +container.push(certpath, self.cert_handler.server_cert) ``` Since this library uses [Juju Secrets](https://juju.is/docs/juju/secret) it requires Juju >= 3.0.3. @@ -59,7 +59,7 @@ import logging from ops.charm import CharmBase -from ops.framework import EventBase, EventSource, Object, ObjectEvents +from ops.framework import BoundEvent, EventBase, EventSource, Object, ObjectEvents, StoredState from ops.jujuversion import JujuVersion from ops.model import Relation, Secret, SecretNotFoundError @@ -67,7 +67,7 @@ LIBID = "b5cd5cd580f3428fa5f59a8876dcbe6a" LIBAPI = 1 -LIBPATCH = 11 +LIBPATCH = 12 VAULT_SECRET_LABEL = "cert-handler-private-vault" @@ -273,6 +273,7 @@ class CertHandler(Object): """A wrapper for the requirer side of the TLS Certificates charm library.""" on = CertHandlerEvents() # pyright: ignore + _stored = StoredState() def __init__( self, @@ -283,6 +284,7 @@ def __init__( peer_relation_name: str = "peers", cert_subject: Optional[str] = None, sans: Optional[List[str]] = None, + refresh_events: Optional[List[BoundEvent]] = None, ): """CertHandler is used to wrap TLS Certificates management operations for charms. @@ -299,8 +301,17 @@ def __init__( Must match metadata.yaml. cert_subject: Custom subject. Name collisions are under the caller's responsibility. sans: DNS names. If none are given, use FQDN. + refresh_events: an optional list of bound events which + will be observed to replace the current CSR with a new one + if there are changes in the CSR's DNS SANs or IP SANs. + Then, subsequently, replace its corresponding certificate with a new one. """ super().__init__(charm, key) + # use StoredState to store the hash of the CSR + # to potentially trigger a CSR renewal on `refresh_events` + self._stored.set_default( + csr_hash=None, + ) self.charm = charm # We need to sanitize the unit name, otherwise route53 complains: @@ -355,6 +366,15 @@ def __init__( self._on_upgrade_charm, ) + if refresh_events: + for ev in refresh_events: + self.framework.observe(ev, self._on_refresh_event) + + def _on_refresh_event(self, _): + """Replace the latest current CSR with a new one if there are any SANs changes.""" + if self._stored.csr_hash != self._csr_hash: + self._generate_csr(renew=True) + def _on_upgrade_charm(self, _): has_privkey = self.vault.get_value("private-key") @@ -419,6 +439,20 @@ def enabled(self) -> bool: return True + @property + def _csr_hash(self) -> int: + """A hash of the config that constructs the CSR. + + Only include here the config options that, should they change, should trigger a renewal of + the CSR. 
+ """ + return hash( + ( + tuple(self.sans_dns), + tuple(self.sans_ip), + ) + ) + @property def available(self) -> bool: """Return True if all certs are available in relation data; False otherwise.""" @@ -484,6 +518,8 @@ def _generate_csr( ) self.certificates.request_certificate_creation(certificate_signing_request=csr) + self._stored.csr_hash = self._csr_hash + if clear_cert: self.vault.clear() diff --git a/lib/charms/tempo_k8s/v1/charm_tracing.py b/lib/charms/tempo_k8s/v1/charm_tracing.py index 4306b4d..2dbdddd 100644 --- a/lib/charms/tempo_k8s/v1/charm_tracing.py +++ b/lib/charms/tempo_k8s/v1/charm_tracing.py @@ -172,14 +172,64 @@ def my_tracing_endpoint(self) -> Optional[str]: provide an *absolute* path to the certificate file instead. """ + +def _remove_stale_otel_sdk_packages(): + """Hack to remove stale opentelemetry sdk packages from the charm's python venv. + + See https://github.com/canonical/grafana-agent-operator/issues/146 and + https://bugs.launchpad.net/juju/+bug/2058335 for more context. This patch can be removed after + this juju issue is resolved and sufficient time has passed to expect most users of this library + have migrated to the patched version of juju. When this patch is removed, un-ignore rule E402 for this file in the pyproject.toml (see setting + [tool.ruff.lint.per-file-ignores] in pyproject.toml). + + This only has an effect if executed on an upgrade-charm event. + """ + # all imports are local to keep this function standalone, side-effect-free, and easy to revert later + import os + + if os.getenv("JUJU_DISPATCH_PATH") != "hooks/upgrade-charm": + return + + import logging + import shutil + from collections import defaultdict + + from importlib_metadata import distributions + + otel_logger = logging.getLogger("charm_tracing_otel_patcher") + otel_logger.debug("Applying _remove_stale_otel_sdk_packages patch on charm upgrade") + # group by name all distributions starting with "opentelemetry_" + otel_distributions = defaultdict(list) + for distribution in distributions(): + name = distribution._normalized_name # type: ignore + if name.startswith("opentelemetry_"): + otel_distributions[name].append(distribution) + + otel_logger.debug(f"Found {len(otel_distributions)} opentelemetry distributions") + + # If we have multiple distributions with the same name, remove any that have 0 associated files + for name, distributions_ in otel_distributions.items(): + if len(distributions_) <= 1: + continue + + otel_logger.debug(f"Package {name} has multiple ({len(distributions_)}) distributions.") + for distribution in distributions_: + if not distribution.files: # Not None or empty list + path = distribution._path # type: ignore + otel_logger.info(f"Removing empty distribution of {name} at {path}.") + shutil.rmtree(path) + + otel_logger.debug("Successfully applied _remove_stale_otel_sdk_packages patch. 
") + + +_remove_stale_otel_sdk_packages() + import functools import inspect import logging import os -import shutil from contextlib import contextmanager from contextvars import Context, ContextVar, copy_context -from importlib.metadata import distributions from pathlib import Path from typing import ( Any, @@ -199,14 +249,15 @@ def my_tracing_endpoint(self) -> Optional[str]: from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import Span, TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor -from opentelemetry.trace import INVALID_SPAN, Tracer -from opentelemetry.trace import get_current_span as otlp_get_current_span from opentelemetry.trace import ( + INVALID_SPAN, + Tracer, get_tracer, get_tracer_provider, set_span_in_context, set_tracer_provider, ) +from opentelemetry.trace import get_current_span as otlp_get_current_span from ops.charm import CharmBase from ops.framework import Framework @@ -219,7 +270,7 @@ def my_tracing_endpoint(self) -> Optional[str]: # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 13 +LIBPATCH = 15 PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] @@ -229,7 +280,6 @@ def my_tracing_endpoint(self) -> Optional[str]: # set this to 0 if you are debugging/developing this library source dev_logger.setLevel(logging.CRITICAL) - _CharmType = Type[CharmBase] # the type CharmBase and any subclass thereof _C = TypeVar("_C", bound=_CharmType) _T = TypeVar("_T", bound=type) @@ -281,9 +331,22 @@ def _get_tracer() -> Optional[Tracer]: try: return tracer.get() except LookupError: + # fallback: this course-corrects for a user error where charm_tracing symbols are imported + # from different paths (typically charms.tempo_k8s... and lib.charms.tempo_k8s...) try: ctx: Context = copy_context() if context_tracer := _get_tracer_from_context(ctx): + logger.warning( + "Tracer not found in `tracer` context var. " + "Verify that you're importing all `charm_tracing` symbols from the same module path. \n" + "For example, DO" + ": `from charms.lib...charm_tracing import foo, bar`. \n" + "DONT: \n" + " \t - `from charms.lib...charm_tracing import foo` \n" + " \t - `from lib...charm_tracing import bar` \n" + "For more info: https://python-notes.curiousefficiency.org/en/latest/python" + "_concepts/import_traps.html#the-double-import-trap" + ) return context_tracer.get() else: return None @@ -361,30 +424,6 @@ def _get_server_cert( return server_cert -def _remove_stale_otel_sdk_packages(): - """Hack to remove stale opentelemetry sdk packages from the charm's python venv. - - See https://github.com/canonical/grafana-agent-operator/issues/146 and - https://bugs.launchpad.net/juju/+bug/2058335 for more context. This patch can be removed after - this juju issue is resolved and sufficient time has passed to expect most users of this library - have migrated to the patched version of juju. - - This only does something if executed on an upgrade-charm event. 
- """ - if os.getenv("JUJU_DISPATCH_PATH") == "hooks/upgrade-charm": - logger.debug("Executing _remove_stale_otel_sdk_packages patch on charm upgrade") - # Find any opentelemetry_sdk distributions - otel_sdk_distributions = list(distributions(name="opentelemetry_sdk")) - # If there is more than 1, inspect each and if it has 0 entrypoints, infer that it is stale - if len(otel_sdk_distributions) > 1: - for distribution in otel_sdk_distributions: - if len(distribution.entry_points) == 0: - # Distribution appears to be empty. Remove it - path = distribution._path # type: ignore - logger.debug(f"Removing empty opentelemetry_sdk distribution at: {path}") - shutil.rmtree(path) - - def _setup_root_span_initializer( charm_type: _CharmType, tracing_endpoint_attr: str, @@ -420,7 +459,6 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): # apply hacky patch to remove stale opentelemetry sdk packages on upgrade-charm. # it could be trouble if someone ever decides to implement their own tracer parallel to # ours and before the charm has inited. We assume they won't. - _remove_stale_otel_sdk_packages() resource = Resource.create( attributes={ "service.name": _service_name, diff --git a/lib/charms/traefik_k8s/v2/ingress.py b/lib/charms/traefik_k8s/v2/ingress.py index 407cfb5..bb7ac5e 100644 --- a/lib/charms/traefik_k8s/v2/ingress.py +++ b/lib/charms/traefik_k8s/v2/ingress.py @@ -56,13 +56,14 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): import socket import typing from dataclasses import dataclass +from functools import partial from typing import Any, Callable, Dict, List, MutableMapping, Optional, Sequence, Tuple, Union import pydantic from ops.charm import CharmBase, RelationBrokenEvent, RelationEvent from ops.framework import EventSource, Object, ObjectEvents, StoredState from ops.model import ModelError, Relation, Unit -from pydantic import AnyHttpUrl, BaseModel, Field, validator +from pydantic import AnyHttpUrl, BaseModel, Field # The unique Charmhub library identifier, never change it LIBID = "e6de2a5cd5b34422a204668f3b8f90d2" @@ -72,7 +73,7 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 13 +LIBPATCH = 14 PYDEPS = ["pydantic"] @@ -84,6 +85,9 @@ def _on_ingress_revoked(self, event: IngressPerAppRevokedEvent): PYDANTIC_IS_V1 = int(pydantic.version.VERSION.split(".")[0]) < 2 if PYDANTIC_IS_V1: + from pydantic import validator + + input_validator = partial(validator, pre=True) class DatabagModel(BaseModel): # type: ignore """Base databag model.""" @@ -143,7 +147,9 @@ def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): return databag else: - from pydantic import ConfigDict + from pydantic import ConfigDict, field_validator + + input_validator = partial(field_validator, mode="before") class DatabagModel(BaseModel): """Base databag model.""" @@ -171,7 +177,7 @@ def load(cls, databag: MutableMapping): k: json.loads(v) for k, v in databag.items() # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.__fields__.items()} # type: ignore + if k in {(f.alias or n) for n, f in cls.model_fields.items()} # type: ignore } except json.JSONDecodeError as e: msg = f"invalid databag contents: expecting json. 
{databag}" @@ -252,14 +258,14 @@ class IngressRequirerAppData(DatabagModel): default="http", description="What scheme to use in the generated ingress url" ) - @validator("scheme", pre=True) + @input_validator("scheme") def validate_scheme(cls, scheme): # noqa: N805 # pydantic wants 'cls' as first arg """Validate scheme arg.""" if scheme not in {"http", "https", "h2c"}: raise ValueError("invalid scheme: should be one of `http|https|h2c`") return scheme - @validator("port", pre=True) + @input_validator("port") def validate_port(cls, port): # noqa: N805 # pydantic wants 'cls' as first arg """Validate port.""" assert isinstance(port, int), type(port) @@ -277,13 +283,13 @@ class IngressRequirerUnitData(DatabagModel): "IP can only be None if the IP information can't be retrieved from juju.", ) - @validator("host", pre=True) + @input_validator("host") def validate_host(cls, host): # noqa: N805 # pydantic wants 'cls' as first arg """Validate host.""" assert isinstance(host, str), type(host) return host - @validator("ip", pre=True) + @input_validator("ip") def validate_ip(cls, ip): # noqa: N805 # pydantic wants 'cls' as first arg """Validate ip.""" if ip is None: @@ -462,7 +468,10 @@ def _handle_relation(self, event): event.relation, data.app.name, data.app.model, - [unit.dict() for unit in data.units], + [ + unit.dict() if PYDANTIC_IS_V1 else unit.model_dump(mode="json") + for unit in data.units + ], data.app.strip_prefix or False, data.app.redirect_https or False, ) diff --git a/src/charm.py b/src/charm.py index 01a5d6b..da6dff6 100755 --- a/src/charm.py +++ b/src/charm.py @@ -87,27 +87,16 @@ def __init__(self, *args): self.tracing = TracingEndpointProvider(self, external_url=self._external_url) - # ingress - # handle ingress regardless of the coordinator's status. reason is, if we miss these events now, we need to - # 'remember' to publish ingress data as soon as our coordinator is ready to handle events - # (and that point is hard to catch) - ingress = self.on["ingress"] - self.framework.observe(ingress.relation_created, self._on_ingress_relation_created) - self.framework.observe(ingress.relation_joined, self._on_ingress_relation_joined) - self.framework.observe(self.ingress.on.ready, self._on_ingress_ready) - # refuse to handle any other event as we can't possibly know what to do. if not self.coordinator.can_handle_events: - # logging will be handled by `self.coordinator` for each of the above circumstances. + # logging is handled by the Coordinator object return - # lifecycle - self.framework.observe(self.on.leader_elected, self._on_leader_elected) - self.framework.observe(self.on.list_receivers_action, self._on_list_receivers_action) + # do this regardless of what event we are processing + self._update_outgoing_relations() - # tracing - self.framework.observe(self.tracing.on.request, self._on_tracing_request) - self.framework.observe(self.tracing.on.broken, self._on_tracing_broken) + # actions + self.framework.observe(self.on.list_receivers_action, self._on_list_receivers_action) # tls self.framework.observe( @@ -182,45 +171,12 @@ def enabled_receivers(self) -> Set[str]: ################## # EVENT HANDLERS # ################## - def _on_tracing_broken(self, _): - """Update tracing relations' databags once one relation is removed.""" - self._update_tracing_relations() - - def _on_cert_handler_changed(self, e: ops.RelationChangedEvent): - # tls readiness change means config change. 
- # sync scheme change with traefik and related consumers - self._configure_ingress() + def _on_cert_handler_changed(self, _: ops.RelationChangedEvent): # sync the server CA cert with the charm container. # technically, because of charm tracing, this will be called first thing on each event self._update_server_ca_cert() - # update relations to reflect the new certificate - self._update_tracing_relations() - - def _on_tracing_request(self, e: RequestEvent): - """Handle a remote requesting a tracing endpoint.""" - logger.debug(f"received tracing request from {e.relation.app}: {e.requested_receivers}") - self._update_tracing_relations() - - def _on_ingress_relation_created(self, _: RelationEvent): - self._configure_ingress() - - def _on_ingress_relation_joined(self, _: RelationEvent): - self._configure_ingress() - - def _on_leader_elected(self, _: ops.LeaderElectedEvent): - # as traefik_route goes through app data, we need to take lead of traefik_route if our leader dies. - self._configure_ingress() - - def _on_ingress_ready(self, _event): - # whenever there's a change in ingress, we need to update all tracing relations - self._update_tracing_relations() - - def _on_ingress_revoked(self, _event): - # whenever there's a change in ingress, we need to update all tracing relations - self._update_tracing_relations() - def _on_list_receivers_action(self, event: ops.ActionEvent): res = {} for receiver in self._requested_receivers(): @@ -239,8 +195,6 @@ def _configure_ingress(self) -> None: self.ingress.submit_to_traefik( self._ingress_config, static=self._static_ingress_config ) - if self.ingress.external_host: - self._update_tracing_relations() # notify the cluster self.coordinator.update_cluster() @@ -260,8 +214,6 @@ def _update_tracing_relations(self) -> None: [(p, self.get_receiver_url(p)) for p in requested_receivers] ) - self.coordinator.update_cluster() - def _requested_receivers(self) -> Tuple[ReceiverProtocol, ...]: """List what receivers we should activate, based on the active tracing relations and config-enabled extra receivers.""" # we start with the sum of the requested endpoints from the requirers @@ -282,6 +234,7 @@ def trace_retention_period_hours(self) -> int: def server_ca_cert(self) -> str: """For charm tracing.""" + # Fixme: we do this once too many times if we're handling cert_handler.changed self._update_server_ca_cert() return self.tempo.tls_ca_path @@ -403,6 +356,15 @@ def get_resources_requests(self, _) -> Dict[str, str]: """Returns a dictionary for the "requests" portion of the resources requirements.""" return {"cpu": "50m", "memory": "100Mi"} + def _update_outgoing_relations(self): + # This method contains unconditional update logic, i.e. logic that should be executed + # regardless of the event we are processing. 
+ # reason is, if we miss these events because our coordinator cannot process events (inconsistent status), + # we need to 'remember' to run this logic as soon as we become ready, which is hard and error-prone + self._configure_ingress() + self._update_tracing_relations() + self.coordinator.update_cluster() + if __name__ == "__main__": # pragma: nocover main(TempoCoordinatorCharm) From 5d23bacd7fedbaaff7d10a1185584ccd1344df7c Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Wed, 4 Sep 2024 12:56:28 +0200 Subject: [PATCH 03/12] lint --- lib/charms/tempo_k8s/v1/charm_tracing.py | 5 ++--- pyproject.toml | 7 ++++++- src/charm.py | 3 +-- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/lib/charms/tempo_k8s/v1/charm_tracing.py b/lib/charms/tempo_k8s/v1/charm_tracing.py index 2dbdddd..db977ba 100644 --- a/lib/charms/tempo_k8s/v1/charm_tracing.py +++ b/lib/charms/tempo_k8s/v1/charm_tracing.py @@ -249,15 +249,14 @@ def _remove_stale_otel_sdk_packages(): from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import Span, TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.trace import INVALID_SPAN, Tracer +from opentelemetry.trace import get_current_span as otlp_get_current_span from opentelemetry.trace import ( - INVALID_SPAN, - Tracer, get_tracer, get_tracer_provider, set_span_in_context, set_tracer_provider, ) -from opentelemetry.trace import get_current_span as otlp_get_current_span from ops.charm import CharmBase from ops.framework import Framework diff --git a/pyproject.toml b/pyproject.toml index 8eb7b54..681ad68 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,12 @@ extend-ignore = [ ] ignore = ["E501", "D107"] extend-exclude = ["__pycache__", "*.egg_info", "*integration/tester*"] -per-file-ignores = {"tests/*" = ["D100","D101","D102","D103","D104"]} + +[tool.ruff.lint.per-file-ignores] +"tests/*" = ["D100","D101","D102","D103","D104"] +# Remove charm_tracing.py E402 when _remove_stale_otel_sdk_packages() is removed +# from the library +"lib/charms/tempo_k8s/v1/charm_tracing.py" = ["E402"] [lint.mccabe] max-complexity = 10 diff --git a/src/charm.py b/src/charm.py index da6dff6..9bc9ba8 100755 --- a/src/charm.py +++ b/src/charm.py @@ -14,7 +14,6 @@ from charms.tempo_k8s.v1.charm_tracing import trace_charm from charms.tempo_k8s.v2.tracing import ( ReceiverProtocol, - RequestEvent, TracingEndpointProvider, TransportProtocolType, receiver_protocol_to_transport_protocol, @@ -22,7 +21,7 @@ from charms.traefik_route_k8s.v0.traefik_route import TraefikRouteRequirer from cosl.coordinated_workers.coordinator import ClusterRolesConfig, Coordinator from cosl.coordinated_workers.nginx import CA_CERT_PATH, CERT_PATH, KEY_PATH -from ops.charm import CharmBase, RelationEvent +from ops.charm import CharmBase from ops.main import main from nginx_config import NginxConfig From efb33df58afb8fbe615138fd40d530bcc5da2eb5 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Wed, 4 Sep 2024 13:20:06 +0200 Subject: [PATCH 04/12] lint --- src/charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/charm.py b/src/charm.py index 78c3aac..49756c4 100755 --- a/src/charm.py +++ b/src/charm.py @@ -25,7 +25,7 @@ from cosl.coordinated_workers.coordinator import ClusterRolesConfig, Coordinator from cosl.coordinated_workers.nginx import CA_CERT_PATH, CERT_PATH, KEY_PATH from ops import CollectStatusEvent -from ops.charm import CharmBase, RelationEvent +from ops.charm import CharmBase from ops.main import main 
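
Stepping back to the `input_validator` shim that patch 02/12 added to the vendored traefik ingress library: it is easier to read assembled in one place. A condensed, self-contained rendering (the `Example` model is illustrative; the shim itself is taken verbatim from the library diff):

```
from functools import partial

import pydantic

PYDANTIC_IS_V1 = int(pydantic.version.VERSION.split(".")[0]) < 2

if PYDANTIC_IS_V1:
    from pydantic import validator

    # pydantic v1: pre=True runs the validator before type coercion
    input_validator = partial(validator, pre=True)
else:
    from pydantic import field_validator

    # pydantic v2: mode="before" is the equivalent hook
    input_validator = partial(field_validator, mode="before")


class Example(pydantic.BaseModel):  # illustrative
    scheme: str = "http"

    @input_validator("scheme")
    def validate_scheme(cls, scheme):  # noqa: N805  # pydantic wants 'cls' first
        if scheme not in {"http", "https", "h2c"}:
            raise ValueError("invalid scheme: should be one of `http|https|h2c`")
        return scheme
```

The same decorator name now works against either pydantic major version, which is why the four validator call sites in the library could switch from `@validator(..., pre=True)` to `@input_validator(...)` without duplicating the version branch.
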
from nginx_config import NginxConfig From eb7ef20d4991e43f1843335756c9c224e812bb65 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Mon, 9 Sep 2024 10:01:51 +0200 Subject: [PATCH 05/12] some pr comments --- src/charm.py | 23 +++++------------------ tests/integration/test_ingressed_tls.py | 3 +-- 2 files changed, 6 insertions(+), 20 deletions(-) diff --git a/src/charm.py b/src/charm.py index 51f40b6..4496048 100755 --- a/src/charm.py +++ b/src/charm.py @@ -102,7 +102,7 @@ def __init__(self, *args): return # do this regardless of what event we are processing - self._update_outgoing_relations() + self._reconcile() # actions self.framework.observe(self.on.list_receivers_action, self._on_list_receivers_action) @@ -112,12 +112,6 @@ def __init__(self, *args): self.coordinator.cert_handler.on.cert_changed, self._on_cert_handler_changed ) - # remote-write - self.framework.observe( - self._remote_write.on.endpoints_changed, # pyright: ignore - self._on_remote_write_changed, - ) - ###################### # UTILITY PROPERTIES # ###################### @@ -198,10 +192,6 @@ def _on_list_receivers_action(self, event: ops.ActionEvent): res[receiver.replace("_", "-")] = self.get_receiver_url(receiver) event.set_results(res) - def _on_remote_write_changed(self, _event): - """Event handler for the remote write changed event.""" - # notify the cluster - self.coordinator.update_cluster() def _on_collect_status(self, e: CollectStatusEvent): # add Tempo coordinator-specific statuses @@ -218,8 +208,8 @@ def _on_collect_status(self, e: CollectStatusEvent): ################### # UTILITY METHODS # ################### - def _configure_ingress(self) -> None: - """Make sure the traefik route and tracing relation data are up-to-date.""" + def _update_ingress_relation(self) -> None: + """Make sure the traefik route is up-to-date.""" if not self.unit.is_leader(): return @@ -228,9 +218,6 @@ def _configure_ingress(self) -> None: self._ingress_config, static=self._static_ingress_config ) - # notify the cluster - self.coordinator.update_cluster() - def _update_tracing_relations(self) -> None: tracing_relations = self.model.relations["tracing"] if not tracing_relations: @@ -392,12 +379,12 @@ def remote_write_endpoints(self): """Return remote-write endpoints.""" return self._remote_write.endpoints - def _update_outgoing_relations(self): + def _reconcile(self): # This method contains unconditional update logic, i.e. logic that should be executed # regardless of the event we are processing. 
# reason is, if we miss these events because our coordinator cannot process events (inconsistent status), # we need to 'remember' to run this logic as soon as we become ready, which is hard and error-prone - self._configure_ingress() + self._update_ingress_relation() self._update_tracing_relations() self.coordinator.update_cluster() diff --git a/tests/integration/test_ingressed_tls.py b/tests/integration/test_ingressed_tls.py index 8e759e7..fc09b8d 100644 --- a/tests/integration/test_ingressed_tls.py +++ b/tests/integration/test_ingressed_tls.py @@ -8,14 +8,13 @@ deploy_cluster, emit_trace, get_traces_patiently, - protocols_endpoints, + protocols_endpoints, WORKER_NAME, ) from juju.application import Application from pytest_operator.plugin import OpsTest METADATA = yaml.safe_load(Path("./charmcraft.yaml").read_text()) APP_NAME = "tempo" -WORKER_NAME = "tempo-worker" SSC = "self-signed-certificates" SSC_APP_NAME = "ssc" TRAEFIK = "traefik-k8s" From 08a2602a815771e0a65d140b04deb143d8b2fe23 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Tue, 10 Sep 2024 07:44:56 +0200 Subject: [PATCH 06/12] uniformed name to loki convention --- charmcraft.yaml | 4 ++-- src/charm.py | 9 ++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/charmcraft.yaml b/charmcraft.yaml index 372d728..6b3dd84 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -133,10 +133,10 @@ parts: config: options: - retention_period_hours: + retention-period: description: | Maximum trace retention period, in hours. This will be used to configure the compactor to clean up trace data after this time. - Defaults to 720 hours, which is equivalent to 30 days. + Defaults to 720 hours, which is equivalent to 30 days. Per-stream retention limits are currently not supported. type: int default: 720 always_enable_zipkin: diff --git a/src/charm.py b/src/charm.py index 4496048..57285eb 100755 --- a/src/charm.py +++ b/src/charm.py @@ -49,7 +49,7 @@ def __init__(self, *args): self.ingress = TraefikRouteRequirer(self, self.model.get_relation("ingress"), "ingress") # type: ignore self.tempo = Tempo( requested_receivers=self._requested_receivers, - retention_period_hours=self.trace_retention_period_hours, + retention_period_hours=self._trace_retention_period_hours, ) # set alert_rules_path="", as we don't want to populate alert rules into the relation databag # we only need `self._remote_write.endpoints` @@ -245,11 +245,10 @@ def _requested_receivers(self) -> Tuple[ReceiverProtocol, ...]: return tuple(requested_receivers) @property - def trace_retention_period_hours(self) -> int: + def _trace_retention_period_hours(self) -> int: """Trace retention period for the compactor.""" - # if unset, default to 30 days - trace_retention_period_hours = cast(int, self.config.get("retention_period_hours", 720)) - return trace_retention_period_hours + # if unset, defaults to 30 days + return cast(int, self.config["retention-period"]) def server_ca_cert(self) -> str: """For charm tracing.""" From 1ea6e175281911a9ff97f49f7869fa7ad783c0f9 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Tue, 10 Sep 2024 07:46:38 +0200 Subject: [PATCH 07/12] lint --- src/charm.py | 1 - tests/integration/test_ingressed_tls.py | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/charm.py b/src/charm.py index a1bd967..44960f8 100755 --- a/src/charm.py +++ b/src/charm.py @@ -192,7 +192,6 @@ def _on_list_receivers_action(self, event: ops.ActionEvent): res[receiver.replace("_", "-")] = self.get_receiver_url(receiver) event.set_results(res) - def 
_on_collect_status(self, e: CollectStatusEvent): # add Tempo coordinator-specific statuses if ( diff --git a/tests/integration/test_ingressed_tls.py b/tests/integration/test_ingressed_tls.py index fc09b8d..ff003ab 100644 --- a/tests/integration/test_ingressed_tls.py +++ b/tests/integration/test_ingressed_tls.py @@ -5,10 +5,11 @@ import pytest import yaml from helpers import ( + WORKER_NAME, deploy_cluster, emit_trace, get_traces_patiently, - protocols_endpoints, WORKER_NAME, + protocols_endpoints, ) from juju.application import Application from pytest_operator.plugin import OpsTest From cebaed427e1de9e722862f2133721e39c02b39ef Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Tue, 10 Sep 2024 12:16:35 +0200 Subject: [PATCH 08/12] simplified tests --- tests/integration/conftest.py | 14 ++-- tests/integration/helpers.py | 14 ++-- tests/integration/test_charm.py | 74 ------------------- tests/integration/test_ingressed_tls.py | 48 ++++++------ tests/integration/test_integration.py | 14 ++-- tests/integration/test_scaling_monolithic.py | 4 +- tests/integration/test_self_monitoring.py | 5 +- tests/integration/test_self_tracing.py | 3 +- tests/integration/test_self_tracing_remote.py | 3 +- tests/integration/test_tls.py | 3 +- 10 files changed, 54 insertions(+), 128 deletions(-) delete mode 100644 tests/integration/test_charm.py diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index d1df125..3c342c6 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -7,7 +7,9 @@ import shutil import tempfile from pathlib import Path +from subprocess import check_call +from paramiko.proxy import subprocess from pytest import fixture from pytest_operator.plugin import OpsTest @@ -20,11 +22,13 @@ logger = logging.getLogger(__name__) -@fixture(scope="module") -async def tempo_charm(ops_test: OpsTest): - """Zinc charm used for integration testing.""" - charm = await ops_test.build_charm(".") - return charm +@fixture(scope="session") +def tempo_charm(): + """Tempo charm used for integration testing.""" + if tempo_charm:=os.getenv("TEMPO_CHARM"): + return tempo_charm + check_call(["charmcraft", "pack", "-v"]) + return "./tempo-coordinator-k8s_ubuntu-22.04-amd64.charm" @fixture(scope="module", autouse=True) diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 151a6d2..ba25856 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -299,13 +299,13 @@ async def deploy_cluster(ops_test: OpsTest, tempo_app=APP_NAME): await ops_test.model.integrate(tempo_app + ":tempo-cluster", WORKER_NAME + ":tempo-cluster") await deploy_and_configure_minio(ops_test) - - await ops_test.model.wait_for_idle( - apps=[tempo_app, WORKER_NAME, S3_INTEGRATOR], - status="active", - timeout=1000, - idle_period=30, - ) + with ops_test.fast_forward(): + await ops_test.model.wait_for_idle( + apps=[tempo_app, WORKER_NAME, S3_INTEGRATOR], + status="active", + timeout=1000, + idle_period=30, + ) def get_traces(tempo_host: str, service_name="tracegen-otlp_http", tls=True): diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py deleted file mode 100644 index f28a3ea..0000000 --- a/tests/integration/test_charm.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python3 -# Copyright 2023 Ubuntu -# See LICENSE file for licensing details. 
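
Two things worth noting in the conftest change above: the `tempo_charm` fixture is now session-scoped and honours a `TEMPO_CHARM` environment variable, so the charm is packed at most once per test session, and CI can skip packing entirely by pointing the variable at a prebuilt artifact (for example `TEMPO_CHARM=./tempo-coordinator-k8s_ubuntu-22.04-amd64.charm`, the same path the fixture falls back to). The stray `from paramiko.proxy import subprocess` import that slips in here is removed again in patch 09/12.
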
- -import logging -from pathlib import Path -from textwrap import dedent -from types import SimpleNamespace - -import pytest -import yaml -from helpers import deploy_literal_bundle -from pytest_operator.plugin import OpsTest - -logger = logging.getLogger(__name__) - -METADATA = yaml.safe_load(Path("./charmcraft.yaml").read_text()) -mc = SimpleNamespace(name="mc") - - -@pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest): - """Build the charm-under-test and deploy it together with related charms.""" - # Build and deploy charm from local source folder - charm = await ops_test.build_charm(".") - - test_bundle = dedent( - f""" - --- - bundle: kubernetes - name: test-charm - applications: - {mc.name}: - charm: {charm} - trust: true - resources: - nginx-image: {METADATA["resources"]["nginx-image"]["upstream-source"]} - nginx-prometheus-exporter-image: {METADATA["resources"]["nginx-prometheus-exporter-image"]["upstream-source"]} - scale: 1 - loki: - charm: loki-k8s - trust: true - channel: edge - scale: 1 - prometheus: - charm: prometheus-k8s - trust: true - channel: edge - scale: 1 - grafana: - charm: grafana-k8s - trust: true - channel: edge - scale: 1 - - relations: - - [mc:logging, loki:logging] - - [mc:metrics-endpoint, prometheus:metrics-endpoint] - - [mc:grafana-dashboard, grafana:grafana-dashboard] - """ - ) - - # Deploy the charm and wait for active/idle status - await deploy_literal_bundle(ops_test, test_bundle) # See appendix below - await ops_test.model.wait_for_idle( - apps=["loki", "prometheus", "grafana"], - status="active", - raise_on_error=False, - timeout=600, - idle_period=30, - ) - await ops_test.model.wait_for_idle( - apps=[mc.name], status="blocked", raise_on_error=False, timeout=600, idle_period=30 - ) diff --git a/tests/integration/test_ingressed_tls.py b/tests/integration/test_ingressed_tls.py index ff003ab..e2fc367 100644 --- a/tests/integration/test_ingressed_tls.py +++ b/tests/integration/test_ingressed_tls.py @@ -45,8 +45,7 @@ async def get_tempo_ingressed_endpoint(hostname, protocol): @pytest.mark.setup @pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest): - tempo_charm = await ops_test.build_charm(".") +async def test_build_and_deploy(ops_test: OpsTest, tempo_charm): resources = { "nginx-image": METADATA["resources"]["nginx-image"]["upstream-source"], "nginx-prometheus-exporter-image": METADATA["resources"][ @@ -66,15 +65,16 @@ async def test_build_and_deploy(ops_test: OpsTest): # deploy cluster await deploy_cluster(ops_test) - await asyncio.gather( - ops_test.model.wait_for_idle( - apps=[APP_NAME, SSC_APP_NAME, TRAEFIK_APP_NAME], - status="active", - raise_on_blocked=True, - timeout=10000, - raise_on_error=False, - ), - ) + with ops_test.fast_forward(): # worker will take a little to report active + await asyncio.gather( + ops_test.model.wait_for_idle( + apps=[APP_NAME, SSC_APP_NAME, TRAEFIK_APP_NAME], + status="active", + raise_on_blocked=True, + timeout=10000, + raise_on_error=False, + ), + ) @pytest.mark.setup @@ -97,13 +97,14 @@ async def test_relate(ops_test: OpsTest): SSC_APP_NAME + ":certificates", TRAEFIK_APP_NAME + ":certificates" ) await ops_test.model.integrate(APP_NAME + ":ingress", TRAEFIK_APP_NAME + ":traefik-route") - await ops_test.model.wait_for_idle( - apps=[APP_NAME, SSC_APP_NAME, TRAEFIK_APP_NAME, WORKER_NAME], - status="active", - timeout=1000, - # make idle period 1 minute, as Tempo workload might not be up yet - idle_period=60, - ) + with ops_test.fast_forward(): # worker will take a little 
to report active + await ops_test.model.wait_for_idle( + apps=[APP_NAME, SSC_APP_NAME, TRAEFIK_APP_NAME, WORKER_NAME], + status="active", + timeout=1000, + # make idle period 1 minute, as Tempo workload might not be up yet + idle_period=60, + ) # TODO: Uncomment and fix once below issue is fixed @@ -134,11 +135,12 @@ async def test_verify_traces_force_enabled_protocols_tls(ops_test: OpsTest, nonc f"always_enable_{protocol}": "True", } ) - await ops_test.model.wait_for_idle( - apps=[APP_NAME, WORKER_NAME], - status="active", - timeout=1000, - ) + with ops_test.fast_forward(): # worker will take a little to report active + await ops_test.model.wait_for_idle( + apps=[APP_NAME, WORKER_NAME], + status="active", + timeout=1000, + ) tempo_host = await get_ingress_proxied_hostname(ops_test) tempo_endpoint = await get_tempo_ingressed_endpoint(tempo_host, protocol=protocol) diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 0b1183c..0beb97c 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -22,11 +22,10 @@ @pytest.mark.setup @pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest): +async def test_build_and_deploy(ops_test: OpsTest, tempo_charm): # Given a fresh build of the charm # When deploying it together with testers # Then applications should eventually be created - tempo_charm = await ops_test.build_charm(".") tester_charm = await ops_test.build_charm("./tests/integration/tester/") tester_grpc_charm = await ops_test.build_charm("./tests/integration/tester-grpc/") resources = { @@ -80,11 +79,12 @@ async def test_relate(ops_test: OpsTest): # then relation should appear await ops_test.model.add_relation(APP_NAME + ":tracing", TESTER_APP_NAME + ":tracing") await ops_test.model.add_relation(APP_NAME + ":tracing", TESTER_GRPC_APP_NAME + ":tracing") - await ops_test.model.wait_for_idle( - apps=[APP_NAME, WORKER_NAME, TESTER_APP_NAME, TESTER_GRPC_APP_NAME], - status="active", - timeout=1000, - ) + with ops_test.fast_forward(): + await ops_test.model.wait_for_idle( + apps=[APP_NAME, WORKER_NAME, TESTER_APP_NAME, TESTER_GRPC_APP_NAME], + status="active", + timeout=1000, + ) async def test_verify_traces_http(ops_test: OpsTest): diff --git a/tests/integration/test_scaling_monolithic.py b/tests/integration/test_scaling_monolithic.py index 348c0b1..b7b4f8c 100644 --- a/tests/integration/test_scaling_monolithic.py +++ b/tests/integration/test_scaling_monolithic.py @@ -17,9 +17,7 @@ @pytest.mark.setup @pytest.mark.abort_on_fail -async def test_deploy_tempo(ops_test: OpsTest): - tempo_charm = await ops_test.build_charm(".") - +async def test_deploy_tempo(ops_test: OpsTest, tempo_charm): resources = { "nginx-image": METADATA["resources"]["nginx-image"]["upstream-source"], "nginx-prometheus-exporter-image": METADATA["resources"][ diff --git a/tests/integration/test_self_monitoring.py b/tests/integration/test_self_monitoring.py index 5bdf0e7..7e41cc1 100644 --- a/tests/integration/test_self_monitoring.py +++ b/tests/integration/test_self_monitoring.py @@ -21,9 +21,8 @@ @pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest): +async def test_build_and_deploy(ops_test: OpsTest, tempo_charm): """Build the charm-under-test and deploy it together with related charms.""" - charm = await ops_test.build_charm(".") test_bundle = dedent( f""" @@ -32,7 +31,7 @@ async def test_build_and_deploy(ops_test: OpsTest): name: test-charm applications: {coord.name}: - charm: {charm} + charm: 
{tempo_charm} trust: true scale: 1 resources: diff --git a/tests/integration/test_self_tracing.py b/tests/integration/test_self_tracing.py index b6f7cc2..4dbe12b 100644 --- a/tests/integration/test_self_tracing.py +++ b/tests/integration/test_self_tracing.py @@ -18,8 +18,7 @@ @pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest): - tempo_charm = await ops_test.build_charm(".") +async def test_build_and_deploy(ops_test: OpsTest, tempo_charm): resources = { "nginx-image": METADATA["resources"]["nginx-image"]["upstream-source"], "nginx-prometheus-exporter-image": METADATA["resources"][ diff --git a/tests/integration/test_self_tracing_remote.py b/tests/integration/test_self_tracing_remote.py index 62a9f78..4a14a59 100644 --- a/tests/integration/test_self_tracing_remote.py +++ b/tests/integration/test_self_tracing_remote.py @@ -24,8 +24,7 @@ @pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest): - tempo_charm = await ops_test.build_charm(".") +async def test_build_and_deploy(ops_test: OpsTest, tempo_charm): resources = { "nginx-image": METADATA["resources"]["nginx-image"]["upstream-source"], "nginx-prometheus-exporter-image": METADATA["resources"][ diff --git a/tests/integration/test_tls.py b/tests/integration/test_tls.py index 2f0f33f..d8c1aa3 100644 --- a/tests/integration/test_tls.py +++ b/tests/integration/test_tls.py @@ -36,8 +36,7 @@ async def get_tempo_traces_internal_endpoint(ops_test: OpsTest, protocol): @pytest.mark.setup @pytest.mark.abort_on_fail -async def test_build_and_deploy(ops_test: OpsTest): - tempo_charm = await ops_test.build_charm(".") +async def test_build_and_deploy(ops_test: OpsTest, tempo_charm): resources = { "nginx-image": METADATA["resources"]["nginx-image"]["upstream-source"], "nginx-prometheus-exporter-image": METADATA["resources"][ From 504ef8cfb1db1fd007ce127052abf585ff92329e Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Tue, 10 Sep 2024 12:30:07 +0200 Subject: [PATCH 09/12] fix for cosl bump --- tests/integration/conftest.py | 3 +-- tests/scenario/conftest.py | 1 + tests/scenario/test_tempo_clustered.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 3c342c6..f2a1daa 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -9,7 +9,6 @@ from pathlib import Path from subprocess import check_call -from paramiko.proxy import subprocess from pytest import fixture from pytest_operator.plugin import OpsTest @@ -25,7 +24,7 @@ @fixture(scope="session") def tempo_charm(): """Tempo charm used for integration testing.""" - if tempo_charm:=os.getenv("TEMPO_CHARM"): + if tempo_charm := os.getenv("TEMPO_CHARM"): return tempo_charm check_call(["charmcraft", "pack", "-v"]) return "./tempo-coordinator-k8s_ubuntu-22.04-amd64.charm" diff --git a/tests/scenario/conftest.py b/tests/scenario/conftest.py index 4e8c373..f9abfa3 100644 --- a/tests/scenario/conftest.py +++ b/tests/scenario/conftest.py @@ -22,6 +22,7 @@ def tempo_charm(): _namespace="test-namespace", _patch=lambda _: None, get_status=lambda _: ActiveStatus(""), + is_ready=lambda _: True ): yield TempoCoordinatorCharm diff --git a/tests/scenario/test_tempo_clustered.py b/tests/scenario/test_tempo_clustered.py index 62a3fa2..ee6b1df 100644 --- a/tests/scenario/test_tempo_clustered.py +++ b/tests/scenario/test_tempo_clustered.py @@ -55,7 +55,7 @@ def all_worker_with_initial_config(all_worker: Relation, coordinator_with_initia @pytest.fixture def 
certs_relation(): - return scenario.Relation("certificates") + return scenario.Relation("certificates", remote_app_data={}) MOCK_SERVER_CERT = "SERVER_CERT-foo" From 676254a173e5ba01fd87f8eda86ad931532da340 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Tue, 10 Sep 2024 12:30:19 +0200 Subject: [PATCH 10/12] lint --- tests/scenario/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/scenario/conftest.py b/tests/scenario/conftest.py index f9abfa3..7ce008d 100644 --- a/tests/scenario/conftest.py +++ b/tests/scenario/conftest.py @@ -22,7 +22,7 @@ def tempo_charm(): _namespace="test-namespace", _patch=lambda _: None, get_status=lambda _: ActiveStatus(""), - is_ready=lambda _: True + is_ready=lambda _: True, ): yield TempoCoordinatorCharm From e8b6435b2e31e2764048f1dc2d67d7c4df876510 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Tue, 10 Sep 2024 12:45:48 +0200 Subject: [PATCH 11/12] async with --- tests/integration/helpers.py | 2 +- tests/integration/test_ingressed_tls.py | 6 +++--- tests/integration/test_integration.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index ba25856..97f60a5 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -299,7 +299,7 @@ async def deploy_cluster(ops_test: OpsTest, tempo_app=APP_NAME): await ops_test.model.integrate(tempo_app + ":tempo-cluster", WORKER_NAME + ":tempo-cluster") await deploy_and_configure_minio(ops_test) - with ops_test.fast_forward(): + async with ops_test.fast_forward(): await ops_test.model.wait_for_idle( apps=[tempo_app, WORKER_NAME, S3_INTEGRATOR], status="active", diff --git a/tests/integration/test_ingressed_tls.py b/tests/integration/test_ingressed_tls.py index e2fc367..a766eb1 100644 --- a/tests/integration/test_ingressed_tls.py +++ b/tests/integration/test_ingressed_tls.py @@ -65,7 +65,7 @@ async def test_build_and_deploy(ops_test: OpsTest, tempo_charm): # deploy cluster await deploy_cluster(ops_test) - with ops_test.fast_forward(): # worker will take a little to report active + async with ops_test.fast_forward(): # worker will take a little to report active await asyncio.gather( ops_test.model.wait_for_idle( apps=[APP_NAME, SSC_APP_NAME, TRAEFIK_APP_NAME], @@ -97,7 +97,7 @@ async def test_relate(ops_test: OpsTest): SSC_APP_NAME + ":certificates", TRAEFIK_APP_NAME + ":certificates" ) await ops_test.model.integrate(APP_NAME + ":ingress", TRAEFIK_APP_NAME + ":traefik-route") - with ops_test.fast_forward(): # worker will take a little to report active + async with ops_test.fast_forward(): # worker will take a little to report active await ops_test.model.wait_for_idle( apps=[APP_NAME, SSC_APP_NAME, TRAEFIK_APP_NAME, WORKER_NAME], status="active", @@ -135,7 +135,7 @@ async def test_verify_traces_force_enabled_protocols_tls(ops_test: OpsTest, nonc f"always_enable_{protocol}": "True", } ) - with ops_test.fast_forward(): # worker will take a little to report active + async with ops_test.fast_forward(): # worker will take a little to report active await ops_test.model.wait_for_idle( apps=[APP_NAME, WORKER_NAME], status="active", diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 0beb97c..e9f7d33 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -79,7 +79,7 @@ async def test_relate(ops_test: OpsTest): # then relation should appear await ops_test.model.add_relation(APP_NAME + ":tracing", TESTER_APP_NAME + 
":tracing") await ops_test.model.add_relation(APP_NAME + ":tracing", TESTER_GRPC_APP_NAME + ":tracing") - with ops_test.fast_forward(): + async with ops_test.fast_forward(): await ops_test.model.wait_for_idle( apps=[APP_NAME, WORKER_NAME, TESTER_APP_NAME, TESTER_GRPC_APP_NAME], status="active", From eae7faa1e882cad14996bdf219aa04abb6254b22 Mon Sep 17 00:00:00 2001 From: Pietro Pasotti Date: Wed, 11 Sep 2024 09:24:47 +0200 Subject: [PATCH 12/12] removed one empty file --- tests/integration/conftest.py | 1 - tests/integration/test_charm.py | 0 2 files changed, 1 deletion(-) delete mode 100644 tests/integration/test_charm.py diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index c8a1683..e50f125 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -8,7 +8,6 @@ import subprocess import tempfile from pathlib import Path -from subprocess import check_call from pytest import fixture from pytest_operator.plugin import OpsTest diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py deleted file mode 100644 index e69de29..0000000