From e1d846ec69f62d27cb311e64ef65183d2d0511a9 Mon Sep 17 00:00:00 2001 From: Paulo Machado Date: Wed, 11 Oct 2023 09:36:00 -0300 Subject: [PATCH] DPE-2656 Profile limite memory config support (#283) * first pass * the mandatory libpatch bump * fix bad c-c c-v * prettier assignment * exporting requirements without hashes * revert pydantic bump --- config.yaml | 6 + .../data_platform_libs/v0/data_models.py | 354 ++++++++++++++++++ lib/charms/postgresql_k8s/v0/postgresql.py | 28 +- poetry.lock | 37 +- requirements.txt | 6 +- src/charm.py | 40 +- src/config.py | 55 +++ templates/patroni.yml.j2 | 5 + tox.ini | 4 +- 9 files changed, 500 insertions(+), 35 deletions(-) create mode 100644 lib/charms/data_platform_libs/v0/data_models.py create mode 100644 src/config.py diff --git a/config.yaml b/config.yaml index 0da20fda2f..8885803da9 100644 --- a/config.yaml +++ b/config.yaml @@ -34,3 +34,9 @@ options: minimal running performance. type: string default: production + profile-limit-memory: + type: int + description: | + Amount of memory in Megabytes to limit PostgreSQL and associated process to. + If unset, this will be decided according to the default memory limit in the selected profile. + Only comes into effect when the `production` profile is selected. diff --git a/lib/charms/data_platform_libs/v0/data_models.py b/lib/charms/data_platform_libs/v0/data_models.py new file mode 100644 index 0000000000..a1dbb8299a --- /dev/null +++ b/lib/charms/data_platform_libs/v0/data_models.py @@ -0,0 +1,354 @@ +# Copyright 2023 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r"""Library to provide simple API for promoting typed, validated and structured dataclass in charms. + +Dict-like data structure are often used in charms. They are used for config, action parameters +and databag. This library aims at providing simple API for using pydantic BaseModel-derived class +in charms, in order to enhance: +* Validation, by embedding custom business logic to validate single parameters or even have + validators that acts across different fields +* Parsing, by loading data into pydantic object we can both allow for other types (e.g. float) to + be used in configuration/parameters as well as specify even nested complex objects for databags +* Static typing checks, by moving from dict-like object to classes with typed-annotated properties, + that can be statically checked using mypy to ensure that the code is correct. + +Pydantic models can be used on: + +* Charm Configuration (as defined in config.yaml) +* Actions parameters (as defined in actions.yaml) +* Application/Unit Databag Information (thus making it more structured and encoded) + + +## Creating models + +Any data-structure can be modeled using dataclasses instead of dict-like objects (e.g. storing +config, action parameters and databags). 
Within pydantic, we can define dataclasses that provides +also parsing and validation on standard dataclass implementation: + +```python + +from charms.data_platform_libs.v0.data_models import BaseConfigModel + +class MyConfig(BaseConfigModel): + + my_key: int + + @validator("my_key") + def is_lower_than_100(cls, v: int): + if v > 100: + raise ValueError("Too high") + +``` + +This should allow to collapse both parsing and validation as the dataclass object is parsed and +created: + +```python +dataclass = MyConfig(my_key="1") + +dataclass.my_key # this returns 1 (int) +dataclass["my_key"] # this returns 1 (int) + +dataclass = MyConfig(my_key="102") # this returns a ValueError("Too High") +``` + +## Charm Configuration Model + +Using the class above, we can implement parsing and validation of configuration by simply +extending our charms using the `TypedCharmBase` class, as shown below. + +```python +class MyCharm(TypedCharmBase[MyConfig]): + config_type = MyConfig + + # everywhere in the code you will have config property already parsed and validate + def my_method(self): + self.config: MyConfig +``` + +## Action parameters + +In order to parse action parameters, we can use a decorator to be applied to action event +callbacks, as shown below. + +```python +@validate_params(PullActionModel) +def _pull_site_action( + self, event: ActionEvent, + params: Optional[Union[PullActionModel, ValidationError]] = None +): + if isinstance(params, ValidationError): + # handle errors + else: + # do stuff +``` + +Note that this changes the signature of the callbacks by adding an extra parameter with the parsed +counterpart of the `event.params` dict-like field. If validation fails, we return (not throw!) the +exception, to be handled (or raised) in the callback. + +## Databag + +In order to parse databag fields, we define a decorator to be applied to base relation event +callbacks. + +```python +@parse_relation_data(app_model=AppDataModel, unit_model=UnitDataModel) +def _on_cluster_relation_joined( + self, event: RelationEvent, + app_data: Optional[Union[AppDataModel, ValidationError]] = None, + unit_data: Optional[Union[UnitDataModel, ValidationError]] = None +) -> None: + ... +``` + +The parameters `app_data` and `unit_data` refers to the databag of the entity which fired the +RelationEvent. + +When we want to access to a relation databag outsides of an action, it can be useful also to +compact multiple databags into a single object (if there are no conflicting fields), e.g. + +```python + +class ProviderDataBag(BaseClass): + provider_key: str + +class RequirerDataBag(BaseClass): + requirer_key: str + +class MergedDataBag(ProviderDataBag, RequirerDataBag): + pass + +merged_data = get_relation_data_as( + MergedDataBag, relation.data[self.app], relation.data[relation.app] +) + +merged_data.requirer_key +merged_data.provider_key + +``` + +The above code can be generalized to other kinds of merged objects, e.g. application and unit, and +it can be extended to multiple sources beyond 2: + +```python +merged_data = get_relation_data_as( + MergedDataBag, relation.data[self.app], relation.data[relation.app], ... 
+) +``` + +""" + +import json +from functools import reduce, wraps +from typing import Callable, Generic, MutableMapping, Optional, Type, TypeVar, Union + +import pydantic +from ops.charm import ActionEvent, CharmBase, RelationEvent +from ops.model import RelationDataContent +from pydantic import BaseModel, ValidationError + +# The unique Charmhub library identifier, never change it +LIBID = "cb2094c5b07d47e1bf346aaee0fcfcfe" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 4 + +PYDEPS = ["ops>=2.0.0", "pydantic>=1.10,<2"] + +G = TypeVar("G") +T = TypeVar("T", bound=BaseModel) +AppModel = TypeVar("AppModel", bound=BaseModel) +UnitModel = TypeVar("UnitModel", bound=BaseModel) + +DataBagNativeTypes = (int, str, float) + + +class BaseConfigModel(BaseModel): + """Class to be used for defining the structured configuration options.""" + + def __getitem__(self, x): + """Return the item using the notation instance[key].""" + return getattr(self, x.replace("-", "_")) + + +class TypedCharmBase(CharmBase, Generic[T]): + """Class to be used for extending config-typed charms.""" + + config_type: Type[T] + + @property + def config(self) -> T: + """Return a config instance validated and parsed using the provided pydantic class.""" + translated_keys = {k.replace("-", "_"): v for k, v in self.model.config.items()} + return self.config_type(**translated_keys) + + +def validate_params(cls: Type[T]): + """Return a decorator to allow pydantic parsing of action parameters. + + Args: + cls: Pydantic class representing the model to be used for parsing the content of the + action parameter + """ + + def decorator( + f: Callable[[CharmBase, ActionEvent, Union[T, ValidationError]], G] + ) -> Callable[[CharmBase, ActionEvent], G]: + @wraps(f) + def event_wrapper(self: CharmBase, event: ActionEvent): + try: + params = cls( + **{key.replace("-", "_"): value for key, value in event.params.items()} + ) + except ValidationError as e: + params = e + return f(self, event, params) + + return event_wrapper + + return decorator + + +def write(relation_data: RelationDataContent, model: BaseModel): + """Write the data contained in a domain object to the relation databag. + + Args: + relation_data: pointer to the relation databag + model: instance of pydantic model to be written + """ + for key, value in model.dict(exclude_none=False).items(): + if value: + relation_data[key.replace("_", "-")] = ( + str(value) + if any(isinstance(value, _type) for _type in DataBagNativeTypes) + else json.dumps(value) + ) + else: + relation_data[key.replace("_", "-")] = "" + + +def read(relation_data: MutableMapping[str, str], obj: Type[T]) -> T: + """Read data from a relation databag and parse it into a domain object. 
+ + Args: + relation_data: pointer to the relation databag + obj: pydantic class representing the model to be used for parsing + """ + return obj( + **{ + field_name: ( + relation_data[parsed_key] + if field.outer_type_ in DataBagNativeTypes + else json.loads(relation_data[parsed_key]) + ) + for field_name, field in obj.__fields__.items() + # pyright: ignore[reportGeneralTypeIssues] + if (parsed_key := field_name.replace("_", "-")) in relation_data + if relation_data[parsed_key] + } + ) + + +def parse_relation_data( + app_model: Optional[Type[AppModel]] = None, unit_model: Optional[Type[UnitModel]] = None +): + """Return a decorator to allow pydantic parsing of the app and unit databags. + + Args: + app_model: Pydantic class representing the model to be used for parsing the content of the + app databag. None if no parsing ought to be done. + unit_model: Pydantic class representing the model to be used for parsing the content of the + unit databag. None if no parsing ought to be done. + """ + + def decorator( + f: Callable[ + [ + CharmBase, + RelationEvent, + Optional[Union[AppModel, ValidationError]], + Optional[Union[UnitModel, ValidationError]], + ], + G, + ] + ) -> Callable[[CharmBase, RelationEvent], G]: + @wraps(f) + def event_wrapper(self: CharmBase, event: RelationEvent): + try: + app_data = ( + read(event.relation.data[event.app], app_model) + if app_model is not None and event.app + else None + ) + except pydantic.ValidationError as e: + app_data = e + + try: + unit_data = ( + read(event.relation.data[event.unit], unit_model) + if unit_model is not None and event.unit + else None + ) + except pydantic.ValidationError as e: + unit_data = e + + return f(self, event, app_data, unit_data) + + return event_wrapper + + return decorator + + +class RelationDataModel(BaseModel): + """Base class to be used for creating data models to be used for relation databags.""" + + def write(self, relation_data: RelationDataContent): + """Write data to a relation databag. + + Args: + relation_data: pointer to the relation databag + """ + return write(relation_data, self) + + @classmethod + def read(cls, relation_data: RelationDataContent) -> "RelationDataModel": + """Read data from a relation databag and parse it as an instance of the pydantic class. + + Args: + relation_data: pointer to the relation databag + """ + return read(relation_data, cls) + + +def get_relation_data_as( + model_type: Type[AppModel], + *relation_data: RelationDataContent, +) -> Union[AppModel, ValidationError]: + """Return a merged representation of the provider and requirer databag into a single object. 
+ + Args: + model_type: pydantic class representing the merged databag + relation_data: list of RelationDataContent of provider/requirer/unit sides + """ + try: + app_data = read(reduce(lambda x, y: dict(x) | dict(y), relation_data, {}), model_type) + except pydantic.ValidationError as e: + app_data = e + return app_data diff --git a/lib/charms/postgresql_k8s/v0/postgresql.py b/lib/charms/postgresql_k8s/v0/postgresql.py index a8b5931030..7df406de0f 100644 --- a/lib/charms/postgresql_k8s/v0/postgresql.py +++ b/lib/charms/postgresql_k8s/v0/postgresql.py @@ -32,7 +32,7 @@ # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 14 +LIBPATCH = 15 INVALID_EXTRA_USER_ROLE_BLOCKING_MESSAGE = "invalid role(s) for extra user roles" @@ -406,6 +406,7 @@ def update_user_password(self, username: str, password: str) -> None: Raises: PostgreSQLUpdateUserPasswordError if the password couldn't be changed. """ + connection = None try: with self._connect_to_database() as connection, connection.cursor() as cursor: cursor.execute( @@ -420,19 +421,39 @@ def update_user_password(self, username: str, password: str) -> None: if connection is not None: connection.close() + def is_restart_pending(self) -> bool: + """Query pg_settings for pending restart.""" + connection = None + try: + with self._connect_to_database() as connection, connection.cursor() as cursor: + cursor.execute("SELECT COUNT(*) FROM pg_settings WHERE pending_restart=True;") + return cursor.fetchone()[0] > 0 + except psycopg2.OperationalError: + logger.warning("Failed to connect to PostgreSQL.") + return False + except psycopg2.Error as e: + logger.error(f"Failed to check if restart is pending: {e}") + return False + finally: + if connection: + connection.close() + @staticmethod def build_postgresql_parameters( - profile: str, available_memory: int + profile: str, available_memory: int, limit_memory: Optional[int] = None ) -> Optional[dict[str, str]]: """Builds the PostgreSQL parameters. Args: profile: the profile to use. available_memory: available memory to use in calculation in bytes. + limit_memory: (optional) limit memory to use in calculation in bytes. Returns: Dictionary with the PostgreSQL parameters. """ + if limit_memory: + available_memory = min(available_memory, limit_memory) logger.debug(f"Building PostgreSQL parameters for {profile=} and {available_memory=}") if profile == "production": # Use 25% of the available memory for shared_buffers. @@ -446,3 +467,6 @@ def build_postgresql_parameters( } return parameters + else: + # Return default + return {"shared_buffers": "128MB"} diff --git a/poetry.lock b/poetry.lock index c92c682a94..9704e2b79f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -179,19 +179,19 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.31.61" +version = "1.31.62" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.61-py3-none-any.whl", hash = "sha256:433bf93af09ad205d6db4c2ffc1f0e3193ddad4e0aced0a68ad8b0fa9de903e2"}, - {file = "botocore-1.31.61.tar.gz", hash = "sha256:39b059603f0e92a26599eecc7fe9b141f13eb412c964786ca3a7df5375928c87"}, + {file = "botocore-1.31.62-py3-none-any.whl", hash = "sha256:be792d806afc064694a2d0b9b25779f3ca0c1584b29a35ac32e67f0064ddb8b7"}, + {file = "botocore-1.31.62.tar.gz", hash = "sha256:272b78ac65256b6294cb9cdb0ac484d447ad3a85642e33cb6a3b1b8afee15a4c"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = ">=1.25.4,<1.27" +urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} [package.extras] crt = ["awscrt (==0.16.26)"] @@ -864,13 +864,13 @@ PyYAML = ">=3.11" [[package]] name = "kubernetes" -version = "28.1.0" +version = "27.2.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" files = [ - {file = "kubernetes-28.1.0-py2.py3-none-any.whl", hash = "sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d"}, - {file = "kubernetes-28.1.0.tar.gz", hash = "sha256:1468069a573430fb1cb5ad22876868f57977930f80a6749405da31cd6086a7e9"}, + {file = "kubernetes-27.2.0-py2.py3-none-any.whl", hash = "sha256:0f9376329c85cf07615ed6886bf9bf21eb1cbfc05e14ec7b0f74ed8153cd2815"}, + {file = "kubernetes-27.2.0.tar.gz", hash = "sha256:d479931c6f37561dbfdf28fc5f46384b1cb8b28f9db344ed4a232ce91990825a"}, ] [package.dependencies] @@ -882,7 +882,7 @@ pyyaml = ">=5.4.1" requests = "*" requests-oauthlib = "*" six = ">=1.9.0" -urllib3 = ">=1.24.2,<2.0" +urllib3 = ">=1.24.2" websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] @@ -1936,19 +1936,20 @@ typing-extensions = ">=3.7.4" [[package]] name = "urllib3" -version = "1.26.17" +version = "2.0.6" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7" files = [ - {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"}, - {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"}, + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "wcwidth" @@ -1963,13 +1964,13 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.3" +version = "1.6.4" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, - {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, + {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, + {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, ] [package.extras] diff --git a/requirements.txt b/requirements.txt index af6c119abc..a26ff4fd0e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ anyio==4.0.0 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" attrs==23.1.0 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" boto3==1.28.61 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" -botocore==1.31.61 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" +botocore==1.31.62 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" certifi==2023.7.22 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" cffi==1.16.0 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" charset-normalizer==3.3.0 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" @@ -34,5 +34,5 @@ six==1.16.0 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" sniffio==1.3.0 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" tenacity==8.2.3 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" typing-extensions==4.8.0 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" -urllib3==1.26.17 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" -websocket-client==1.6.3 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" +urllib3==2.0.6 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" +websocket-client==1.6.4 ; python_full_version >= "3.10.6" and python_full_version < "4.0.0" diff --git a/src/charm.py b/src/charm.py 
index 4f1107c6ac..0c166cb172 100755 --- a/src/charm.py +++ b/src/charm.py @@ -8,6 +8,7 @@ import logging from typing import Dict, List, Optional +from charms.data_platform_libs.v0.data_models import TypedCharmBase from charms.grafana_k8s.v0.grafana_dashboard import GrafanaDashboardProvider from charms.loki_k8s.v0.loki_push_api import LogProxyConsumer from charms.observability_libs.v1.kubernetes_service_patch import KubernetesServicePatch @@ -26,7 +27,6 @@ from ops import JujuVersion from ops.charm import ( ActionEvent, - CharmBase, HookEvent, LeaderElectedEvent, RelationDepartedEvent, @@ -47,6 +47,7 @@ from tenacity import RetryError, Retrying, stop_after_attempt, wait_fixed from backups import PostgreSQLBackups +from config import CharmConfig from constants import ( APP_SCOPE, BACKUP_USER, @@ -81,10 +82,16 @@ logger = logging.getLogger(__name__) +# http{x,core} clutter the logs with debug messages +logging.getLogger("httpcore").setLevel(logging.ERROR) +logging.getLogger("httpx").setLevel(logging.ERROR) -class PostgresqlOperatorCharm(CharmBase): + +class PostgresqlOperatorCharm(TypedCharmBase[CharmConfig]): """Charmed Operator for the PostgreSQL database.""" + config_type = CharmConfig + def __init__(self, *args): super().__init__(*args) @@ -500,6 +507,13 @@ def _on_config_changed(self, _) -> None: logger.debug("Early exit on_config_changed: cluster not initialised yet") return + if not self.upgrade.idle: + logger.debug("Early exit on_config_changed: upgrade in progress") + return + + # update config on every run + self.update_config() + if not self.unit.is_leader(): return @@ -528,13 +542,10 @@ def enable_disable_extensions(self, database: str = None) -> None: database: optional database where to enable/disable the extension. """ orginial_status = self.unit.status - for config, enable in self.model.config.items(): - # Filter config option not related to plugins. - if not config.startswith("plugin_"): - continue - + for plugin in self.config.plugin_keys(): + enable = self.config[plugin] # Enable or disable the plugin/extension. - extension = "_".join(config.split("_")[1:-1]) + extension = "_".join(plugin.split("_")[1:-1]) try: self.unit.status = WaitingStatus( f"{'Enabling' if enable else 'Disabling'} {extension}" @@ -1316,6 +1327,7 @@ def _restart(self, event: RunWithLock) -> None: return try: + logger.debug("Restarting PostgreSQL") self._patroni.restart_postgresql() except RetryError: error_message = "failed to restart PostgreSQL" @@ -1345,10 +1357,15 @@ def _is_workload_running(self) -> bool: def update_config(self, is_creating_backup: bool = False) -> bool: """Updates Patroni config file based on the existence of the TLS files.""" # Retrieve PostgreSQL parameters. + if self.config.profile_limit_memory: + limit_memory = self.config.profile_limit_memory * 10**6 + else: + limit_memory = None postgresql_parameters = self.postgresql.build_postgresql_parameters( - self.config["profile"], self.get_available_memory() + self.config.profile, self.get_available_memory(), limit_memory ) + logger.info("Updating Patroni config file") # Update and reload configuration based on TLS files availability. 
self._patroni.render_patroni_yml_file( connectivity=self.unit_peer_data.get("connectivity", "on") == "on", @@ -1373,13 +1390,16 @@ def update_config(self, is_creating_backup: bool = False) -> bool: logger.debug("Early exit update_config: Patroni not started yet") return False - restart_postgresql = self.is_tls_enabled != self.postgresql.is_tls_enabled() + restart_postgresql = ( + self.is_tls_enabled != self.postgresql.is_tls_enabled() + ) or self.postgresql.is_restart_pending() self._patroni.reload_patroni_configuration() self.unit_peer_data.update({"tls": "enabled" if self.is_tls_enabled else ""}) # Restart PostgreSQL if TLS configuration has changed # (so the both old and new connections use the configuration). if restart_postgresql: + logger.info("PostgreSQL restart required") self.metrics_endpoint.update_scrape_job_spec( self._generate_metrics_jobs(self.is_tls_enabled) ) diff --git a/src/config.py b/src/config.py new file mode 100644 index 0000000000..c612d90702 --- /dev/null +++ b/src/config.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python3 +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Structured configuration for the PostgreSQL charm.""" +import logging +from typing import Optional + +from charms.data_platform_libs.v0.data_models import BaseConfigModel +from pydantic import validator + +logger = logging.getLogger(__name__) + + +class CharmConfig(BaseConfigModel): + """Manager for the structured configuration.""" + + profile: str + profile_limit_memory: Optional[int] + plugin_citext_enable: bool + plugin_debversion_enable: bool + plugin_hstore_enable: bool + plugin_pg_trgm_enable: bool + plugin_plpython3u_enable: bool + plugin_unaccent_enable: bool + + @classmethod + def keys(cls) -> list[str]: + """Return config as list items.""" + return list(cls.__fields__.keys()) + + @classmethod + def plugin_keys(cls) -> filter: + """Return plugin config names in a iterable.""" + return filter(lambda x: x.startswith("plugin_"), cls.keys()) + + @validator("profile") + @classmethod + def profile_values(cls, value: str) -> Optional[str]: + """Check profile config option is one of `testing` or `production`.""" + if value not in ["testing", "production"]: + raise ValueError("Value not one of 'testing' or 'production'") + + return value + + @validator("profile_limit_memory") + @classmethod + def profile_limit_memory_validator(cls, value: int) -> Optional[int]: + """Check profile limit memory.""" + if value < 128: + raise ValueError("PostgreSQL Charm requires at least 128MB") + if value > 9999999: + raise ValueError("`profile-limit-memory` limited to 7 digits (9999999MB)") + + return value diff --git a/templates/patroni.yml.j2 b/templates/patroni.yml.j2 index 48ee71a929..bdf0515b76 100644 --- a/templates/patroni.yml.j2 +++ b/templates/patroni.yml.j2 @@ -97,6 +97,11 @@ postgresql: ssl_cert_file: {{ storage_path }}/cert.pem ssl_key_file: {{ storage_path }}/key.pem {%- endif %} + {%- if pg_parameters %} + {%- for key, value in pg_parameters.items() %} + {{key}}: {{value}} + {%- endfor -%} + {% endif %} pgpass: /tmp/pgpass pg_hba: - local all backup peer map=operator diff --git a/tox.ini b/tox.ini index 0e04ccec28..180ef1324d 100644 --- a/tox.ini +++ b/tox.ini @@ -188,5 +188,5 @@ commands = [testenv:poetry-lock] description = Install, lock and export poetry dependencies commands = - poetry lock - poetry export -f requirements.txt -o requirements.txt + poetry lock --no-update + poetry export -f requirements.txt -o requirements.txt --without-hashes
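
---

Usage note (outside the patch itself): with this change an operator can cap the memory PostgreSQL is tuned for with something like `juju config <postgresql-app> profile-limit-memory=2048` (the application name here is an assumption); `src/config.py` validates the value to the 128 MB to 9999999 MB range, and it only takes effect under the `production` profile.

Below is a minimal, hypothetical sketch of the capping logic the patch introduces in `build_postgresql_parameters`. The megabytes-to-bytes conversion mirrors `update_config` in `src/charm.py`; the 25% `shared_buffers` calculation and the `"NNNMB"` formatting are assumptions based on the comment in the production branch of the hunk, not the library's exact output.

```python
# Standalone sketch, not the charm library itself: shows how an optional memory
# limit (in bytes) caps the memory used to derive PostgreSQL parameters.
from typing import Dict, Optional


def build_parameters_sketch(
    profile: str, available_memory: int, limit_memory: Optional[int] = None
) -> Dict[str, str]:
    """Derive parameters from the effective (possibly capped) memory, in bytes."""
    if limit_memory:
        # Same capping rule as the patch: never tune above the configured limit.
        available_memory = min(available_memory, limit_memory)
    if profile == "production":
        # Assumption: 25% of the effective memory for shared_buffers, reported in MB.
        shared_buffers_mb = int(available_memory * 0.25) // 10**6
        return {"shared_buffers": f"{shared_buffers_mb}MB"}
    # Non-production profiles fall back to the PostgreSQL default.
    return {"shared_buffers": "128MB"}


# The charm converts profile-limit-memory (megabytes) to bytes before calling in:
limit_memory = 2000 * 10**6  # profile-limit-memory=2000
print(build_parameters_sketch("production", 16 * 10**9, limit_memory))
# -> {'shared_buffers': '500MB'}
```

Because `is_restart_pending()` now reports settings with `pending_restart=True` in `pg_settings`, lowering or raising this limit triggers the restart path in `update_config`, so the new `shared_buffers` value is applied without a separate manual restart.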