diff --git a/CODEOWNERS b/CODEOWNERS index d492524bb..73659fed6 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -27,6 +27,7 @@ /nginx_k8s/ @canonical/tracing-and-profiling /passwd/ @canonical/charmlibs-maintainers /pathops/ @canonical/charmlibs-maintainers +/rollingops/ @canonical/data /snap/ @canonical/charmlibs-maintainers /sysctl/ @canonical/charmlibs-maintainers /systemd/ @canonical/charmlibs-maintainers diff --git a/rollingops/CHANGELOG.md b/rollingops/CHANGELOG.md new file mode 100644 index 000000000..e69de29bb diff --git a/rollingops/README.md b/rollingops/README.md new file mode 100644 index 000000000..fbe937d06 --- /dev/null +++ b/rollingops/README.md @@ -0,0 +1,29 @@ +# charmlibs.rollingops + +The `rollingops` library. + +`rollingops` provides a rolling-operations manager for Juju charms backed by etcd. + +It coordinates operations across units by using etcd as a shared lock and queue backend, +and uses TLS client credentials to authenticate requests to the etcd cluster. + +To install, add `charmlibs-rollingops` to your Python dependencies. Then in your Python code, import as: + +```py +from charmlibs import rollingops +``` + +See the [reference documentation](https://documentation.ubuntu.com/charmlibs/reference/charmlibs/rollingops) for more. + +## Unit tests +```sh +just python=3.12 unit rollingops +``` +## Pack +```sh +just python=3.12 pack-machine rollingops +``` +## Integration tests +```sh +just python=3.12 integration-machine rollingops +``` diff --git a/rollingops/pyproject.toml b/rollingops/pyproject.toml new file mode 100644 index 000000000..1cfff856a --- /dev/null +++ b/rollingops/pyproject.toml @@ -0,0 +1,75 @@ +[project] +name = "charmlibs-rollingops" +description = "The charmlibs.rollingops package." 
+readme = "README.md" +requires-python = ">=3.12" +authors = [ + {name="Data Platform"}, +] +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: Apache Software License", + "Intended Audience :: Developers", + "Operating System :: POSIX :: Linux", + "Development Status :: 5 - Production/Stable", +] +dynamic = ["version"] +dependencies = [ + # "ops", + "charmlibs-interfaces-tls-certificates>=1.8.1", +] + +[dependency-groups] +lint = [ # installed for `just lint rollingops` (unit, functional, and integration are also installed) + # "typing_extensions", +] +unit = [ # installed for `just unit rollingops` + "ops[testing]", +] +functional = [ # installed for `just functional rollingops` +] +integration = [ # installed for `just integration rollingops` + "jubilant", + "tenacity", +] + +[project.urls] +"Repository" = "https://github.com/canonical/charmlibs" +"Issues" = "https://github.com/canonical/charmlibs/issues" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/charmlibs"] + +[tool.hatch.version] +path = "src/charmlibs/rollingops/_version.py" + +[tool.ruff] +extend = "../pyproject.toml" +src = ["src", "tests/unit", "tests/functional", "tests/integration"] # correctly sort local imports in tests + +[tool.ruff.lint.extend-per-file-ignores] +# add additional per-file-ignores here to avoid overriding repo-level config +"tests/**/*" = [ + # "E501", # line too long +] +"src/charmlibs/rollingops/_dp_interfaces_v1.py" = ["ALL"] + +[tool.pyright] +extends = "../pyproject.toml" +include = ["src", "tests"] +exclude = ["**/_dp_interfaces_v1.py", "tests/integration/.tmp/**"] +pythonVersion = "3.12" # check no python > 3.12 features are used + +[tool.charmlibs.functional] +ubuntu = [] # ubuntu versions to run functional tests with, e.g. "24.04" (defaults to just "latest") +pebble = [] # pebble versions to run functional tests with, e.g. 
"v1.0.0", "master" (defaults to no pebble versions) +sudo = false # whether to run functional tests with sudo (defaults to false) + +[tool.charmlibs.integration] +# tags to run integration tests with (defaults to running once with no tag, i.e. tags = ['']) +# Available in CI in tests/integration/pack.sh and integration tests as CHARMLIBS_TAG +tags = [] # Not used by the pack.sh and integration tests generated by the template diff --git a/rollingops/src/charmlibs/rollingops/__init__.py b/rollingops/src/charmlibs/rollingops/__init__.py new file mode 100644 index 000000000..83413b068 --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/__init__.py @@ -0,0 +1,36 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""The charmlibs.rollingops package.""" + +from ._certificates import CertificatesManager +from ._etcdctl import EtcdCtl +from ._manager import EtcdRollingOpsManager +from ._models import ( + OperationResult, + RollingOpsEtcdNotConfiguredError, + RollingOpsKeys, +) +from ._relations import SECRET_FIELD +from ._version import __version__ as __version__ + +__all__ = ( + 'SECRET_FIELD', + 'CertificatesManager', + 'EtcdCtl', + 'EtcdRollingOpsManager', + 'OperationResult', + 'RollingOpsEtcdNotConfiguredError', + 'RollingOpsKeys', +) diff --git a/rollingops/src/charmlibs/rollingops/_certificates.py b/rollingops/src/charmlibs/rollingops/_certificates.py new file mode 100644 index 000000000..219da90f2 --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/_certificates.py @@ -0,0 +1,160 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import os +from datetime import timedelta +from pathlib import Path + +from charmlibs.interfaces.tls_certificates import ( + Certificate, + CertificateRequestAttributes, + CertificateSigningRequest, + PrivateKey, +) + +logger = logging.getLogger(__name__) + + +class CertificatesManager: + """Manage generation and persistence of TLS certificates for etcd client access. + + This class is responsible for creating and storing a client Certificate + Authority (CA) and a client certificate/key pair used to authenticate + with etcd via TLS. 
Certificates are generated only once and persisted + under a local directory so they can be reused across charm executions. + + Certificates are valid for 20 years. They are not renewed or rotated. + """ + + BASE_DIR = Path('/var/lib/rollingops/tls') + + CA_CERT = BASE_DIR / 'client-ca.pem' + CLIENT_KEY = BASE_DIR / 'client.key' + CLIENT_CERT = BASE_DIR / 'client.pem' + + VALIDITY_DAYS = 365 * 20 + + @classmethod + def _exists(cls) -> bool: + """Check whether the client certificates and CA certificate already exist.""" + return cls.CA_CERT.exists() and cls.CLIENT_KEY.exists() and cls.CLIENT_CERT.exists() + + @classmethod + def client_paths(cls) -> tuple[Path, Path]: + """Return filesystem paths for the client certificate and key. + + Returns: + A tuple containing: + - Path to the client certificate + - Path to the client private key + """ + return cls.CLIENT_CERT, cls.CLIENT_KEY + + @classmethod + def persist_client_cert_key_and_ca(cls, cert_pem: str, key_pem: str, ca_pem: str) -> None: + """Persist the provided client certificate, key, and CA to disk. + + Args: + cert_pem: PEM-encoded client certificate. + key_pem: PEM-encoded client private key. + ca_pem: PEM-encoded CA certificate. 
+ """ + cls.BASE_DIR.mkdir(parents=True, exist_ok=True) + + cls.CLIENT_CERT.write_text(cert_pem) + cls.CLIENT_KEY.write_text(key_pem) + cls.CA_CERT.write_text(ca_pem) + + os.chmod(cls.CLIENT_CERT, 0o644) + os.chmod(cls.CLIENT_KEY, 0o600) + os.chmod(cls.CA_CERT, 0o644) + + @classmethod + def has_client_cert_key_and_ca(cls, cert_pem: str, key_pem: str, ca_pem: str) -> bool: + """Return whether the provided certificate material matches local files.""" + if not cls.CLIENT_CERT.exists() or not cls.CLIENT_KEY.exists() or not cls.CA_CERT.exists(): + return False + + return ( + cls.CLIENT_CERT.read_text() == cert_pem + and cls.CLIENT_KEY.read_text() == key_pem + and cls.CA_CERT.read_text() == ca_pem + ) + + @classmethod + def generate(cls, common_name: str) -> tuple[str, str, str]: + """Generate a client CA and client certificate if they do not exist. + + This method creates: + 1. A CA private key and self-signed CA certificate. + 2. A client private key. + 3. A certificate signing request (CSR) using the provided common name. + 4. A client certificate signed by the generated CA. + + The generated files are written to disk and reused in future runs. + If the certificates already exist, this method does nothing. + + Args: + common_name: Common Name (CN) used in the client certificate + subject. This value should not contain slashes. 
+ + Returns: + A tuple containing: + - The client certificate PEM string + - The client private key PEM string + - The client CA certificate PEM string + """ + if cls._exists(): + return cls.CLIENT_CERT.read_text(), cls.CLIENT_KEY.read_text(), cls.CA_CERT.read_text() + + cls.BASE_DIR.mkdir(parents=True, exist_ok=True) + + ca_key = PrivateKey.generate(key_size=4096) + ca_attributes = CertificateRequestAttributes( + common_name='rollingops-client-ca', is_ca=True + ) + ca_crt = Certificate.generate_self_signed_ca( + attributes=ca_attributes, + private_key=ca_key, + validity=timedelta(days=cls.VALIDITY_DAYS), + ) + + client_key = PrivateKey.generate(key_size=4096) + + csr_attributes = CertificateRequestAttributes( + common_name=common_name, add_unique_id_to_subject_name=False + ) + csr = CertificateSigningRequest.generate( + attributes=csr_attributes, + private_key=client_key, + ) + + client_crt = Certificate.generate( + csr=csr, + ca=ca_crt, + ca_private_key=ca_key, + validity=timedelta(days=cls.VALIDITY_DAYS), + is_ca=False, + ) + + cls.CA_CERT.write_text(ca_crt.raw) + cls.CLIENT_KEY.write_text(client_key.raw) + cls.CLIENT_CERT.write_text(client_crt.raw) + + os.chmod(cls.CLIENT_KEY, 0o600) + os.chmod(cls.CA_CERT, 0o644) + os.chmod(cls.CLIENT_CERT, 0o644) + + return client_crt.raw, client_key.raw, ca_crt.raw diff --git a/rollingops/src/charmlibs/rollingops/_dp_interfaces_v1.py b/rollingops/src/charmlibs/rollingops/_dp_interfaces_v1.py new file mode 100644 index 000000000..7312444d7 --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/_dp_interfaces_v1.py @@ -0,0 +1,3242 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +r"""Library to manage the relation for the data-platform products. + +This V1 has been specified in https://docs.google.com/document/d/1lnuonWnoQb36RWYwfHOBwU0VClLbawpTISXIC_yNKYo, and should be backward compatible with v0 clients. + +This library contains the Requires and Provides classes for handling the relation +between an application and multiple managed application supported by the data-team: +MySQL, Postgresql, MongoDB, Redis, Kafka, and Karapace. + +#### Models + +This library exposes basic default models that can be used in most cases. +If you need more complex models, you can subclass them. + +```python +from charms.data_platform_libs.v1.data_interfaces import RequirerCommonModel, ExtraSecretStr + +class ExtendedCommonModel(RequirerCommonModel): + operator_password: ExtraSecretStr +``` + +Secret groups are handled using annotated types. If you wish to add extra secret groups, please follow the following model. The string metadata represents the secret group name, and `OptionalSecretStr` is a TypeAlias for `SecretStr | None`. Finally, `SecretStr` represents a field validating the URI pattern `secret:.*` + +```python +MyGroupSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), "mygroup"] +``` + +Fields not specified as OptionalSecretStr and extended with a group name in the metadata will NOT get serialised. 
+ + +#### Requirer Charm + +This library is a uniform interface to a selection of common database +metadata, with added custom events that add convenience to database management, +and methods to consume the application related data. + + +```python +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ClientCharm(CharmBase): + # Database charm that accepts connections from application charms. + def __init__(self, *args) -> None: + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + RequirerCommonModel( + entity_type="USER", + ) + ] + self.database = ResourceRequirerEventHandler( + self,"database", requests, response_model=ResourceProviderModel + ) + self.framework.observe(self.database.on.resource_created, self._on_resource_created) + self.framework.observe(self.database.on.resource_entity_created, self._on_resource_entity_created) + + def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new database is created. + relation_id = event.relation.id + response = event.response # This is the response model + + username = event.response.username + password = event.response.password + ... + + def _on_resource_entity_created(self, event: ResourceCreatedEvent) -> None: + # Event triggered when a new entity is created. + ... + +Compared to V0, this library makes heavy use of pydantic models, and allows for +multiple requests, specified as a list. +On the Requirer side, each response will trigger one custom event for that response. +This way, it allows for more strategic events to be emitted according to the request. 
+ +As show above, the library provides some custom events to handle specific situations, which are listed below: +- resource_created: event emitted when the requested database is created. +- resource_entity_created: event emitted when the requested entity is created. +- endpoints_changed: event emitted when the read/write endpoints of the database have changed. +- read_only_endpoints_changed: event emitted when the read-only endpoints of the database + have changed. Event is not triggered if read/write endpoints changed too. + +If it is needed to connect multiple database clusters to the same relation endpoint +the application charm can implement the same code as if it would connect to only +one database cluster (like the above code example). + +To differentiate multiple clusters connected to the same relation endpoint +the application charm can use the name of the remote application: + +```python + +def _on_resource_created(self, event: ResourceCreatedEvent) -> None: + # Get the remote app name of the cluster that triggered this event + cluster = event.relation.app.name +``` + +It is also possible to provide an alias for each different database cluster/relation. + +So, it is possible to differentiate the clusters in two ways. +The first is to use the remote application name, i.e., `event.relation.app.name`, as above. + +The second way is to use different event handlers to handle each cluster events. +The implementation would be something like the following code: + +```python + +from charms.data_platform_libs.v1.data_interfaces import ( + RequirerCommonModel, + RequirerDataContractV1, + ResourceCreatedEvent, + ResourceEntityCreatedEvent, + ResourceProviderModel, + ResourceRequirerEventHandler, +) + +class ApplicationCharm(CharmBase): + # Application charm that connects to database charms. 
+ + def __init__(self, *args): + super().__init__(*args) + + requests = [ + RequirerCommonModel( + resource="clientdb", + ), + RequirerCommonModel( + resource="clientbis", + ), + ] + # Define the cluster aliases and one handler for each cluster database created event. + self.database = ResourceRequirerEventHandler( + self, + relation_name="database" + relations_aliases = ["cluster1", "cluster2"], + requests= + ) + self.framework.observe( + self.database.on.cluster1_resource_created, self._on_cluster1_resource_created + ) + self.framework.observe( + self.database.on.cluster2_resource_created, self._on_cluster2_resource_created + ) + + def _on_cluster1_resource_created(self, event: ResourceCreatedEvent) -> None: + # Handle the created database on the cluster named cluster1 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.response.username, + event.response.password, + event.response.endpoints, + ) + ... + + def _on_cluster2_resource_created(self, event: ResourceCreatedEvent) -> None: + # Handle the created database on the cluster named cluster2 + + # Create configuration file for app + config_file = self._render_app_config_file( + event.response.username, + event.response.password, + event.response.endpoints, + ) + ... +``` + +### Provider Charm + +Following an example of using the ResourceRequestedEvent, in the context of the +database charm code: + +```python +from charms.data_platform_libs.v1.data_interfaces import ( + ResourceProviderEventHandler, + ResourceProviderModel, + ResourceRequestedEvent, + RequirerCommonModel, +) + +class SampleCharm(CharmBase): + + def __init__(self, *args): + super().__init__(*args) + # Charm events defined in the database provides charm library. 
+ self.provided_database = ResourceProviderEventHandler(self, "database", RequirerCommonModel) + self.framework.observe(self.provided_database.on.resource_requested, + self._on_resource_requested) + # Database generic helper + self.database = DatabaseHelper() + + def _on_resource_requested(self, event: ResourceRequestedEvent) -> None: + # Handle the event triggered by a new database requested in the relation + # Retrieve the database name using the charm library. + db_name = event.request.resource + # generate a new user credential + username = self.database.generate_user(event.request.request_id) + password = self.database.generate_password(event.request.request_id) + # set the credentials for the relation + response = ResourceProviderModel( + salt=event.request.salt, + request_id=event.request.request_id, + resource=db_name, + username=username, + password=password, + ... + ) + self.provided_database.set_response(event.relation.id, response) +``` + +As shown above, the library provides a custom event (resource_requested) to handle +the situation when an application charm requests a new database to be created. +It's preferred to subscribe to this event instead of relation changed event to avoid +creating a new database when other information other than a database name is +exchanged in the relation databag. 
+ +""" + +from __future__ import annotations + +import copy +import hashlib +import json +import logging +import pickle +import random +import string +from abc import ABC, abstractmethod +from collections.abc import Sequence +from datetime import datetime +from enum import Enum +from os import PathLike +from pathlib import Path +from typing import ( + Annotated, + Any, + Generic, + Literal, + NamedTuple, + NewType, + Self, + TypeAlias, + TypedDict, + TypeVar, + overload, + override, +) + +from ops import ( + CharmBase, + EventBase, + Model, + RelationChangedEvent, + RelationCreatedEvent, + RelationEvent, + Secret, + SecretChangedEvent, + SecretInfo, + SecretNotFoundError, +) +from ops.charm import CharmEvents, SecretRemoveEvent +from ops.framework import EventSource, Handle, Object +from ops.model import Application, ModelError, Relation, Unit +from pydantic import ( + AfterValidator, + AliasChoices, + BaseModel, + ConfigDict, + Discriminator, + Field, + SerializationInfo, + SerializerFunctionWrapHandler, + Tag, + TypeAdapter, + ValidationInfo, + model_serializer, + model_validator, +) + +try: + import psycopg2 +except ImportError: + psycopg2 = None + +# The unique Charmhub library identifier, never change it +LIBID = '6c3e6b6680d64e9c89e611d1a15f65be' + +# Increment this major API version when introducing breaking changes +LIBAPI = 1 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 3 + +PYDEPS = ['ops>=2.0.0', 'pydantic>=2.11'] + +logger = logging.getLogger(__name__) + +MODEL_ERRORS = { + 'not_leader': 'this unit is not the leader', + 'no_label_and_uri': 'ERROR either URI or label should be used for getting an owned secret but not both', + 'owner_no_refresh': 'ERROR secret owner cannot use --refresh', + 'permission_denied': 'ERROR permission denied', +} + +RESOURCE_ALIASES = [ + 'database', + 'subject', + 'topic', + 'index', + 'plugin-url', + 'prefix', +] + +SECRET_PREFIX 
= 'secret-' +STATUS_FIELD = 'status' + + +############################################################################## +# Exceptions +############################################################################## + + +class DataInterfacesError(Exception): + """Common ancestor for DataInterfaces related exceptions.""" + + +class SecretError(DataInterfacesError): + """Common ancestor for Secrets related exceptions.""" + + +class SecretAlreadyExistsError(SecretError): + """A secret that was to be added already exists.""" + + +class SecretsUnavailableError(SecretError): + """Secrets aren't yet available for Juju version used.""" + + +class IllegalOperationError(DataInterfacesError): + """To be used when an operation is not allowed to be performed.""" + + +############################################################################## +# Global helpers / utilities +############################################################################## + + +def gen_salt() -> str: + """Generates a consistent salt.""" + return ''.join(random.choices(string.ascii_letters + string.digits, k=16)) + + +def gen_hash(resource_name: str, salt: str) -> str: + """Generates a consistent hash based on the resource name and salt.""" + hasher = hashlib.sha256() + hasher.update(f'{resource_name}:{salt}'.encode()) + return hasher.hexdigest()[:16] + + +def ensure_leader_for_app(f): + """Decorator to ensure that only leader can perform given operation.""" + + def wrapper(self, *args, **kwargs): + if self.component == self._local_app and not self._local_unit.is_leader(): + logger.error(f'This operation ({f.__name__}) can only be performed by the leader unit') + return + return f(self, *args, **kwargs) + + return wrapper + + +def get_encoded_dict( + relation: Relation, member: Unit | Application, field: str +) -> dict[str, Any] | None: + """Retrieve and decode an encoded field from relation data.""" + data = json.loads(relation.data[member].get(field, '{}')) + if isinstance(data, dict): + return 
data + logger.error('Unexpected datatype for %s instead of dict.', str(data)) + + +class Diff(NamedTuple): + """A tuple for storing the diff between two data mappings. + + added - keys that were added + changed - keys that still exist but have new values + deleted - key that were deleted + """ + + added: set[str] + changed: set[str] + deleted: set[str] + + +def diff(old_data: dict[str, str] | None, new_data: dict[str, str]) -> Diff: + """Retrieves the diff of the data in the relation changed databag for v1. + + Args: + old_data: dictionary of the stored data before the event. + new_data: dictionary of the received data to compute the diff. + + Returns: + a Diff instance containing the added, deleted and changed + keys from the event relation databag. + """ + old_data = old_data or {} + + # These are the keys that were added to the databag and triggered this event. + added = new_data.keys() - old_data.keys() + # These are the keys that were removed from the databag and triggered this event. + deleted = old_data.keys() - new_data.keys() + # These are the keys that already existed in the databag, + # but had their values changed. + changed = {key for key in old_data.keys() & new_data.keys() if old_data[key] != new_data[key]} + # Return the diff with all possible changes. + return Diff(added, changed, deleted) + + +def resource_added(diff: Diff) -> bool: + """Ensures that one of the aliased resources has been added.""" + return any(item in diff.added for item in RESOURCE_ALIASES + ['resource']) + + +def store_new_data( + relation: Relation, + component: Unit | Application, + new_data: dict[str, str], + short_uuid: str | None = None, + global_data: dict[str, Any] = {}, +): + """Stores the new data in the databag for diff computation. + + Args: + relation: The relation considered to write data to + component: The component databag to write data to + new_data: a dictionary containing the data to write + short_uuid: Only present in V1, the request-id of that data to write. 
+ global_data: request-independent, global state data to be written. + """ + global_data = {k: v for k, v in global_data.items() if v} + # First, the case for V0 + if not short_uuid: + relation.data[component].update({'data': json.dumps(new_data | global_data)}) + # Then the case for V1, where we have a ShortUUID + else: + data = json.loads(relation.data[component].get('data', '{}')) | global_data + if not isinstance(data, dict): + raise ValueError + data[short_uuid] = new_data + relation.data[component].update({'data': json.dumps(data)}) + + +############################################################################## +# Helper classes +############################################################################## + +SecretGroup = NewType('SecretGroup', str) + + +type SecretString = Annotated[str, Field(pattern='secret:.*')] + + +OptionalSecretStr: TypeAlias = str | None +OptionalSecretBool: TypeAlias = bool | None + +OptionalSecrets = (OptionalSecretStr, OptionalSecretBool) + +OptionalPathLike = PathLike | str | None + +UserSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), 'user'] +TlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), 'tls'] +TlsSecretBool = Annotated[OptionalSecretBool, Field(exclude=True, default=None), 'tls'] +MtlsSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), 'mtls'] +ExtraSecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), 'extra'] +EntitySecretStr = Annotated[OptionalSecretStr, Field(exclude=True, default=None), 'entity'] + + +class Scope(Enum): + """Peer relations scope.""" + + APP = 'app' + UNIT = 'unit' + + +class RelationStatusDict(TypedDict): + """Base type for dict representation of `RelationStatus` dataclass.""" + + code: int + message: str + resolution: str + + +class CachedSecret: + """Locally cache a secret. 
+ + The data structure is precisely reusing/simulating as in the actual Secret Storage + """ + + KNOWN_MODEL_ERRORS = [ + MODEL_ERRORS['no_label_and_uri'], + MODEL_ERRORS['owner_no_refresh'], + MODEL_ERRORS['permission_denied'], + ] + + def __init__( + self, + model: Model, + component: Application | Unit, + label: str, + secret_uri: str | None = None, + ): + self._secret_meta = None + self._secret_content = {} + self._secret_uri = secret_uri + self.label = label + self._model = model + self.component = component + self.current_label = None + + @property + def meta(self) -> Secret | None: + """Getting cached secret meta-information.""" + if self._secret_meta: + return self._secret_meta + + if not (self._secret_uri or self.label): + return + + try: + self._secret_meta = self._model.get_secret(label=self.label) + except SecretNotFoundError: + # Falling back to seeking for potential legacy labels + logger.debug(f'Secret with label {self.label} not found') + except ModelError as err: + if not any(msg in str(err) for msg in self.KNOWN_MODEL_ERRORS): + raise + + # If still not found, to be checked by URI, to be labelled with the proposed label + if not self._secret_meta and self._secret_uri: + try: + self._secret_meta = self._model.get_secret(id=self._secret_uri, label=self.label) + except ModelError as err: + if not any(msg in str(err) for msg in self.KNOWN_MODEL_ERRORS): + raise + + return self._secret_meta + + ########################################################################## + # Public functions + ########################################################################## + + def add_secret( + self, + content: dict[str, str], + relation: Relation | None = None, + label: str | None = None, + ) -> Secret: + """Create a new secret.""" + if self._secret_uri: + raise SecretAlreadyExistsError( + 'Secret is already defined with uri %s', self._secret_uri + ) + + label = self.label if not label else label + + secret = self.component.add_secret(content, label=label) + 
if relation and relation.app != self._model.app: + # If it's not a peer relation, grant is to be applied + secret.grant(relation) + self._secret_uri = secret.id + self._secret_meta = secret + return self._secret_meta + + def get_content(self) -> dict[str, str]: + """Getting cached secret content.""" + if not self._secret_content: + if self.meta: + try: + self._secret_content = self.meta.get_content(refresh=True) + except (ValueError, ModelError) as err: + # https://bugs.launchpad.net/juju/+bug/2042596 + # Only triggered when 'refresh' is set + if isinstance(err, ModelError) and not any( + msg in str(err) for msg in self.KNOWN_MODEL_ERRORS + ): + raise + # Due to: ValueError: Secret owner cannot use refresh=True + self._secret_content = self.meta.get_content() + return self._secret_content + + def set_content(self, content: dict[str, str]) -> None: + """Setting cached secret content.""" + if not self.meta: + return + + if content == self.get_content(): + return + + if content: + self.meta.set_content(content) + self._secret_content = content + else: + self.meta.remove_all_revisions() + + def get_info(self) -> SecretInfo | None: + """Wrapper function to apply the corresponding call on the Secret object within CachedSecret if any.""" + if self.meta: + return self.meta.get_info() + + def remove(self) -> None: + """Remove secret.""" + if not self.meta: + raise SecretsUnavailableError('Non-existent secret was attempted to be removed.') + try: + self.meta.remove_all_revisions() + except SecretNotFoundError: + pass + self._secret_content = {} + self._secret_meta = None + self._secret_uri = None + + +class SecretCache: + """A data structure storing CachedSecret objects.""" + + def __init__(self, model: Model, component: Application | Unit): + self._model = model + self.component = component + self._secrets: dict[str, CachedSecret] = {} + + def get(self, label: str, uri: str | None = None) -> CachedSecret | None: + """Getting a secret from Juju Secret store or cache.""" + if 
not self._secrets.get(label): + secret = CachedSecret(self._model, self.component, label, uri) + if secret.meta: + self._secrets[label] = secret + return self._secrets.get(label) + + def add(self, label: str, content: dict[str, str], relation: Relation) -> CachedSecret: + """Adding a secret to Juju Secret.""" + if self._secrets.get(label): + raise SecretAlreadyExistsError(f'Secret {label} already exists') + + secret = CachedSecret(self._model, self.component, label) + secret.add_secret(content, relation) + self._secrets[label] = secret + return self._secrets[label] + + def remove(self, label: str) -> None: + """Remove a secret from the cache.""" + if secret := self.get(label): + try: + secret.remove() + self._secrets.pop(label) + except (SecretsUnavailableError, KeyError): + pass + else: + return + logging.debug('Non-existing Juju Secret was attempted to be removed %s', label) + + +############################################################################## +# Models classes +############################################################################## + + +class PeerModel(BaseModel): + """Common Model for all peer relations.""" + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace('_', '-'), + extra='allow', + ) + + @model_validator(mode='after') + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get('repository'), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get('repository') + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + 
aliased_field = field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + + if not secret: + logger.info(f'No secret for group {secret_group}') + continue + + value = secret.get_content().get(aliased_field) + if value and field_info.annotation == OptionalSecretBool: + value = json.loads(value) + setattr(self, field, value) + + return self + + @model_serializer(mode='wrap') + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + if not info.context or not isinstance(info.context.get('repository'), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get('repository') + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = SecretGroup(field_info.metadata[0]) + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret = repository.get_secret(secret_group, secret_uri=None) + + value = getattr(self, field) + + if (value is not None) and not isinstance(value, str): + value = json.dumps(value) + + if secret is None: + if value: + secret = repository.add_secret( + aliased_field, + value, + secret_group, + ) + if not secret or not secret.meta: + raise SecretError('No secret to send back') + continue + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if value is None: + full_content.pop(aliased_field, None) + else: + full_content.update({aliased_field: value}) + secret.set_content(full_content) + return handler(self) + + def __getitem__(self, key): + """Dict like access to the model.""" + try: + return getattr(self, key.replace('-', '_')) + except Exception: + raise KeyError(f'{key} is not 
present in the model') + + def __setitem__(self, key, value): + """Dict like setter for the model.""" + return setattr(self, key.replace('-', '_'), value) + + def __delitem__(self, key): + """Dict like deleter for the model.""" + try: + return delattr(self, key.replace('-', '_')) + except Exception: + raise KeyError(f'{key} is not present in the model.') + + +class BaseCommonModel(BaseModel): + """Embeds the logic of parsing and serializing.""" + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace('_', '-'), + extra='allow', + ) + + def update(self: Self, model: Self): + """Updates a common Model with another one.""" + # Iterate on all the fields that where explicitly set. + for item in model.model_fields_set: + # ignore the outstanding fields. + if item not in ['salt', 'request_id']: + value = getattr(model, item) + setattr(self, item, value) + return self + + @model_validator(mode='after') + def extract_secrets(self, info: ValidationInfo): + """Extract all secret_fields into their local field.""" + if not info.context or not isinstance(info.context.get('repository'), AbstractRepository): + logger.debug("No secret parsing as we're lacking context here.") + return self + repository: AbstractRepository = info.context.get('repository') + short_uuid = self.short_uuid + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + '-', '_' + ) + secret_uri: str | None = getattr(self, secret_field, None) + + if not secret_uri: + continue + + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + 
+ if not secret: + logger.info(f'No secret for group {secret_group} and short uuid {short_uuid}') + continue + + value = secret.get_content().get(aliased_field) + + if value and field_info.annotation == OptionalSecretBool: + value = json.loads(value) + + setattr(self, field, value) + + return self + + @model_serializer(mode='wrap') + def serialize_model(self, handler: SerializerFunctionWrapHandler, info: SerializationInfo): + """Serializes the model writing the secrets in their respective secrets.""" + if not info.context or not isinstance(info.context.get('repository'), AbstractRepository): + logger.debug("No secret parsing serialization as we're lacking context here.") + return handler(self) + repository: AbstractRepository = info.context.get('repository') + + short_uuid = self.short_uuid + # Backward compatibility for v0 regarding secrets. + if info.context.get('version') == 'v0': + short_uuid = None + + for field, field_info in self.__pydantic_fields__.items(): + if field_info.annotation in OptionalSecrets and len(field_info.metadata) == 1: + secret_group = field_info.metadata[0] + if not secret_group: + raise SecretsUnavailableError(field) + aliased_field = field_info.serialization_alias or field + secret_field = repository.secret_field(secret_group, aliased_field).replace( + '-', '_' + ) + secret_uri: str | None = getattr(self, secret_field, None) + secret = repository.get_secret( + secret_group, secret_uri=secret_uri, short_uuid=short_uuid + ) + + value = getattr(self, field) + + if (value is not None) and not isinstance(value, str): + value = json.dumps(value) + + if secret is None: + if value: + secret = repository.add_secret( + aliased_field, value, secret_group, short_uuid + ) + if not secret or not secret.meta: + raise SecretError('No secret to send back') + setattr(self, secret_field, secret.meta.id) + continue + + if secret and secret.meta and secret.meta.id: + # In case we lost the secret uri in the structure, let's add it back. 
+ setattr(self, secret_field, secret.meta.id) + + content = secret.get_content() + full_content = copy.deepcopy(content) + + if value is None: + full_content.pop(aliased_field, None) + else: + full_content.update({aliased_field: value}) + secret.set_content(full_content) + + if not full_content: + # Setting a field to '' deletes it + setattr(self, secret_field, None) + repository.delete_secret(secret.label) + + return handler(self) + + @classmethod + def _get_secret_field(cls, field: str) -> SecretGroup | None: + """Checks if the field is a secret uri or not.""" + if not field.startswith(SECRET_PREFIX): + return None + + value = field.split('-')[1] + if info := cls.__pydantic_fields__.get(field.replace('-', '_')): + if info.annotation == SecretString: + return SecretGroup(value) + return None + + @property + def short_uuid(self) -> str | None: + """The request id.""" + return None + + def __getitem__(self, key): + """Dict like access to the model.""" + try: + return getattr(self, key.replace('-', '_')) + except Exception: + raise KeyError(f'{key} is not present in the model') + + def __setitem__(self, key, value): + """Dict like setter for the model.""" + return setattr(self, key.replace('-', '_'), value) + + def __delitem__(self, key): + """Dict like deleter for the model.""" + try: + return delattr(self, key.replace('-', '_')) + except Exception: + raise KeyError(f'{key} is not present in the model.') + + +class CommonModel(BaseCommonModel): + """Common Model for both requirer and provider. + + request_id stores the request identifier for easier access. + salt is used to create a valid request id. + resource is the requested resource. 
+ """ + + model_config = ConfigDict( + validate_by_name=True, + validate_by_alias=True, + populate_by_name=True, + serialize_by_alias=True, + alias_generator=lambda x: x.replace('_', '-'), + extra='allow', + ) + + resource: str = Field(validation_alias=AliasChoices(*RESOURCE_ALIASES), default='') + request_id: str | None = Field(default=None) + salt: str = Field( + description='This salt is used to create unique hashes even when other fields map 1-1', + default_factory=gen_salt, + ) + + @property + def short_uuid(self) -> str | None: + """The request id.""" + return self.request_id or gen_hash(self.resource, self.salt) + + +class EntityPermissionModel(BaseModel): + """Entity Permissions Model.""" + + resource_name: str + resource_type: str + privileges: list + + +class RequirerCommonModel(CommonModel): + """Requirer side of the request model. + + extra_user_roles is used to request more roles for that user. + external_node_connectivity is used to indicate that the URI should be made for external clients when True + """ + + extra_user_roles: str | None = Field(default=None) + extra_group_roles: str | None = Field(default=None) + external_node_connectivity: bool = Field(default=False) + entity_type: Literal['USER', 'GROUP'] | None = Field(default=None) + entity_permissions: list[EntityPermissionModel] | None = Field(default=None) + secret_mtls: SecretString | None = Field(default=None) + mtls_cert: MtlsSecretStr = Field(default=None) + + @model_validator(mode='after') + def validate_fields(self): + """Validates that no inconsistent request is being sent.""" + if self.entity_type and self.entity_type not in ['USER', 'GROUP']: + raise ValueError('Invalid entity-type. Possible values are USER and GROUP') + + if self.entity_type == 'USER' and self.extra_group_roles: + raise ValueError('Inconsistent entity information. Use extra_user_roles instead') + + if self.entity_type == 'GROUP' and self.extra_user_roles: + raise ValueError('Inconsistent entity information. 
Use extra_group_roles instead') + + return self + + +class ProviderCommonModel(CommonModel): + """Serialized fields added to the databag. + + endpoints stores the endpoints exposed to that client. + secret_user is a secret URI mapping to the user credentials + secret_tls is a secret URI mapping to the TLS certificate + secret_extra is a secret URI for all additional secrets requested. + """ + + endpoints: str | None = Field(default=None) + read_only_endpoints: str | None = Field(default=None) + secret_user: SecretString | None = Field(default=None) + secret_tls: SecretString | None = Field(default=None) + secret_extra: SecretString | None = Field(default=None) + secret_entity: SecretString | None = Field(default=None) + + +class ResourceProviderModel(ProviderCommonModel): + """Extended model including the deserialized fields.""" + + username: UserSecretStr = Field(default=None) + password: UserSecretStr = Field(default=None) + uris: UserSecretStr = Field(default=None) + read_only_uris: UserSecretStr = Field(default=None) + tls: TlsSecretBool = Field(default=None) + tls_ca: TlsSecretStr = Field(default=None) + entity_name: EntitySecretStr = Field(default=None) + entity_password: EntitySecretStr = Field(default=None) + version: str | None = Field(default=None) + + +class RequirerDataContractV0(RequirerCommonModel): + """Backward compatibility.""" + + version: Literal['v0'] = Field(default='v0') + + original_field: str = Field(exclude=True, default='') + + @model_validator(mode='before') + @classmethod + def ensure_original_field(cls, data: Any): + """Ensures that we keep the original field.""" + if isinstance(data, dict): + for alias in RESOURCE_ALIASES: + if data.get(alias) is not None: + data['original_field'] = alias + break + else: + for alias in RESOURCE_ALIASES: + if getattr(data, alias) is not None: + data.original_field = alias + return data + + +TResourceProviderModel = TypeVar('TResourceProviderModel', bound=ResourceProviderModel) +TRequirerCommonModel = 
TypeVar('TRequirerCommonModel', bound=RequirerCommonModel) + + +class RequirerDataContractV1(BaseModel, Generic[TRequirerCommonModel]): + """The new Data Contract.""" + + version: Literal['v1'] = Field(default='v1') + requests: list[TRequirerCommonModel] = Field(default_factory=list) + + +def discriminate_on_version(payload: Any) -> str: + """Use the version to discriminate.""" + if isinstance(payload, dict): + return payload.get('version', 'v0') + return getattr(payload, 'version', 'v0') + + +RequirerDataContractType = Annotated[ + Annotated[RequirerDataContractV0, Tag('v0')] | Annotated[RequirerDataContractV1, Tag('v1')], + Discriminator(discriminate_on_version), +] + + +RequirerDataContract = TypeAdapter(RequirerDataContractType) + + +class DataContractV0(ResourceProviderModel): + """The Data contract of the response, for V0.""" + + +class DataContractV1(BaseModel, Generic[TResourceProviderModel]): + """The Data contract of the response, for V1.""" + + version: Literal['v1'] = Field(default='v1') + requests: list[TResourceProviderModel] = Field(default_factory=list) + + +DataContract = TypeAdapter(DataContractV1[ResourceProviderModel]) + + +TCommonModel = TypeVar('TCommonModel', bound=CommonModel) + + +def is_topic_value_acceptable(value: str | None) -> str | None: + """Check whether the given Kafka topic value is acceptable.""" + if value and '*' in value[:3]: + raise ValueError(f"Error on topic '{value}',, unacceptable value.") + return value + + +class KafkaRequestModel(RequirerCommonModel): + """Specialised model for Kafka.""" + + consumer_group_prefix: Annotated[str | None, AfterValidator(is_topic_value_acceptable)] = ( + Field(default=None) + ) + + +class KafkaResponseModel(ResourceProviderModel): + """Kafka response model.""" + + consumer_group_prefix: ExtraSecretStr = Field(default=None) + zookeeper_uris: ExtraSecretStr = Field(default=None) + + +class RelationStatus(BaseModel): + """Base model for status propagation on charm relations.""" + + code: int 
+ message: str + resolution: str + + @property + def is_informational(self) -> bool: + """Is this an informational status?""" + return self.code // 1000 == 1 + + @property + def is_transitory(self) -> bool: + """Is this a transitory status?""" + return self.code // 1000 == 4 + + @property + def is_fatal(self) -> bool: + """Is this a fatal status, requiring removing the relation?""" + return self.code // 1000 == 5 + + +############################################################################## +# AbstractRepository class +############################################################################## + + +class AbstractRepository(ABC): + """Abstract repository interface.""" + + @abstractmethod + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + ... + + @abstractmethod + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> str | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def get_field(self, field: str) -> str | None: + """Gets the value for one field.""" + ... + + @abstractmethod + def get_fields(self, *fields: str) -> dict[str, str | None]: + """Gets the values for all provided fields.""" + ... + + @abstractmethod + def write_field(self, field: str, value: Any) -> None: + """Writes the value in the field, without any secret support.""" + ... + + @abstractmethod + def write_fields(self, mapping: dict[str, Any]) -> None: + """Writes the values of mapping in the fields without any secret support (keys of mapping).""" + ... + + def write_secret_field( + self, field: str, value: Any, group: SecretGroup + ) -> CachedSecret | None: + """Writes a secret field.""" + ... 
+ + @abstractmethod + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + """Gets a value for a field stored in a secret group.""" + ... + + @abstractmethod + def delete_secret(self, label: str): + """Deletes a secret by its label.""" + ... + + @abstractmethod + def delete_field(self, field: str) -> None: + """Deletes a field.""" + ... + + @abstractmethod + def delete_fields(self, *fields: str) -> None: + """Deletes all the provided fields.""" + ... + + @abstractmethod + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + """Delete a field stored in a secret group.""" + ... + + @abstractmethod + def register_secret(self, secret_group: SecretGroup, short_uuid: str | None = None) -> None: + """Registers a secret using the repository.""" + ... + + @abstractmethod + def get_data(self) -> dict[str, Any] | None: + """Gets the whole data.""" + ... + + @abstractmethod + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Builds a secret field.""" + + +class OpsRepository(AbstractRepository): + """Implementation for ops repositories, with some methods left out.""" + + SECRET_FIELD_NAME: str + + uri_to_databag: bool = True + + def __init__( + self, + model: Model, + relation: Relation | None, + component: Unit | Application, + ): + self._local_app = model.app + self._local_unit = model.unit + self.relation = relation + self.component = component + self.model = model + self.secrets = SecretCache(model, component) + + @abstractmethod + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group mapping for secrets within a relation context.""" + ... 
+ + @override + def get_data(self) -> dict[str, Any] | None: + ret: dict[str, Any] = {} + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + + for key, value in self.relation.data[self.component].items(): + try: + ret[key] = json.loads(value) + except json.JSONDecodeError: + ret[key] = value + + return ret + + @override + @ensure_leader_for_app + def get_field( + self, + field: str, + ) -> str | None: + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + relation_data = self.relation.data[self.component] + return relation_data.get(field) + + @override + @ensure_leader_for_app + def get_fields(self, *fields: str) -> dict[str, str]: + res = {} + for field in fields: + if (value := self.get_field(field)) is not None: + res[field] = value + return res + + @override + @ensure_leader_for_app + def write_field(self, field: str, value: Any) -> None: + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + if not value: + return None + self.relation.data[self.component].update({field: value}) + + @override + @ensure_leader_for_app + def write_fields(self, mapping: dict[str, Any]) -> None: + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + (self.write_field(field, value) for field, value in mapping.items()) + + @override + @ensure_leader_for_app + def write_secret_field( + self, field: str, value: Any, 
secret_group: SecretGroup + ) -> CachedSecret | None: + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = self.get_field(self.secret_field(secret_group, field)) + + secret = self.secrets.get(label=label, uri=secret_uri) + if not secret: + return self.add_secret(field, value, secret_group) + else: + content = secret.get_content() + full_content = copy.deepcopy(content) + full_content.update({field: value}) + secret.set_content(full_content) + return secret + + @override + @ensure_leader_for_app + def delete_field(self, field: str) -> None: + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + relation_data = self.relation.data[self.component] + try: + relation_data.pop(field) + except KeyError: + logger.debug( + f'Non existent field {field} was attempted to be removed from the databag (relation ID: {self.relation.id})' + ) + + @override + @ensure_leader_for_app + def delete_fields(self, *fields: str) -> None: + (self.delete_field(field) for field in fields) + + @override + @ensure_leader_for_app + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + + relation_data = self.relation.data[self.component] + secret_field = self.secret_field(secret_group, field) + + label = self._generate_secret_label(self.relation, secret_group) + secret_uri = relation_data.get(secret_field) + + secret = 
self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logging.error(f"Can't delete secret for relation {self.relation.id}") + return None + + content = secret.get_content() + new_content = copy.deepcopy(content) + try: + new_content.pop(field) + except KeyError: + logging.debug( + f"Non-existing secret '{field}' was attempted to be removed" + f'from relation {self.relation.id} and group {secret_group}' + ) + + # Write the new secret content if necessary + if new_content: + secret.set_content(new_content) + return + + # Remove the secret from the relation if it's fully gone. + try: + relation_data.pop(field) + except KeyError: + pass + self.secrets.remove(label) + return + + @ensure_leader_for_app + def register_secret(self, uri: str, secret_group: SecretGroup, short_uuid: str | None = None): + """Registers the secret group for this relation. + + [MAGIC HERE] + If we fetch a secret using get_secret(id=, label=), + then will be "stuck" on the Secret object, whenever it may + appear (i.e. as an event attribute, or fetched manually) on future occasions. + + This will allow us to uniquely identify the secret on Provider side (typically on + 'secret-changed' events), and map it to the corresponding relation. 
+ """ + if not self.relation: + raise ValueError('Cannot register without relation.') + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + CachedSecret(self.model, self.component, label, uri).meta + + @override + def get_secret( + self, secret_group, secret_uri: str | None, short_uuid: str | None = None + ) -> CachedSecret | None: + """Gets a secret from the secret cache by uri or label.""" + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + return self.secrets.get(label=label, uri=secret_uri) + + @override + def get_secret_field( + self, + field: str, + secret_group: SecretGroup, + uri: str | None = None, + short_uuid: str | None = None, + ) -> str | None: + """Gets a value for a field stored in a secret group.""" + if not self.relation: + logger.info('No relation to get value from') + return None + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + + secret_field = self.secret_field(secret_group, field) + + relation_data = self.relation.data[self.component] + secret_uri = uri or relation_data.get(secret_field) + label = self._generate_secret_label(self.relation, secret_group, short_uuid=short_uuid) + + if self.uri_to_databag and not secret_uri: + logger.info(f'No secret for group {secret_group} in relation {self.relation}') + return None + + secret = self.secrets.get(label=label, uri=secret_uri) + + if not secret: + logger.info(f'No secret for group {secret_group} in relation {self.relation}') + return None + + content = secret.get_content().get(field) + + if not content: + return + + try: + return json.loads(content) + except json.JSONDecodeError: + return content + + 
@override + @ensure_leader_for_app + def add_secret( + self, + field: str, + value: Any, + secret_group: SecretGroup, + short_uuid: str | None = None, + ) -> CachedSecret | None: + if not self.relation: + logger.info('No relation to get value from') + return None + + if self.component not in self.relation.data: + logger.info(f'Component {self.component} not in relation {self.relation}') + return None + + label = self._generate_secret_label(self.relation, secret_group, short_uuid) + + secret = self.secrets.add(label, {field: value}, self.relation) + + if not secret.meta or not secret.meta.id: + logging.error('Secret is missing Secret ID') + raise SecretError('Secret added but is missing Secret ID') + + return secret + + @override + @ensure_leader_for_app + def delete_secret(self, label: str) -> None: + self.secrets.remove(label) + + +class OpsRelationRepository(OpsRepository): + """Implementation of the Abstract Repository for non peer relations.""" + + SECRET_FIELD_NAME: str = 'secret' + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + if short_uuid: + return f'{relation.name}.{relation.id}.{short_uuid}.{secret_group}.secret' + return f'{relation.name}.{relation.id}.{secret_group}.secret' + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + return f'{self.SECRET_FIELD_NAME}-{secret_group}' + + @ensure_leader_for_app + @override + def get_data(self) -> dict[str, Any] | None: + return super().get_data() + + +class OpsPeerRepository(OpsRepository): + """Implementation of the Ops Repository for peer relations.""" + + SECRET_FIELD_NAME = 'internal_secret' + + uri_to_databag: bool = False + + @property + def scope(self) -> Scope: + """Returns a scope.""" + if isinstance(self.component, Application): + return 
Scope.APP + if isinstance(self.component, Unit): + return Scope.UNIT + raise ValueError('Invalid component, neither a Unit nor an Application') + + @override + def _generate_secret_label( + self, relation: Relation, secret_group: SecretGroup, short_uuid: str | None = None + ) -> str: + """Generate unique group_mappings for secrets within a relation context.""" + members = [relation.name, self._local_app.name, self.scope.value] + + if secret_group != SecretGroup('extra'): + members.append(secret_group) + return f'{".".join(members)}' + + def secret_field(self, secret_group: SecretGroup, field: str | None = None) -> str: + """Generates the field name to store in the peer relation.""" + if not field: + raise ValueError('Must have a field.') + return f'{field}@{secret_group}' + + +class OpsPeerUnitRepository(OpsPeerRepository): + """Implementation for a unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + super().__init__(model, relation, component) + + +class OpsOtherPeerUnitRepository(OpsPeerRepository): + """Implementation for a remote unit.""" + + @override + def __init__(self, model: Model, relation: Relation | None, component: Unit): + if component == model.unit: + raise ValueError(f"Can't instantiate {self.__class__.__name__} with local unit.") + super().__init__(model, relation, component) + + @override + def write_field(self, field: str, value: Any) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def write_fields(self, mapping: dict[str, Any]) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def add_secret( + self, field: str, value: Any, secret_group: SecretGroup, short_uuid: str | None = None + ) -> CachedSecret | None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_field(self, field: str) -> None: + raise NotImplementedError("It's 
not possible to update data of another unit.") + + @override + def delete_fields(self, *fields: str) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + @override + def delete_secret_field(self, field: str, secret_group: SecretGroup) -> None: + raise NotImplementedError("It's not possible to update data of another unit.") + + +TRepository = TypeVar('TRepository', bound=OpsRepository) +TCommon = TypeVar('TCommon', bound=BaseModel) +TPeerCommon = TypeVar('TPeerCommon', bound=PeerModel) +TCommonBis = TypeVar('TCommonBis', bound=BaseModel) + + +class RepositoryInterface(Generic[TRepository, TCommon]): + """Repository builder.""" + + def __init__( + self, + model: Model, + relation_name: str, + component: Unit | Application, + repository_type: type[TRepository], + data_model: type[TCommon] | TypeAdapter | None, + ): + self._model = model + self.repository_type = repository_type + self.relation_name = relation_name + self.model = data_model + self.component = component + + @property + def relations(self) -> list[Relation]: + """The list of Relation instances associated with this relation name.""" + return self._model.relations[self.relation_name] + + def repository( + self, relation_id: int, component: Unit | Application | None = None + ) -> TRepository: + """Returns a repository for the relation.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError('Missing relation.') + return self.repository_type(self._model, relation, component or self.component) + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: type[TCommon], + component: Unit | Application | None = None, + ) -> TCommon: ... 
+ + @overload + def build_model( + self, + relation_id: int, + model: TypeAdapter[TCommonBis], + component: Unit | Application | None = None, + ) -> TCommonBis: ... + + @overload + def build_model( + self, + relation_id: int, + model: None = None, + component: Unit | Application | None = None, + ) -> TCommon: ... + + def build_model( + self, + relation_id: int, + model: type[TCommon] | TypeAdapter[TCommonBis] | None = None, + component: Unit | Application | None = None, + ) -> TCommon | TCommonBis: + """Builds a model using the repository for that relation.""" + model = model or self.model # First the provided model (allows for specialisation) + component = component or self.component + if not model: + raise ValueError('Missing model to specialise data') + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError('Missing relation.') + return build_model(self.repository_type(self._model, relation, component), model) + + def write_model( + self, relation_id: int, model: BaseModel, context: dict[str, str] | None = None + ): + """Writes the model using the repository.""" + relation = self._model.get_relation(self.relation_name, relation_id) + if not relation: + raise ValueError('Missing relation.') + + write_model( + self.repository_type(self._model, relation, self.component), model, context=context + ) + + +class OpsRelationRepositoryInterface(RepositoryInterface[OpsRelationRepository, TCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + model: Model, + relation_name: str, + data_model: type[TCommon] | TypeAdapter | None = None, + ): + super().__init__(model, relation_name, model.app, OpsRelationRepository, data_model) + + +class OpsPeerRepositoryInterface(RepositoryInterface[OpsPeerRepository, TPeerCommon]): + """Specialised Interface to build repositories for app peer relations.""" + + def __init__( + self, + model: Model, + relation_name: str, + 
data_model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(model, relation_name, model.app, OpsPeerRepository, data_model) + + +class OpsPeerUnitRepositoryInterface(RepositoryInterface[OpsPeerUnitRepository, TPeerCommon]): + """Specialised Interface to build repositories for this unit peer relations.""" + + def __init__( + self, + model: Model, + relation_name: str, + data_model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(model, relation_name, model.unit, OpsPeerUnitRepository, data_model) + + +class OpsOtherPeerUnitRepositoryInterface( + RepositoryInterface[OpsOtherPeerUnitRepository, TPeerCommon] +): + """Specialised Interface to build repositories for another unit peer relations.""" + + def __init__( + self, + model: Model, + relation_name: str, + unit: Unit, + data_model: type[TPeerCommon] | TypeAdapter | None = None, + ): + super().__init__(model, relation_name, unit, OpsOtherPeerUnitRepository, data_model) + + +############################################################################## +# DDD implementation methods +############################################################################## +############################################################################## + + +def build_model(repository: AbstractRepository, model: type[TCommon] | TypeAdapter) -> TCommon: + """Builds a common model using the provided repository and provided model structure.""" + data = repository.get_data() or {} + + data.pop('data', None) + + # Beware this means all fields should have a default value here. 
+ if isinstance(model, TypeAdapter): + return model.validate_python(data, context={'repository': repository}) + + return model.model_validate(data, context={'repository': repository}) + + +def write_model( + repository: AbstractRepository, model: BaseModel, context: dict[str, str] | None = None +): + """Writes the data stored in the model using the repository object.""" + context = context or {} + dumped = model.model_dump( + mode='json', context={'repository': repository} | context, exclude_none=False + ) + for field, value in dumped.items(): + if value is None: + repository.delete_field(field) + continue + dumped_value = value if isinstance(value, str) else json.dumps(value) + repository.write_field(field, dumped_value) + + +############################################################################## +# Custom Events +############################################################################## + + +class ResourceProviderEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.request = request + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {'relation_name': self.relation.name, 'relation_id': self.relation.id} + if self.app: + snapshot['app_name'] = self.app.name + if self.unit: + snapshot['unit_name'] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot['request'] = pickle.dumps(self.request) + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot['relation_name'], snapshot['relation_id'] + ) + if not relation: + raise ValueError('Missing relation') + self.relation = relation + self.app = None + app_name = snapshot.get('app_name') + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get('unit_name') + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.request = pickle.loads(snapshot['request']) + + +class ResourceRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource requested event.""" + + pass + + +class ResourceEntityRequestedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource Entity requested event.""" + + pass + + +class ResourceEntityPermissionsChangedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + pass + + +class MtlsCertUpdatedEvent(ResourceProviderEvent[TRequirerCommonModel]): + """Resource entity permissions changed event.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + request: TRequirerCommonModel, + old_mtls_cert: str | None = None, + ): + super().__init__(handle, relation, app, unit, request) + + self.old_mtls_cert = old_mtls_cert + + def snapshot(self): + """Return a snapshot of the event.""" + return super().snapshot() | {'old_mtls_cert': self.old_mtls_cert} + + def restore(self, snapshot): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.old_mtls_cert = snapshot['old_mtls_cert'] + + +class BulkResourcesRequestedEvent(EventBase, Generic[TRequirerCommonModel]): + """Resource requested event. + + Contains the request that should be handled. 
+ + fields to serialize: relation, app, unit, request + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + requests: list[TRequirerCommonModel], + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.requests = requests + + def snapshot(self) -> dict[str, Any]: + """Save the event information.""" + snapshot = {'relation_name': self.relation.name, 'relation_id': self.relation.id} + if self.app: + snapshot['app_name'] = self.app.name + if self.unit: + snapshot['unit_name'] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. + snapshot['requests'] = [pickle.dumps(request) for request in self.requests] + return snapshot + + def restore(self, snapshot: dict[str, Any]): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot['relation_name'], snapshot['relation_id'] + ) + if not relation: + raise ValueError('Missing relation') + self.relation = relation + self.app = None + app_name = snapshot.get('app_name') + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get('unit_name') + if unit_name: + self.app = self.framework.model.get_app(unit_name) + self.requests = [pickle.loads(request) for request in snapshot['requests']] + + +class ResourceProvidesEvents(CharmEvents, Generic[TRequirerCommonModel]): + """Database events. + + This class defines the events that the database can emit. 
+ """ + + bulk_resources_requested = EventSource(BulkResourcesRequestedEvent) + resource_requested = EventSource(ResourceRequestedEvent) + resource_entity_requested = EventSource(ResourceEntityRequestedEvent) + resource_entity_permissions_changed = EventSource(ResourceEntityPermissionsChangedEvent) + mtls_cert_updated = EventSource(MtlsCertUpdatedEvent) + + +class ResourceRequirerEvent(EventBase, Generic[TResourceProviderModel]): + """Resource created/changed event. + + Contains the request that should be handled. + + fields to serialize: relation, app, unit, response + """ + + def __init__( + self, + handle: Handle, + relation: Relation, + app: Application | None, + unit: Unit | None, + response: TResourceProviderModel, + ): + super().__init__(handle) + self.relation = relation + self.app = app + self.unit = unit + self.response = response + + def snapshot(self) -> dict: + """Save the event information.""" + snapshot = {'relation_name': self.relation.name, 'relation_id': self.relation.id} + if self.app: + snapshot['app_name'] = self.app.name + if self.unit: + snapshot['unit_name'] = self.unit.name + # The models are too complex and would be blocked by marshal so we pickle dump the model. + # The full dictionary is pickled afterwards anyway. 
+ snapshot['response'] = pickle.dumps(self.response) + return snapshot + + def restore(self, snapshot: dict): + """Restore event information.""" + relation = self.framework.model.get_relation( + snapshot['relation_name'], snapshot['relation_id'] + ) + if not relation: + raise ValueError('Missing relation') + self.relation = relation + self.app = None + app_name = snapshot.get('app_name') + if app_name: + self.app = self.framework.model.get_app(app_name) + self.unit = None + unit_name = snapshot.get('unit_name') + if unit_name: + self.app = self.framework.model.get_app(unit_name) + + self.response = pickle.loads(snapshot['response']) + + +class ResourceCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource has been created.""" + + pass + + +class ResourceEntityCreatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Resource entity has been created.""" + + pass + + +class ResourceEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read/Write endpoints are changed.""" + + pass + + +class ResourceReadOnlyEndpointsChangedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Read-only endpoints are changed.""" + + pass + + +class AuthenticationUpdatedEvent(ResourceRequirerEvent[TResourceProviderModel]): + """Authentication was updated for a user.""" + + pass + + +# Error Propagation Events + + +class StatusEventBase(RelationEvent): + """Base class for relation status change events.""" + + def __init__( + self, + handle: Handle, + relation: Relation, + status: RelationStatus, + app: Application | None = None, + unit: Unit | None = None, + ): + super().__init__(handle, relation, app=app, unit=unit) + self.status = status + + def snapshot(self) -> dict: + """Return a snapshot of the event.""" + return super().snapshot() | {'status': json.dumps(self.status.model_dump())} + + def restore(self, snapshot: dict): + """Restore the event from a snapshot.""" + super().restore(snapshot) + self.status = 
RelationStatus(**json.loads(snapshot['status'])) + + @property + def active_statuses(self) -> list[RelationStatus]: + """Returns a list of all currently active statuses on this relation.""" + if not self.relation.app: + return [] + + raw = json.loads(self.relation.data[self.relation.app].get(STATUS_FIELD, '[]')) + + return [RelationStatus(**item) for item in raw] + + +class StatusRaisedEvent(StatusEventBase): + """Event emitted on the requirer when a new status is being raised by the provider on relation.""" + + +class StatusResolvedEvent(StatusEventBase): + """Event emitted on the requirer when a status is marked as resolved by the provider on relation.""" + + +class ResourceRequiresEvents(CharmEvents, Generic[TResourceProviderModel]): + """Database events. + + This class defines the events that the database can emit. + """ + + resource_created = EventSource(ResourceCreatedEvent) + resource_entity_created = EventSource(ResourceEntityCreatedEvent) + endpoints_changed = EventSource(ResourceEndpointsChangedEvent) + read_only_endpoints_changed = EventSource(ResourceReadOnlyEndpointsChangedEvent) + authentication_updated = EventSource(AuthenticationUpdatedEvent) + status_raised = EventSource(StatusRaisedEvent) + status_resolved = EventSource(StatusResolvedEvent) + + +############################################################################## +# Event Handlers +############################################################################## + + +class EventHandlers(Object): + """Requires-side of the relation.""" + + component: Application | Unit + interface: RepositoryInterface + + def __init__(self, charm: CharmBase, relation_name: str, unique_key: str = ''): + """Manager of base client relations.""" + if not unique_key: + unique_key = relation_name + super().__init__(charm, unique_key) + + self.charm = charm + self.relation_name = relation_name + + self.framework.observe( + charm.on[self.relation_name].relation_changed, + self._on_relation_changed_event, + ) + + 
self.framework.observe( + self.charm.on[self.relation_name].relation_created, + self._on_relation_created_event, + ) + + self.framework.observe( + charm.on.secret_changed, + self._on_secret_changed_event, + ) + self.framework.observe(charm.on.secret_remove, self._on_secret_remove_event) + + @property + def relations(self) -> list[Relation]: + """Shortcut to get access to the relations.""" + return self.interface.relations + + def get_remote_unit(self, relation: Relation) -> Unit | None: + """Gets the remote unit in the relation.""" + remote_unit = None + for unit in relation.units: + if unit.app != self.charm.app: + remote_unit = unit + break + return remote_unit + + def get_statuses(self, relation_id: int) -> dict[int, RelationStatus]: + """Return all currently active statuses on this relation. Can only be called on leader units. + + Args: + relation_id (int): the identifier for a particular relation. + + Returns: + Dict[int, RelationStatus]: A mapping of status code to RelationStatus instances. 
+ """ + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError('Missing relation.') + + component = self.charm.app if isinstance(self.component, Application) else relation.app + + raw = relation.data[component].get(STATUS_FIELD, '[]') + + return {int(item['code']): RelationStatus(**item) for item in json.loads(raw)} + + # Event handlers + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the relation is created.""" + pass + + @abstractmethod + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + @abstractmethod + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + """Event emitted when the relation data has changed.""" + raise NotImplementedError + + def _on_secret_remove_event(self, event: SecretRemoveEvent) -> None: + """Event emitted when a secret is removed. + + A secret removal (entire removal, not just a revision removal) causes + https://github.com/juju/juju/issues/20794. This check is to avoid the + errors that would happen if we tried to remove the revision in that case + (in the revision removal, the label is present). 
+ """ + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + try: + event.secret.get_info() + except SecretNotFoundError: + logging.info('Secret removed event ignored for non Secret Owner') + return + + if relation.name != self.relation_name: + logging.info('Secret changed on wrong relation.') + return + + event.remove_revision() + + @abstractmethod + def _handle_event( + self, + ): + """Handles the event and reacts accordingly.""" + pass + + def compute_diff( + self, + relation: Relation, + request: RequirerCommonModel | ResourceProviderModel, + repository: AbstractRepository | None = None, + store: bool = True, + ) -> Diff: + """Computes, stores and returns a diff for that request.""" + if not repository: + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Gets the data stored in the databag for diff computation + old_data = get_encoded_dict(relation, self.component, 'data') + + # In case we're V1, we select specifically this request + if old_data and request.request_id: + old_data: dict | None = old_data.get(request.request_id, None) + + # dump the data of the current request so we can compare + new_data = request.model_dump( + mode='json', + exclude={'data'}, + exclude_none=True, + exclude_defaults=True, + ) + + # Computes the diff + _diff = diff(old_data, new_data) + + if store: + # Update the databag with the new data for later diff computations + store_new_data( + relation, + self.component, + new_data, + short_uuid=request.request_id, + global_data={ + STATUS_FIELD: { + code: status.model_dump() + for code, status in self.get_statuses(relation.id).items() + } + }, + ) + + return _diff + + def _relation_from_secret_label(self, secret_label: str) -> Relation | None: + """Retrieve the relation that belongs to a secret label.""" + contents = 
secret_label.split('.') + + if not (contents and len(contents) >= 3): + return + + try: + relation_id = int(contents[1]) + except ValueError: + return + + relation_name = contents[0] + + try: + return self.model.get_relation(relation_name, relation_id) + except ModelError: + return + + def _short_uuid_from_secret_label(self, secret_label: str) -> str | None: + """Retrieve the relation that belongs to a secret label.""" + contents = secret_label.split('.') + + if not (contents and len(contents) >= 5): + return + + return contents[2] + + +class ResourceProviderEventHandler(EventHandlers, Generic[TRequirerCommonModel]): + """Event Handler for resource provider.""" + + on = ResourceProvidesEvents[TRequirerCommonModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + request_model: type[TRequirerCommonModel], + unique_key: str = '', + mtls_enabled: bool = False, + bulk_event: bool = False, + status_schema_path: OptionalPathLike = None, + ): + """Builds a resource provider event handler. + + Args: + charm: The charm. + relation_name: The relation name this event handler is listening to. + request_model: The request model that is expected to be received. + unique_key: An optional unique key for that object. + mtls_enabled: If True, means the server supports MTLS integration. + bulk_event: If this is true, only one event will be emitted with all requests in the case of a v1 requirer. + status_schema_path: Path to the JSON file defining status/error codes and their definitions. 
+ """ + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.app + self.request_model = request_model + self.interface = OpsRelationRepositoryInterface(charm.model, relation_name, request_model) + self.mtls_enabled = mtls_enabled + self.bulk_event = bulk_event + + self._status_schema = ( + {} if not status_schema_path else self._load_status_schema(Path(status_schema_path)) + ) + + def _load_status_schema(self, schema_path: Path) -> dict[int, RelationStatus]: + """Load JSON schema defining status codes and their details. + + Args: + schema_path: JSON schema file path. + + Raises: + FileNotFoundError: If the provided path is invalid/inaccessible. + + Returns: + dict[int, RelationStatusDict]: Mapping of status code to RelationStatus data objects. + """ + if not schema_path.exists(): + raise FileNotFoundError(f"Can't locate status schema file: {schema_path}") + + content = json.load(open(schema_path)) + + return {s['code']: RelationStatus(**s) for s in content.get('statuses', [])} + + @staticmethod + def _validate_diff(event: RelationEvent, _diff: Diff) -> None: + """Validates that entity information is not changed after relation is established. + + - When entity-type changes, backwards compatibility is broken. + - When extra-user-roles changes, role membership checks become incredibly complex. + - When extra-group-roles changes, role membership checks become incredibly complex. 
+ """ + if not isinstance(event, RelationChangedEvent): + return + + for key in [ + 'resource', + 'entity-type', + 'extra-user-roles', + 'extra-group-roles', + ]: + if key in _diff.changed: + raise ValueError(f'Cannot change {key} after relation has already been created') + + def _dispatch_events(self, event: RelationEvent, _diff: Diff, request: RequirerCommonModel): + if self.mtls_enabled and 'secret-mtls' in _diff.added: + self.on.mtls_cert_updated.emit( + event.relation, app=event.app, unit=event.unit, request=request, old_mtls_cert=None + ) + return + # Emit a resource requested event if the setup key (resource name) + # was added to the relation databag, but the entity-type key was not. + if resource_added(_diff) and 'entity-type' not in _diff.added: + self.on.resource_requested.emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit an entity requested event if the setup key (resource name) + # was added to the relation databag, in addition to the entity-type key. + if resource_added(_diff) and 'entity-type' in _diff.added: + self.on.resource_entity_requested.emit( + event.relation, + app=event.app, + unit=event.unit, + request=request, + ) + # To avoid unnecessary application restarts do not trigger other events. + return + + # Emit a permissions changed event if the setup key (resource name) + # was added to the relation databag, and the entity-permissions key changed. + if ( + not resource_added(_diff) + and 'entity-type' not in _diff.added + and ('entity-permissions' in _diff.added or 'entity-permissions' in _diff.changed) + ): + self.on.resource_entity_permissions_changed.emit( + event.relation, app=event.app, unit=event.unit, request=request + ) + # To avoid unnecessary application restarts do not trigger other events. 
+ return + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request: RequirerCommonModel, + ): + _diff = self.compute_diff(event.relation, request, repository) + + self._validate_diff(event, _diff) + self._dispatch_events(event, _diff, request) + + def _handle_bulk_event( + self, + event: RelationChangedEvent, + repository: AbstractRepository, + request_model: RequirerDataContractV1[TRequirerCommonModel], + ): + """Validate all the diffs, then dispatch the bulk event AND THEN stores the diff. + + This allows for the developer to process the diff and store it themselves + """ + for request in request_model.requests: + # Compute the diff without storing it so we can validate the diffs. + _diff = self.compute_diff(event.relation, request, repository, store=False) + self._validate_diff(event, _diff) + + self.on.bulk_resources_requested.emit( + event.relation, app=event.app, unit=event.unit, requests=request_model.requests + ) + + # Store all the diffs if they were not already stored. 
+ for request in request_model.requests: + new_data = request.model_dump( + mode='json', + exclude={'data'}, + context={'repository': repository}, + exclude_none=True, + exclude_defaults=True, + ) + store_new_data(event.relation, self.component, new_data, request.request_id) + + @override + def _on_secret_changed_event(self, event: SecretChangedEvent) -> None: + if not self.mtls_enabled: + logger.info('MTLS is disabled, exiting early.') + return + if not event.secret.label: + return + + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.name != self.relation_name: + logging.info('Secret changed on wrong relation.') + return + + try: + event.secret.get_info() + logging.info('Secret changed event ignored for Secret Owner') + return + except SecretNotFoundError: + pass + + remote_unit = self.get_remote_unit(relation) + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field('version') or 'v0' + + old_mtls_cert = event.secret.get_content().get('mtls-cert') + logger.info('mtls-cert-updated') + + # V0, just fire the event. + if version == 'v0': + request = build_model(repository, RequirerDataContractV0) + # V1, find the corresponding request. 
+ else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if not short_uuid: + return + for _request in request_model.requests: + if _request.request_id == short_uuid: + request = _request + break + else: + logger.info(f'Unknown request id {short_uuid}') + return + + self.on.mtls_cert_updated.emit( + relation, + app=relation.app, + unit=remote_unit, + request=request, + old_mtls_cert=old_mtls_cert, + ) + + @override + def _on_relation_changed_event(self, event: RelationChangedEvent): + if not self.charm.unit.is_leader(): + return + + repository = OpsRelationRepository( + self.model, event.relation, component=event.relation.app + ) + + # Don't do anything until we get some data + if not repository.get_data(): + return + + version = repository.get_field('version') or 'v0' + if version == 'v0': + request_model = build_model(repository, RequirerDataContractV0) + old_name = request_model.original_field + request_model.request_id = None # For safety, let's ensure that we don't have a model. + self._handle_event(event, repository, request_model) + logger.info( + f"Patching databag for v0 compatibility: replacing 'resource' by '{old_name}'" + ) + self.interface.repository( + event.relation.id, + ).write_field(old_name, request_model.resource) + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + if self.bulk_event: + self._handle_bulk_event(event, repository, request_model) + return + for request in request_model.requests: + self._handle_event(event, repository, request) + + def set_response(self, relation_id: int, response: ResourceProviderModel): + r"""Sets a response in the databag. + + This function will react accordingly to the version number. + If the version number is v0, then we write the data directly in the databag. + If the version number is v1, then we write the data in the list of responses. + + /!\ This function updates a response if it was already present in the databag! 
+ + Args: + relation_id: The specific relation id for that event. + response: The response to write in the databag. + """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError('Missing relation.') + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field('version') or 'v0' + + if version == 'v0': + # Ensure the request_id is None + response.request_id = None + self.interface.write_model( + relation_id, response, context={'version': 'v0'} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[response.__class__]) + + # for/else syntax allows to execute the else if break was not called. + # This allows us to update or append easily. + for index, _response in enumerate(model.requests): + if _response.request_id == response.request_id: + model.requests[index].update(response) + break + else: + model.requests.append(response) + + self.interface.write_model(relation_id, model) + return + + def set_responses(self, relation_id: int, responses: list[ResourceProviderModel]) -> None: + r"""Sets a list of responses in the databag. + + This function will react accordingly to the version number. + If the version number is v0, then we write the data directly in the databag. + If the version number is v1, then we write the data in the list of responses. + + /!\ This function updates a response if it was already present in the databag! + + Args: + relation_id: The specific relation id for that event. + responses: The response to write in the databag. 
+ """ + if not self.charm.unit.is_leader(): + return + + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + assert len(responses) >= 1, 'List of responses is empty' + + if not relation: + raise ValueError('Missing relation.') + + repository = OpsRelationRepository(self.model, relation, component=relation.app) + version = repository.get_field('version') or 'v0' + + if version == 'v0': + assert len(responses) == 1, 'V0 only expects one response' + # Ensure the request_id is None + response = responses[0] + response.request_id = None + self.interface.write_model( + relation_id, response, context={'version': 'v0'} + ) # {"database": "database-name", "secret-user": "uri", ...} + return + + model = self.interface.build_model(relation_id, DataContractV1[responses[0].__class__]) + + response_map: dict[str, ResourceProviderModel] = { + response.request_id: response for response in responses if response.request_id + } + + # Update all the already existing keys + for index, _response in enumerate(model.requests): + assert _response.request_id, 'Missing request id in the response' + response = response_map.get(_response.request_id) + if response: + model.requests[index].update(response) + del response_map[_response.request_id] + + # Add the missing keys + model.requests += list(response_map.values()) + + self.interface.write_model(relation_id, model) + return + + def requests(self, relation: Relation) -> Sequence[RequirerCommonModel]: + """Returns the list of requests that we got.""" + repository = OpsRelationRepository(self.model, relation, component=relation.app) + + # Don't do anything until we get some data + if not repository.get_data(): + return [] + + version = repository.get_field('version') or 'v0' + if version == 'v0': + request_model = build_model(repository, RequirerDataContractV0) + request_model.request_id = None # For safety, let's ensure that we don't have a model. 
+ return [request_model] + else: + request_model = build_model(repository, RequirerDataContractV1[self.request_model]) + return request_model.requests + + def responses( + self, relation: Relation, model: type[ResourceProviderModel] + ) -> list[ResourceProviderModel]: + """Returns the list of responses that we currently have.""" + repository = self.interface.repository(relation.id, component=relation.app) + + version = repository.get_field('version') or 'v0' + if version == 'v0': + # Ensure the request_id is None + return [self.interface.build_model(relation.id, DataContractV0)] + + return self.interface.build_model(relation.id, DataContractV1[model]).requests + + @overload + def raise_status(self, relation_id: int, status: int) -> None: ... + + @overload + def raise_status(self, relation_id: int, status: RelationStatusDict) -> None: ... + + @overload + def raise_status(self, relation_id: int, status: RelationStatus) -> None: ... + + def raise_status( + self, relation_id: int, status: RelationStatus | RelationStatusDict | int + ) -> None: + """Raise a status on the relation. Can only be called on leader units. + + Args: + relation_id (int): the identifier for a particular relation. + status (RelationStatus | RelationStatusDict | int): A representation of the status being raised, + which could be either a RelationStatus, an appropriate dict, or the numeric status code. + + Raises: + ValueError: If the status provided is not correctly formatted. + """ + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError('Missing relation.') + + if isinstance(status, int): + # we expect the status schema to be defined in this case. 
+ if status not in self._status_schema: + raise KeyError(f'Status code [{status}] not defined.') + _status = self._status_schema[status] + elif isinstance(status, dict): + _status = RelationStatus(**status) + elif isinstance(status, RelationStatus): + _status = status + else: + raise ValueError( + 'The status should be either a RelationStatus, an appropriate dict, or the numeric status code.' + ) + + statuses = self.get_statuses(relation_id) + statuses.update({_status.code: _status}) + serialized = json.dumps([statuses[k].model_dump() for k in sorted(statuses)]) + + repository = OpsRelationRepository(self.model, relation, component=self.charm.app) + repository.write_field(STATUS_FIELD, serialized) + + def resolve_status(self, relation_id: int, status_code: int) -> None: + """Set a previously raised status as resolved. + + Args: + relation_id (int): the identifier for a particular relation. + status_code (int): the numeric code of the resolved status. + """ + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError('Missing relation.') + + statuses = self.get_statuses(relation_id) + if status_code not in statuses: + logger.error(f'Status [{status_code}] has never been raised before.') + return + + statuses.pop(status_code) + serialized = json.dumps([statuses[k].model_dump() for k in sorted(statuses)]) + + repository = OpsRelationRepository(self.model, relation, component=self.charm.app) + repository.write_field(STATUS_FIELD, serialized) + + def clear_statuses(self, relation_id: int) -> None: + """Clear all previously raised statuses. + + Args: + relation_id (int): the identifier for a particular relation. 
+ """ + relation = self.charm.model.get_relation(self.relation_name, relation_id) + + if not relation: + raise ValueError('Missing relation.') + + repository = OpsRelationRepository(self.model, relation, component=self.charm.app) + repository.delete_field(STATUS_FIELD) + + +class ResourceRequirerEventHandler(EventHandlers, Generic[TResourceProviderModel]): + """Event Handler for resource requirer.""" + + on = ResourceRequiresEvents[TResourceProviderModel]() # type: ignore[reportAssignmentType] + + def __init__( + self, + charm: CharmBase, + relation_name: str, + requests: list[RequirerCommonModel], + response_model: type[TResourceProviderModel], + unique_key: str = '', + relation_aliases: list[str] | None = None, + ): + super().__init__(charm, relation_name, unique_key) + self.component = self.charm.unit + self.relation_aliases = relation_aliases + self._requests = requests + self.response_model = DataContractV1[response_model] + self.interface: OpsRelationRepositoryInterface[DataContractV1[TResourceProviderModel]] = ( + OpsRelationRepositoryInterface(charm.model, relation_name, self.response_model) + ) + + if requests: + self._request_model = requests[0].__class__ + else: + self._request_model = RequirerCommonModel + + # First, check that the number of aliases matches the one defined in charm metadata. + if self.relation_aliases: + relation_connection_limit = self.charm.meta.requires[relation_name].limit + if len(self.relation_aliases) != relation_connection_limit: + raise ValueError( + f'Invalid number of aliases, expected {relation_connection_limit}, received {len(self.relation_aliases)}' + ) + + # Created custom event names for each alias. 
+ if self.relation_aliases: + for relation_alias in self.relation_aliases: + self.on.define_event( + f'{relation_alias}_resource_created', + ResourceCreatedEvent, + ) + self.on.define_event( + f'{relation_alias}_resource_entity_created', + ResourceEntityCreatedEvent, + ) + self.on.define_event( + f'{relation_alias}_endpoints_changed', + ResourceEndpointsChangedEvent, + ) + self.on.define_event( + f'{relation_alias}_read_only_endpoints_changed', + ResourceReadOnlyEndpointsChangedEvent, + ) + + ############################################################################## + # Extra useful functions + ############################################################################## + def is_resource_created( + self, + rel_id: int, + request_id: str, + model: DataContractV1[TResourceProviderModel] | None = None, + ) -> bool: + """Checks if a resource has been created or not. + + Args: + rel_id: The relation id to check. + request_id: The specific request id to check. + model: An optional model to use (for performances). + """ + if not model: + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + for request in model.requests: + if request.request_id == request_id: + return request.secret_user is not None or request.secret_entity is not None + return False + + def are_all_resources_created(self, rel_id: int) -> bool: + """Checks that all resources have been created for a relation. + + Args: + rel_id: The relation id to check. 
+ """ + relation = self.model.get_relation(self.relation_name, rel_id) + if not relation: + return False + model = self.interface.build_model(relation_id=rel_id, component=relation.app) + return all( + self.is_resource_created(rel_id, request.request_id, model) + for request in model.requests + if request.request_id + ) + + @staticmethod + def _is_pg_plugin_enabled(plugin: str, connection_string: str) -> bool: + # Actual checking method. + # No need to check for psycopg here, it's been checked before. + if not psycopg2: + return False + + try: + with psycopg2.connect(connection_string) as connection: + with connection.cursor() as cursor: + cursor.execute( + 'SELECT TRUE FROM pg_extension WHERE extname=%s::text;', (plugin,) + ) + return cursor.fetchone() is not None + except psycopg2.Error as e: + logger.exception( + f'failed to check whether {plugin} plugin is enabled in the database: %s', + str(e), + ) + return False + + def is_postgresql_plugin_enabled(self, plugin: str, relation_index: int = 0) -> bool: + """Returns whether a plugin is enabled in the database. + + Args: + plugin: name of the plugin to check. + relation_index: Optional index to check the database (default: 0 - first relation). + """ + if not psycopg2: + return False + + # Can't check a non existing relation. 
+ if len(self.relations) <= relation_index: + return False + + relation = self.relations[relation_index] + model = self.interface.build_model(relation_id=relation.id, component=relation.app) + for request in model.requests: + if request.endpoints and request.username and request.password: + host = request.endpoints.split(':')[0] + username = request.username + password = request.password + + connection_string = f"host='{host}' dbname='{request.resource}' user='{username}' password='{password}'" + return self._is_pg_plugin_enabled(plugin, connection_string) + logger.info('No valid request to use to check for plugin.') + return False + + ############################################################################## + # Helpers for aliases + ############################################################################## + + def _assign_relation_alias(self, relation_id: int) -> None: + """Assigns an alias to a relation. + + This function writes in the unit data bag. + + Args: + relation_id: the identifier for a particular relation. + """ + # If no aliases were provided, return immediately. + if not self.relation_aliases: + return + + # Return if an alias was already assigned to this relation + # (like when there are more than one unit joining the relation). + relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation and relation.data[self.charm.unit].get('alias'): + return + + # Retrieve the available aliases (the ones that weren't assigned to any relation). + available_aliases = self.relation_aliases[:] + for relation in self.charm.model.relations[self.relation_name]: + alias = relation.data[self.charm.unit].get('alias') + if alias: + logger.debug('Alias %s was already assigned to relation %d', alias, relation.id) + available_aliases.remove(alias) + + # Set the alias in the unit relation databag of the specific relation. 
+ relation = self.charm.model.get_relation(self.relation_name, relation_id) + if relation: + relation.data[self.charm.unit].update({'alias': available_aliases[0]}) + + # We need to set relation alias also on the application level so, + # it will be accessible in show-unit juju command, executed for a consumer application unit + if relation and self.charm.unit.is_leader(): + relation.data[self.charm.app].update({'alias': available_aliases[0]}) + + def _emit_aliased_event( + self, event: RelationChangedEvent, event_name: str, response: ResourceProviderModel + ): + """Emit all aliased events.""" + alias = self._get_relation_alias(event.relation.id) + if alias: + getattr(self.on, f'{alias}_{event_name}').emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + + def _get_relation_alias(self, relation_id: int) -> str | None: + """Gets the relation alias for a relation id.""" + for relation in self.charm.model.relations[self.relation_name]: + if relation.id == relation_id: + return relation.data[self.charm.unit].get('alias') + return None + + ############################################################################## + # Event Handlers + ############################################################################## + + def _on_secret_changed_event(self, event: SecretChangedEvent): + """Event notifying about a new value of a secret.""" + if not event.secret.label: + return + relation = self._relation_from_secret_label(event.secret.label) + short_uuid = self._short_uuid_from_secret_label(event.secret.label) + + if not relation: + logging.info( + f"Received secret {event.secret.label} but couldn't parse, seems irrelevant" + ) + return + + if relation.name != self.relation_name: + logging.info('Secret changed on wrong relation.') + return + + try: + event.secret.get_info() + logging.info('Secret changed event ignored for Secret Owner') + return + except SecretNotFoundError: + pass + + remote_unit = self.get_remote_unit(relation) + + response_model = 
self.interface.build_model(relation.id, component=relation.app) + if not short_uuid: + return + for _response in response_model.requests: + if _response.request_id == short_uuid: + response = _response + break + else: + logger.info(f'Unknown request id {short_uuid}') + return + + self.on.authentication_updated.emit( + relation, + app=relation.app, + unit=remote_unit, + response=response, + ) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Event emitted when the database relation is created.""" + super()._on_relation_created_event(event) + + repository = OpsRelationRepository(self.model, event.relation, self.charm.app) + + # If relations aliases were provided, assign one to the relation. + self._assign_relation_alias(event.relation.id) + + if not self.charm.unit.is_leader(): + return + + # Generate all requests id so they are saved already. + for request in self._requests: + request.request_id = gen_hash(request.resource, request.salt) + + full_request = RequirerDataContractV1[self._request_model]( + version='v1', requests=self._requests + ) + write_model(repository, full_request) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Event emitted when the database relation has changed.""" + is_subordinate = False + remote_unit_data = None + for key in event.relation.data.keys(): + if isinstance(key, Unit) and not key.name.startswith(self.charm.app.name): + remote_unit_data = event.relation.data[key] + elif isinstance(key, Application) and key.name != self.charm.app.name: + is_subordinate = event.relation.data[key].get('subordinated') == 'true' + + if is_subordinate: + if not remote_unit_data or remote_unit_data.get('state') != 'ready': + return + + repository = self.interface.repository(event.relation.id, event.app) + response_model = self.interface.build_model(event.relation.id, component=event.app) + + if not response_model.requests: + logger.info('Still waiting for data.') + return + + data = 
repository.get_field('data') + if not data: + logger.info('Missing data to compute diffs') + return + + request_map = TypeAdapter(dict[str, self._request_model]).validate_json(data) + + for response in response_model.requests: + response_id = response.request_id or gen_hash(response.resource, response.salt) + request = request_map.get(response_id, None) + if not request: + raise ValueError( + f'No request matching the response with response_id {response_id}' + ) + self._handle_event(event, repository, request, response) + + # Retrieve old statuses from "data" + old_data = json.loads(data or '{}') + old_statuses = old_data.get(STATUS_FIELD, {}) + previous_codes = {int(k) for k in old_statuses.keys()} + + # Compute current statuses + current_statuses = json.loads(repository.get_field(STATUS_FIELD) or '[]') + current_codes = {status.get('code') for status in current_statuses} + + # Detect changes + raised = current_codes - previous_codes + resolved = previous_codes - current_codes + + for status_code in raised: + logger.debug(f'Status [{status_code}] raised') + _status = next(s for s in current_statuses if s['code'] == status_code) + _status_instance = RelationStatus(**_status) + self.on.status_raised.emit( + event.relation, + status=_status_instance, + app=event.app, + unit=event.unit, + ) + + for status_code in resolved: + logger.debug(f'Status [{status_code}] resolved') + # Because JSON keys are always string, we should convert the int code to str. 
+ _status = old_statuses[str(status_code)] + _status_instance = RelationStatus(**_status) + self.on.status_resolved.emit( + event.relation, + status=_status_instance, + app=event.app, + unit=event.unit, + ) + + if not any([raised, resolved]): + return + + # Store new state of the statuses in the "data" field + data = get_encoded_dict(event.relation, self.component, 'data') or {} + store_new_data( + event.relation, + self.component, + data, + short_uuid=None, + global_data={ + STATUS_FIELD: { + code: status.model_dump() + for code, status in self.get_statuses(event.relation.id).items() + } + }, + ) + + ############################################################################## + # Methods to handle specificities of relation events + ############################################################################## + + @override + def _handle_event( + self, + event: RelationChangedEvent, + repository: OpsRelationRepository, + request: RequirerCommonModel, + response: ResourceProviderModel, + ): + _diff = self.compute_diff(event.relation, response, repository, store=True) + + for newval in _diff.added: + if secret_group := response._get_secret_field(newval): + uri = getattr(response, newval.replace('-', '_')) + repository.register_secret(uri, secret_group, response.request_id) + + if 'secret-user' in _diff.added and not request.entity_type: + logger.info(f'resource {response.resource} created at {datetime.now()}') + self.on.resource_created.emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, 'resource_created', response) + return + + if 'secret-entity' in _diff.added and request.entity_type: + logger.info(f'entity {response.entity_name} created at {datetime.now()}') + self.on.resource_entity_created.emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, 'resource_entity_created', response) + return + + if 'endpoints' in _diff.added or 'endpoints' in 
_diff.changed: + logger.info(f'endpoints changed at {datetime.now()}') + self.on.endpoints_changed.emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, 'endpoints_changed', response) + return + + if 'read-only-endpoints' in _diff.added or 'read-only-endpoints' in _diff.changed: + logger.info(f'read-only-endpoints changed at {datetime.now()}') + self.on.read_only_endpoints_changed.emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, 'read_only_endpoints_changed', response) + return + + if 'secret-tls' in _diff.added or 'secret-tls' in _diff.changed: + logger.info(f'auth updated for {response.resource} at {datetime.now()}') + self.on.authentication_updated.emit( + event.relation, app=event.app, unit=event.unit, response=response + ) + self._emit_aliased_event(event, 'authentication_updated', response) + return diff --git a/rollingops/src/charmlibs/rollingops/_etcd_rollingops.py b/rollingops/src/charmlibs/rollingops/_etcd_rollingops.py new file mode 100644 index 000000000..7ff89d384 --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/_etcd_rollingops.py @@ -0,0 +1,43 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import argparse +import logging +import subprocess +import time + +logger = logging.getLogger(__name__) + + +def main(): + """Juju hook event dispatcher.""" + parser = argparse.ArgumentParser() + parser.add_argument('--run-cmd', required=True) + parser.add_argument('--unit-name', required=True) + parser.add_argument('--charm-dir', required=True) + parser.add_argument('--owner', required=True) + args = parser.parse_args() + + time.sleep(10) + + dispatch_sub_cmd = ( + f'JUJU_DISPATCH_PATH=hooks/rollingops_lock_granted {args.charm_dir}/dispatch' + ) + res = subprocess.run([args.run_cmd, '-u', args.unit_name, dispatch_sub_cmd]) + res.check_returncode() + + +if __name__ == '__main__': + main() diff --git a/rollingops/src/charmlibs/rollingops/_etcdctl.py b/rollingops/src/charmlibs/rollingops/_etcdctl.py new file mode 100644 index 000000000..d36f7ef35 --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/_etcdctl.py @@ -0,0 +1,235 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import logging +import os +import subprocess +from pathlib import Path + +from charmlibs.rollingops._models import RollingOpsEtcdNotConfiguredError + +logger = logging.getLogger(__name__) + + +class EtcdCtl: + """Class for interacting with etcd through the etcdctl CLI. + + This class encapsulates configuration and execution of the tool. 
It manages
+    the environment variables required for connecting to an etcd cluster,
+    including TLS configuration, and provides convenience methods for
+    executing commands and retrieving structured results.
+    """
+
+    BASE_DIR = Path('/var/lib/rollingops/etcd')
+    SERVER_CA = BASE_DIR / 'server-ca.pem'
+    ENV_FILE = BASE_DIR / 'etcdctl.env'
+
+    @classmethod
+    def write_trusted_server_ca(cls, tls_ca_pem: str) -> None:
+        """Persist the etcd server CA certificate to disk.
+
+        Args:
+            tls_ca_pem: PEM-encoded CA certificate.
+        """
+        cls.BASE_DIR.mkdir(parents=True, exist_ok=True)
+
+        # NOTE(review): a falsy tls_ca_pem results in an empty CA file on
+        # disk, which ensure_initialized() will still treat as configured —
+        # confirm callers never pass '' here.
+        cls.SERVER_CA.write_text(tls_ca_pem or '')
+        os.chmod(cls.SERVER_CA, 0o644)
+
+    @classmethod
+    def write_env_file(
+        cls,
+        endpoints: str,
+        client_cert_path: Path,
+        client_key_path: Path,
+    ) -> None:
+        """Create or update the etcdctl environment configuration file.
+
+        This method writes an environment file containing the required
+        ETCDCTL_* variables used by etcdctl to connect to the etcd cluster.
+
+        Args:
+            endpoints: Comma-separated list of etcd endpoints.
+            client_cert_path: Path to the client certificate.
+            client_key_path: Path to the client private key.
+        """
+        cls.BASE_DIR.mkdir(parents=True, exist_ok=True)
+
+        lines = [
+            'export ETCDCTL_API="3"',
+            f'export ETCDCTL_ENDPOINTS="{endpoints}"',
+            f'export ETCDCTL_CACERT="{cls.SERVER_CA}"',
+            f'export ETCDCTL_CERT="{client_cert_path}"',
+            f'export ETCDCTL_KEY="{client_key_path}"',
+            '',
+        ]
+
+        cls.ENV_FILE.write_text('\n'.join(lines))
+        # 0o600: the env file references the client key, keep it owner-only.
+        os.chmod(cls.ENV_FILE, 0o600)
+
+    @classmethod
+    def load_env(cls) -> dict[str, str]:
+        """Load etcdctl environment variables from the env file.
+
+        Parses the generated environment file and extracts ETCDCTL_*
+        variables so they can be injected into subprocess environments.
+
+        Returns:
+            A dictionary containing environment variables to pass to
+            subprocess calls.
+
+        Raises:
+            RollingOpsEtcdNotConfiguredError: If the environment file does not exist.
+        """
+        cls.ensure_initialized()
+
+        env = os.environ.copy()
+
+        for line in cls.ENV_FILE.read_text().splitlines():
+            line = line.strip()
+            if not line or line.startswith('#'):
+                continue
+
+            # Strip the leading 'export ' written by write_env_file.
+            if line.startswith('export '):
+                line = line[len('export ') :].strip()
+
+            if not line.startswith('ETCDCTL_'):
+                continue
+
+            key, value = line.split('=', 1)
+            env[key] = value.strip().strip('"').strip("'")
+
+        env.setdefault('ETCDCTL_API', '3')
+        return env
+
+    @classmethod
+    def ensure_initialized(cls):
+        """Checks whether the environment file for etcdctl is setup."""
+        if not cls.ENV_FILE.exists():
+            raise RollingOpsEtcdNotConfiguredError(
+                f'etcdctl env file does not exist: {cls.ENV_FILE}'
+            )
+        if not cls.SERVER_CA.exists():
+            raise RollingOpsEtcdNotConfiguredError(
+                f'etcdctl server CA file does not exist: {cls.SERVER_CA}'
+            )
+
+    @classmethod
+    def cleanup(cls) -> None:
+        """Removes the etcdctl env file and the trusted etcd server CA."""
+        cls.SERVER_CA.unlink(missing_ok=True)
+        cls.ENV_FILE.unlink(missing_ok=True)
+
+    @classmethod
+    def run(
+        cls, args: list[str], check: bool = True, capture: bool = True
+    ) -> subprocess.CompletedProcess[str]:
+        """Execute an etcdctl command.
+
+        Args:
+            args: List of arguments to pass to etcdctl.
+            check: If True, raise an exception on non-zero exit status.
+            capture: Whether to capture stdout and stderr.
+
+        Returns:
+            A CompletedProcess object containing the result.
+        """
+        cls.ensure_initialized()
+        cmd = ['etcdctl', *args]
+        return subprocess.run(
+            cmd, env=cls.load_env(), check=check, text=True, capture_output=capture
+        )
+
+    @classmethod
+    def get_first_key_value(cls, key_prefix: str) -> tuple[str, dict[str, str]] | None:
+        """Retrieve the first key and value under a given prefix.
+
+        Args:
+            key_prefix: Key prefix to search for.
+ + Returns: + A tuple containing: + - The key string + - The parsed JSON value as a dictionary + + Returns None if no key exists or the command fails. + """ + res = cls.run( + ['get', key_prefix, '--prefix', '--limit=1'], + check=False, + ) + + if res.returncode != 0: + return None + + out = res.stdout.strip().splitlines() + if len(out) < 2: + return None + + return out[0], json.loads(out[1]) + + @classmethod + def get_last_key_value(cls, key_prefix: str) -> tuple[str, dict[str, str]] | None: + """Retrieve the last key and value under a given prefix. + + Args: + key_prefix: Key prefix to search for. + + Returns: + A tuple containing: + - The key string + - The parsed JSON value as a dictionary + + Returns None if no key exists or the command fails. + """ + res = cls.run( + ['get', key_prefix, '--prefix', '--sort-by=KEY', '--order=DESCEND', '--limit=1'], + check=False, + ) + if res.returncode != 0: + return None + out = res.stdout.strip().splitlines() + if len(out) < 2: + return None + + return out[0], json.loads(out[1]) + + @classmethod + def txn(cls, txn: str) -> bool: + """Execute an etcd transaction. + + The transaction string should follow the etcdctl transaction format + where comparison statements are followed by operations. + + Args: + txn: The transaction specification passed to `etcdctl txn`. + + Returns: + True if the transaction succeeded, otherwise False. + """ + cls.ensure_initialized() + res = subprocess.run( + ['bash', '-lc', f"printf %s '{txn}' | etcdctl txn"], + text=True, + env=cls.load_env(), + capture_output=True, + check=False, + ) + + logger.debug('etcd txn result: %s', res.stdout) + return 'SUCCESS' in res.stdout diff --git a/rollingops/src/charmlibs/rollingops/_manager.py b/rollingops/src/charmlibs/rollingops/_manager.py new file mode 100644 index 000000000..976ce995e --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/_manager.py @@ -0,0 +1,190 @@ +# Copyright 2026 Canonical Ltd. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import subprocess
+from typing import Any
+
+from ops import Relation
+from ops.charm import CharmBase, InstallEvent, RelationBrokenEvent, RelationDepartedEvent
+from ops.framework import EventBase, Object
+
+from charmlibs.rollingops._etcdctl import EtcdCtl
+from charmlibs.rollingops._models import (
+    RollingOpsEtcdNotConfiguredError,
+    RollingOpsKeys,
+    RollingOpsNoEtcdRelationError,
+)
+from charmlibs.rollingops._relations import EtcdRequiresV1, SharedClientCertificateManager
+from charmlibs.rollingops._worker import EtcdRollingOpsAsyncWorker
+
+logger = logging.getLogger(__name__)
+
+
+class RollingOpsLockGrantedEvent(EventBase):
+    """Custom event emitted when the background worker grants the lock."""
+
+
+class EtcdRollingOpsManager(Object):
+    """Rolling ops manager for clusters."""
+
+    def __init__(
+        self,
+        charm: CharmBase,
+        peer_relation_name: str,
+        etcd_relation_name: str,
+        cluster_id: str,
+        callback_targets: dict[str, Any],
+    ):
+        """Register our custom events.
+
+        params:
+            charm: the charm we are attaching this to.
+            peer_relation_name: peer relation used for rolling ops.
+            etcd_relation_name: the relation to integrate with etcd.
+            cluster_id: unique identifier for the cluster
+            callback_targets: mapping from callback_id -> callable.
+        """
+        super().__init__(charm, 'rolling-ops-manager')
+        self._charm = charm
+        self.peer_relation_name = peer_relation_name
+        self.etcd_relation_name = etcd_relation_name
+        self.callback_targets = callback_targets
+        self.charm_dir = charm.charm_dir
+
+        # Unit names contain '/', which is normalized to '-' so the owner id
+        # is safe to embed in etcd key paths.
+        owner = f'{self.model.uuid}-{self.model.unit.name}'.replace('/', '-')
+        self.worker = EtcdRollingOpsAsyncWorker(
+            charm, peer_relation_name=peer_relation_name, owner=owner
+        )
+        self.keys = RollingOpsKeys.for_owner(cluster_id, owner)
+
+        self.shared_certificates = SharedClientCertificateManager(
+            charm,
+            peer_relation_name=peer_relation_name,
+        )
+
+        self.etcd = EtcdRequiresV1(
+            charm,
+            relation_name=etcd_relation_name,
+            cluster_id=self.keys.cluster_prefix,
+            shared_certificates=self.shared_certificates,
+        )
+
+        # The worker re-dispatches the charm with this custom event name
+        # (see _etcd_rollingops.py: hooks/rollingops_lock_granted).
+        charm.on.define_event('rollingops_lock_granted', RollingOpsLockGrantedEvent)
+
+        self.framework.observe(
+            charm.on[self.peer_relation_name].relation_departed, self._on_relation_departed
+        )
+        self.framework.observe(
+            charm.on[self.etcd_relation_name].relation_broken, self._on_relation_broken
+        )
+        self.framework.observe(charm.on.rollingops_lock_granted, self._on_rollingop_granted)
+        self.framework.observe(charm.on.install, self._on_install)
+
+    @property
+    def _peer_relation(self) -> Relation | None:
+        """The peer relation used for rolling ops, or None if not yet joined."""
+        return self.model.get_relation(self.peer_relation_name)
+
+    @property
+    def _etcd_relation(self) -> Relation | None:
+        """The etcd relation, or None if not yet joined."""
+        return self.model.get_relation(self.etcd_relation_name)
+
+    def _on_install(self, event: InstallEvent) -> None:
+        """Install the etcd client package providing the etcdctl binary used by EtcdCtl."""
+        subprocess.run(['apt-get', 'update'], check=True)
+        subprocess.run(['apt-get', 'install', '-y', 'etcd-client'], check=True)
+
+    def _on_rollingop_granted(self, event: RollingOpsLockGrantedEvent) -> None:
+        """Run the lock-granted handler, provided relations and etcd config are in place."""
+        if not self._peer_relation or not self._etcd_relation:
+            return
+        try:
+            EtcdCtl.ensure_initialized()
+        except RollingOpsEtcdNotConfiguredError:
+            return
+        logger.info('Received a rolling-op lock granted event.')
+        self._on_run_with_lock()
+
+    def
_on_relation_departed(self, event: RelationDepartedEvent) -> None: + """Stop the etcd worker process in the current unit.""" + unit = event.departing_unit + if unit == self.model.unit: + self.worker.stop() + + def _on_relation_broken(self, event: RelationBrokenEvent) -> None: + """Stop the etcd worker process in the current unit.""" + self.worker.stop() + + def request_async_lock( + self, + callback_id: str, + kwargs: dict[str, Any] | None = None, + max_retry: int | None = None, + ) -> None: + """Queue a rolling operation and trigger asynchronous lock acquisition. + + This method creates a new operation representing a callback to execute + once the distributed lock is granted. The operation is appended to the + unit's pending operation queue stored in etcd. + + If the operation is successfully enqueued, the background worker process + responsible for acquiring the distributed lock and processing operations + is started. + + Args: + callback_id: Identifier of the registered callback to execute when + the lock is granted. + kwargs: Optional keyword arguments passed to the callback when + executed. Must be JSON-serializable. + max_retry: Maximum number of retries for the operation. + - None: retry indefinitely + - 0: do not retry on failure + + Raises: + ValueError: If the callback_id is not registered or invalid parameters + RollingOpsNoEtcdRelationError: if the etcd relation does not exist + RollingOpsEtcdNotConfiguredError: if etcd client has not been configured yet + """ + if callback_id not in self.callback_targets: + raise ValueError(f'Unknown callback_id: {callback_id}') + + etcd_relation = self.model.get_relation(self.etcd_relation_name) + if not etcd_relation: + raise RollingOpsNoEtcdRelationError + + EtcdCtl.ensure_initialized() + + self.worker.start() + + def _on_run_with_lock(self) -> None: + """Execute the current operation while holding the distributed lock. 
+ + This method is triggered when the worker determines that the current + unit owns the distributed lock. The method retrieves the head operation + from the in-progress queue and executes its registered callback. + + After execution, the operation is moved to the completed queue and its + updated state is persisted. + """ + EtcdCtl.run(['put', self.keys.lock_key, self.keys.owner]) + + proc = EtcdCtl.run(['get', self.keys.lock_key, '--print-value-only'], check=False) + + if proc.returncode != 0: + return + + value = proc.stdout.strip() + if value != self.keys.owner: + logger.info('Callback not executed.') + + callback = self.callback_targets.get('_restart', '') + callback(delay=1) diff --git a/rollingops/src/charmlibs/rollingops/_models.py b/rollingops/src/charmlibs/rollingops/_models.py new file mode 100644 index 000000000..9e4ba57be --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/_models.py @@ -0,0 +1,96 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""etcd rolling ops.""" + +import logging +from dataclasses import dataclass +from enum import StrEnum +from typing import ClassVar + +logger = logging.getLogger(__name__) + + +class RollingOpsNoEtcdRelationError(Exception): + """Raised if we are trying to process a lock, but do not appear to have a relation yet.""" + + +class RollingOpsEtcdUnreachableError(Exception): + """Raised if etcd server is unreachable.""" + + +class RollingOpsEtcdNotConfiguredError(Exception): + """Raised if etcd client has not been configured yet (env file does not exist).""" + + +class OperationResult(StrEnum): + """Callback return values.""" + + RELEASE = 'release' + RETRY_RELEASE = 'retry-release' + RETRY_HOLD = 'retry-hold' + + +@dataclass(frozen=True) +class RollingOpsKeys: + """Collection of etcd key prefixes used for rolling operations. + + Layout: + /rollingops/{cluster_id}/granted-unit + /rollingops/{cluster_id}/{owner}/pending/ + /rollingops/{cluster_id}/{owner}/inprogress/ + /rollingops/{cluster_id}/{owner}/completed/ + + The distributed lock key is cluster-scoped + """ + + ROOT: ClassVar[str] = '/rollingops' + + cluster_id: str + owner: str + + @property + def cluster_prefix(self) -> str: + """Etcd prefix corresponding to the cluster namespace.""" + return f'{self.ROOT}/{self.cluster_id}/' + + @property + def _owner_prefix(self) -> str: + """Etcd prefix for all the queues belonging to an owner.""" + return f'{self.cluster_prefix}{self.owner}' + + @property + def lock_key(self) -> str: + """Etcd key of the lock.""" + return f'{self.cluster_prefix}granted-unit' + + @property + def pending(self) -> str: + """Prefix for operations waiting to be executed.""" + return f'{self._owner_prefix}/pending/' + + @property + def inprogress(self) -> str: + """Prefix for operations currently being executed.""" + return f'{self._owner_prefix}/inprogress/' + + @property + def completed(self) -> str: + """Prefix for operations that have finished execution.""" + return 
f'{self._owner_prefix}/completed/' + + @classmethod + def for_owner(cls, cluster_id: str, owner: str) -> 'RollingOpsKeys': + """Create a set of keys for a given owner on a cluster.""" + return cls(cluster_id=cluster_id, owner=owner) diff --git a/rollingops/src/charmlibs/rollingops/_relations.py b/rollingops/src/charmlibs/rollingops/_relations.py new file mode 100644 index 000000000..6d10d0287 --- /dev/null +++ b/rollingops/src/charmlibs/rollingops/_relations.py @@ -0,0 +1,224 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import logging

from ops import Relation
from ops.charm import (
    CharmBase,
    LeaderElectedEvent,
    RelationBrokenEvent,
    RelationChangedEvent,
    SecretChangedEvent,
)
from ops.framework import Object

from charmlibs.rollingops._certificates import CertificatesManager
from charmlibs.rollingops._dp_interfaces_v1 import (
    RequirerCommonModel,
    ResourceCreatedEvent,
    ResourceEndpointsChangedEvent,
    ResourceProviderModel,
    ResourceRequirerEventHandler,
)
from charmlibs.rollingops._etcdctl import EtcdCtl

logger = logging.getLogger(__name__)
SECRET_FIELD = 'rollingops-client-secret-id'  # noqa: S105


class SharedClientCertificateManager(Object):
    """Manage the shared rollingops client certificate via peer relation secret."""

    def __init__(self, charm: CharmBase, peer_relation_name: str) -> None:
        super().__init__(charm, 'shared-client-certificate')
        self.charm = charm
        self.peer_relation_name = peer_relation_name

        observe = self.framework.observe
        observe(charm.on.leader_elected, self._on_leader_elected)
        observe(charm.on[peer_relation_name].relation_changed, self._on_peer_relation_changed)
        observe(charm.on.secret_changed, self._on_secret_changed)

    @property
    def _peer_relation(self) -> Relation | None:
        """The peer relation carrying the shared certificate secret, if joined."""
        return self.model.get_relation(self.peer_relation_name)

    def _on_leader_elected(self, event: LeaderElectedEvent) -> None:
        self.create_and_share_certificate()

    def _on_secret_changed(self, event: SecretChangedEvent) -> None:
        # if event.secret.label == "rollingops-client-cert":
        #     self._sync_client_certificate()
        self.sync_to_local_files()

    def _on_peer_relation_changed(self, event: RelationChangedEvent) -> None:
        """React to peer relation changes.

        The leader ensures the shared certificate exists.
        All units try to persist the shared certificate locally if available.
        """
        self.create_and_share_certificate()
        self.sync_to_local_files()

    def create_and_share_certificate(self) -> None:
        """Ensure the application client certificate exists.

        Only the leader generates the certificate and writes it to the peer
        relation application databag.
        """
        peer = self._peer_relation
        if peer is None:
            return
        if not self.model.unit.is_leader():
            return

        app_data = peer.data[self.model.app]
        if app_data.get(SECRET_FIELD):
            return  # already generated and shared

        common_name = f'rollingops-{self.model.uuid}-{self.model.app.name}'
        cert_pem, key_pem, ca_pem = CertificatesManager.generate(common_name)

        content = {
            'client-cert': cert_pem,
            'client-key': key_pem,
            'client-ca': ca_pem,
        }
        secret = self.model.app.add_secret(content)

        app_data.update({SECRET_FIELD: secret.id})  # type: ignore[arg-type]

    def get_shared_certificate(self) -> tuple[str, str, str] | None:
        """Return the client certificate, key and ca from peer app data.

        Returns:
            A tuple of (certificate_pem, key_pem, ca_pem), or None if not yet available.
        """
        peer = self._peer_relation
        if peer is None:
            return None

        secret_id = peer.data[self.model.app].get(SECRET_FIELD)
        if not secret_id:
            return None

        content = self.model.get_secret(id=secret_id).get_content(refresh=True)
        return content['client-cert'], content['client-key'], content['client-ca']

    def sync_to_local_files(self) -> None:
        """Persist shared certificate locally if available."""
        bundle = self.get_shared_certificate()
        if bundle is None:
            logger.debug('Shared rollingops client certificate is not available yet')
            return

        cert_pem, key_pem, ca_pem = bundle
        if CertificatesManager.has_client_cert_key_and_ca(cert_pem, key_pem, ca_pem):
            return  # already persisted, nothing to do

        CertificatesManager.persist_client_cert_key_and_ca(cert_pem, key_pem, ca_pem)

    def get_local_request_cert(self) -> str:
        """Return the cert to place in relation requests."""
        bundle = self.get_shared_certificate()
        if bundle is None:
            return ''
        return bundle[0]


class EtcdRequiresV1(Object):
    """EtcdRequires implementation for data interfaces version 1."""

    def __init__(
        self,
        charm: CharmBase,
        relation_name: str,
        cluster_id: str,
        shared_certificates: SharedClientCertificateManager,
    ) -> None:
        super().__init__(charm, 'requirer-etcd')
        self.charm = charm
        self.cluster_id = cluster_id
        self.shared_certificates = shared_certificates

        self.etcd_interface = ResourceRequirerEventHandler(
            self.charm,
            relation_name=relation_name,
            requests=self.client_requests(),
            response_model=ResourceProviderModel,
        )

        observe = self.framework.observe
        observe(self.etcd_interface.on.endpoints_changed, self._on_endpoints_changed)
        observe(charm.on[relation_name].relation_broken, self._on_relation_broken)
        observe(self.etcd_interface.on.resource_created, self._on_resource_created)

    @property
    def etcd_relation(self) -> Relation | None:
        """Return the etcd relation if present."""
        relations = self.etcd_interface.relations
        if not relations:
            return None
        return relations[0]

    def _on_relation_broken(self, event: RelationBrokenEvent) -> None:
        """Remove the stored information about the etcd server."""
        EtcdCtl.cleanup()

    def _on_endpoints_changed(
        self, event: ResourceEndpointsChangedEvent[ResourceProviderModel]
    ) -> None:
        """Handle etcd client relation data changed event."""
        response = event.response
        logger.info('etcd endpoints changed: %s', response.endpoints)

        if not response.endpoints:
            logger.error('No etcd endpoints available')
            return

        # Certificates must be on disk before the env file references them.
        self.shared_certificates.sync_to_local_files()
        cert_path, key_path = CertificatesManager.client_paths()
        EtcdCtl.write_env_file(
            endpoints=response.endpoints,
            client_cert_path=cert_path,
            client_key_path=key_path,
        )

    def _on_resource_created(self, event: ResourceCreatedEvent[ResourceProviderModel]) -> None:
        """Handle resource created event."""
        response = event.response

        if not response.tls_ca:
            logger.error('No etcd server CA chain available')
            return

        EtcdCtl.write_trusted_server_ca(tls_ca_pem=response.tls_ca)

        if not response.endpoints:
            logger.error('No etcd endpoints available')
        else:
            cert_path, key_path = CertificatesManager.client_paths()
            EtcdCtl.write_env_file(
                endpoints=response.endpoints,
                client_cert_path=cert_path,
                client_key_path=key_path,
            )

        self.shared_certificates.sync_to_local_files()

    def client_requests(self) -> list[RequirerCommonModel]:
        """Return the client requests for the etcd requirer interface."""
        request = RequirerCommonModel(
            resource=self.cluster_id,
            mtls_cert=self.shared_certificates.get_local_request_cert(),
        )
        return [request]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Single source of truth for the package version: hatch reads this file via
# [tool.hatch.version] in pyproject.toml, so only bump it here.
__version__ = '0.0.0.dev0'
+ +"""etcd rolling ops.""" + +import logging +import os +import signal +import subprocess +from pathlib import Path +from sys import version_info + +from ops import Relation +from ops.charm import CharmBase +from ops.framework import Object + +logger = logging.getLogger(__name__) + + +class EtcdRollingOpsAsyncWorker(Object): + """Spawns and manages the external rolling-ops worker process.""" + + def __init__(self, charm: CharmBase, peer_relation_name: str, owner: str): + super().__init__(charm, 'etcd-ollingops-async-worker') + self._charm = charm + self._peer_relation_name = peer_relation_name + self._run_cmd = '/usr/bin/juju-exec' + self._owner = owner + self._charm_dir = charm.charm_dir + + @property + def _relation(self) -> Relation | None: + return self.model.get_relation(self._peer_relation_name) + + def start(self) -> None: + """Start a new worker process.""" + if self._relation is None: + return + + pid_str = self._relation.data[self.model.unit].get('etcd-rollingops-worker-pid', '') + if pid_str: + try: + pid = int(pid_str) + except ValueError: + pid = -1 + + if self._is_pid_alive(pid): + logger.info( + 'RollingOps worker already running with PID %s; not starting a new one.', pid + ) + return + + # Remove JUJU_CONTEXT_ID so juju-run works from the spawned process + new_env = os.environ.copy() + new_env.pop('JUJU_CONTEXT_ID', None) + + for loc in new_env.get('PYTHONPATH', '').split(':'): + path = Path(loc) + venv_path = ( + path + / '..' + / 'venv' + / 'lib' + / f'python{version_info.major}.{version_info.minor}' + / 'site-packages' + ) + if path.stem == 'lib': + new_env['PYTHONPATH'] = f'{venv_path.resolve()}:{new_env["PYTHONPATH"]}' + break + + worker = ( + self._charm_dir + / 'venv' + / 'lib' + / f'python{version_info.major}.{version_info.minor}' + / 'site-packages' + / 'charmlibs' + / 'rollingops' + / '_etcd_rollingops.py' + ) + + # These files must stay open for the lifetime of the worker process. 
+ log_out = open('/var/log/etcd_rollingops_worker.log', 'a') # noqa: SIM115 + log_err = open('/var/log/etcd_rollingops_worker.err', 'a') # noqa: SIM115 + + pid = subprocess.Popen( + [ + '/usr/bin/python3', + '-u', + str(worker), + '--run-cmd', + self._run_cmd, + '--unit-name', + self.model.unit.name, + '--charm-dir', + str(self._charm_dir), + '--owner', + self._owner, + ], + cwd=str(self._charm_dir), + stdout=log_out, + stderr=log_err, + env=new_env, + ).pid + + self._relation.data[self.model.unit].update({'etcd-rollingops-worker-pid': str(pid)}) + logger.info('Started etcd rollingops worker process with PID %s', pid) + + def _is_pid_alive(self, pid: int) -> bool: + if pid <= 0: + return False + try: + os.kill(pid, 0) + return True + except ProcessLookupError: + return False + except PermissionError: + return True + + def stop(self) -> None: + """Stop the running worker process if it exists.""" + if self._relation is None: + return + pid_str = self._relation.data[self.model.unit].get('etcd-rollingops-worker-pid', '') + if not pid_str: + return + + pid = int(pid_str) + try: + os.kill(pid, signal.SIGINT) + logger.info('Stopped etcd rollingops worker process PID %s', pid) + except OSError: + logger.info('Failed to stop etcd rollingops worker process PID %s', pid) + + self._relation.data[self.model.unit].update({'etcd-rollingops-worker-pid': ''}) diff --git a/rollingops/src/charmlibs/rollingops/py.typed b/rollingops/src/charmlibs/rollingops/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/rollingops/tests/functional/conftest.py b/rollingops/tests/functional/conftest.py new file mode 100644 index 000000000..048308943 --- /dev/null +++ b/rollingops/tests/functional/conftest.py @@ -0,0 +1,15 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Fixtures for functional tests, which interact with the real system, but not with Juju.""" diff --git a/rollingops/tests/functional/test_version.py b/rollingops/tests/functional/test_version.py new file mode 100644 index 000000000..3673995ae --- /dev/null +++ b/rollingops/tests/functional/test_version.py @@ -0,0 +1,21 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Functional tests interacting with the real system, but not with Juju.""" + +from charmlibs import rollingops + + +def test_version(): + assert isinstance(rollingops.__version__, str) diff --git a/rollingops/tests/integration/charms/actions.yaml b/rollingops/tests/integration/charms/actions.yaml new file mode 100644 index 000000000..d514e1d0d --- /dev/null +++ b/rollingops/tests/integration/charms/actions.yaml @@ -0,0 +1,32 @@ +# common actions.yaml file symlinked by these charms +# consider adding an action for each thing you want to test + +restart: + description: Restarts the example service + params: + delay: + description: "Introduce an artificial delay (for testing)." + type: integer + default: 0 + +failed-restart: + description: Example restart with a custom callback function. Used in testing + params: + delay: + description: "Introduce an artificial delay (for testing)." + type: integer + default: 0 + max-retry: + description: "Number of times the operation should be retried." + type: integer + +deferred-restart: + description: Example restart with a custom callback function. Used in testing + params: + delay: + description: "Introduce an artificial delay (for testing)." + type: integer + default: 0 + max-retry: + description: "Number of times the operation should be retried." + type: integer diff --git a/rollingops/tests/integration/charms/common.py b/rollingops/tests/integration/charms/common.py new file mode 100644 index 000000000..795f32812 --- /dev/null +++ b/rollingops/tests/integration/charms/common.py @@ -0,0 +1,135 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
"""Common charm code for integration test charms.

This file is symlinked alongside src/charm.py by these charms.
"""

import json
import logging
import time
from datetime import UTC, datetime
from pathlib import Path
from typing import Any

from ops import ActionEvent, CharmBase, Framework, InstallEvent
from ops.model import ActiveStatus, MaintenanceStatus, WaitingStatus

from charmlibs.rollingops import (
    EtcdRollingOpsManager,
    OperationResult,
)

logger = logging.getLogger(__name__)

TRACE_FILE = Path('/var/lib/charm-rolling-ops/transitions.log')


def _now_timestamp_str() -> str:
    """UTC timestamp as a string using ISO 8601 format."""
    return datetime.now(UTC).isoformat()


class Charm(CharmBase):
    """Charm the service."""

    def __init__(self, framework: Framework):
        super().__init__(framework)
        targets = {
            '_restart': self._restart,
            '_failed_restart': self._failed_restart,
            '_deferred_restart': self._deferred_restart,
        }

        self.restart_manager = EtcdRollingOpsManager(
            charm=self,
            peer_relation_name='restart',
            etcd_relation_name='etcd',
            cluster_id='cluster-12345',
            callback_targets=targets,
        )

        observe = self.framework.observe
        observe(self.on.install, self._on_install)
        observe(self.on.restart_action, self._on_restart_action)
        observe(self.on.failed_restart_action, self._on_failed_restart_action)
        observe(self.on.deferred_restart_action, self._on_deferred_restart_action)

    def _restart(self, delay: int = 0) -> None:
        """Callback that sleeps for `delay` seconds and succeeds."""
        self._record_transition('_restart:start', delay=delay)
        logger.info('Starting restart operation')
        self.model.unit.status = MaintenanceStatus('Executing _restart operation')
        time.sleep(int(delay))
        self.model.unit.status = ActiveStatus()
        self._record_transition('_restart:done')

    def _failed_restart(self, delay: int = 0) -> OperationResult:
        """Callback that always fails, requesting a retry after releasing the lock."""
        self._record_transition('_failed_restart:start', delay=delay)
        logger.info('Starting failed restart operation')
        self.model.unit.status = MaintenanceStatus('Executing _failed_restart operation')
        time.sleep(int(delay))
        self.model.unit.status = MaintenanceStatus('Rolling _failed_restart operation failed')
        self._record_transition('_failed_restart:retry_release')
        return OperationResult.RETRY_RELEASE

    def _deferred_restart(self, delay: int = 0) -> OperationResult:
        """Callback that always fails, requesting a retry while holding the lock."""
        self._record_transition('_deferred_restart:start', delay=delay)
        logger.info('Starting deferred restart operation')
        self.model.unit.status = MaintenanceStatus('Executing _deferred_restart operation')
        time.sleep(int(delay))
        self.model.unit.status = MaintenanceStatus('Rolling _deferred_restart operation failed')
        self._record_transition('_deferred_restart:retry_hold', delay=delay)
        return OperationResult.RETRY_HOLD

    def _on_install(self, event: InstallEvent) -> None:
        self.unit.status = ActiveStatus()

    def _on_restart_action(self, event: ActionEvent) -> None:
        delay = event.params.get('delay')
        self._record_transition('action:restart', delay=delay)
        self.model.unit.status = WaitingStatus('Awaiting _restart operation')
        self.restart_manager.request_async_lock(callback_id='_restart', kwargs={'delay': delay})

    def _on_failed_restart_action(self, event: ActionEvent) -> None:
        delay = event.params.get('delay')
        max_retry = event.params.get('max-retry', None)
        self._record_transition('action:failed-restart', delay=delay, max_retry=max_retry)
        self.model.unit.status = WaitingStatus('Awaiting _failed_restart operation')
        self.restart_manager.request_async_lock(
            callback_id='_failed_restart',
            kwargs={'delay': delay},
            max_retry=max_retry,
        )

    def _on_deferred_restart_action(self, event: ActionEvent) -> None:
        delay = event.params.get('delay')
        max_retry = event.params.get('max-retry', None)
        self._record_transition('action:deferred-restart', delay=delay, max_retry=max_retry)
        self.model.unit.status = WaitingStatus('Awaiting _deferred_restart operation')
        self.restart_manager.request_async_lock(
            callback_id='_deferred_restart',
            kwargs={'delay': delay},
            max_retry=max_retry,
        )

    def _record_transition(self, name: str, **data: Any) -> None:
        """Append one JSON line describing a state transition to TRACE_FILE."""
        TRACE_FILE.parent.mkdir(parents=True, exist_ok=True)
        entry = {
            'ts': _now_timestamp_str(),
            'unit': self.model.unit.name,
            'event': name,
        }
        entry.update(data)
        with TRACE_FILE.open('a', encoding='utf-8') as fp:
            fp.write(json.dumps(entry) + '\n')
+ upstream-source: some-repo/some-image:some-tag diff --git a/rollingops/tests/integration/charms/k8s/library/README.md b/rollingops/tests/integration/charms/k8s/library/README.md new file mode 120000 index 000000000..1dfab2425 --- /dev/null +++ b/rollingops/tests/integration/charms/k8s/library/README.md @@ -0,0 +1 @@ +../../../../../README.md \ No newline at end of file diff --git a/rollingops/tests/integration/charms/k8s/library/pyproject.toml b/rollingops/tests/integration/charms/k8s/library/pyproject.toml new file mode 120000 index 000000000..be00ff53f --- /dev/null +++ b/rollingops/tests/integration/charms/k8s/library/pyproject.toml @@ -0,0 +1 @@ +../../../../../pyproject.toml \ No newline at end of file diff --git a/rollingops/tests/integration/charms/k8s/library/src b/rollingops/tests/integration/charms/k8s/library/src new file mode 120000 index 000000000..d753b57a1 --- /dev/null +++ b/rollingops/tests/integration/charms/k8s/library/src @@ -0,0 +1 @@ +../../../../../src \ No newline at end of file diff --git a/rollingops/tests/integration/charms/k8s/pyproject.toml b/rollingops/tests/integration/charms/k8s/pyproject.toml new file mode 120000 index 000000000..1e11d7825 --- /dev/null +++ b/rollingops/tests/integration/charms/k8s/pyproject.toml @@ -0,0 +1 @@ +../pyproject.toml \ No newline at end of file diff --git a/rollingops/tests/integration/charms/k8s/src/charm.py b/rollingops/tests/integration/charms/k8s/src/charm.py new file mode 100644 index 000000000..abb93f656 --- /dev/null +++ b/rollingops/tests/integration/charms/k8s/src/charm.py @@ -0,0 +1,40 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
"""K8s charm for testing."""

import logging

import common
import ops

logger = logging.getLogger(__name__)

CONTAINER = 'workload'


class RollingOpsCharm(common.Charm):
    """Charm the application."""

    def __init__(self, framework: ops.Framework):
        super().__init__(framework)
        self.framework.observe(self.on[CONTAINER].pebble_ready, self._on_pebble_ready)

    def _on_pebble_ready(self, event: ops.PebbleReadyEvent):
        """Mark the unit active once the workload container is ready."""
        self.unit.status = ops.ActiveStatus()


if __name__ == '__main__':  # pragma: nocover
    ops.main(RollingOpsCharm)
resources in metadata.yaml + +name: test +type: charm +summary: A small charm for use in integration tests. +description: A small charm for use in integration tests. + +base: ubuntu@24.04 +platforms: + amd64: + +parts: + charm: + source: . + plugin: uv + build-snaps: [astral-uv] + +peers: + restart: + interface: rolling_op + +requires: + etcd: + interface: etcd_client diff --git a/rollingops/tests/integration/charms/machine/library/README.md b/rollingops/tests/integration/charms/machine/library/README.md new file mode 120000 index 000000000..1dfab2425 --- /dev/null +++ b/rollingops/tests/integration/charms/machine/library/README.md @@ -0,0 +1 @@ +../../../../../README.md \ No newline at end of file diff --git a/rollingops/tests/integration/charms/machine/library/pyproject.toml b/rollingops/tests/integration/charms/machine/library/pyproject.toml new file mode 120000 index 000000000..be00ff53f --- /dev/null +++ b/rollingops/tests/integration/charms/machine/library/pyproject.toml @@ -0,0 +1 @@ +../../../../../pyproject.toml \ No newline at end of file diff --git a/rollingops/tests/integration/charms/machine/library/src b/rollingops/tests/integration/charms/machine/library/src new file mode 120000 index 000000000..d753b57a1 --- /dev/null +++ b/rollingops/tests/integration/charms/machine/library/src @@ -0,0 +1 @@ +../../../../../src \ No newline at end of file diff --git a/rollingops/tests/integration/charms/machine/pyproject.toml b/rollingops/tests/integration/charms/machine/pyproject.toml new file mode 120000 index 000000000..1e11d7825 --- /dev/null +++ b/rollingops/tests/integration/charms/machine/pyproject.toml @@ -0,0 +1 @@ +../pyproject.toml \ No newline at end of file diff --git a/rollingops/tests/integration/charms/machine/src/charm.py b/rollingops/tests/integration/charms/machine/src/charm.py new file mode 100644 index 000000000..49452cd2d --- /dev/null +++ b/rollingops/tests/integration/charms/machine/src/charm.py @@ -0,0 +1,38 @@ +# Copyright 2026 Canonical 
"""Machine charm for testing."""

import logging

import common
import ops

logger = logging.getLogger(__name__)


class RollingOpsCharm(common.Charm):
    """Charm the application."""

    def __init__(self, framework: ops.Framework):
        super().__init__(framework)
        self.framework.observe(self.on.start, self._on_start)

    def _on_start(self, event: ops.StartEvent):
        """Mark the unit active on start."""
        self.unit.status = ops.ActiveStatus()


if __name__ == '__main__':  # pragma: nocover
    ops.main(RollingOpsCharm)
+requires-python = ">=3.12" +dependencies = [ + "ops==3.*", + "charmlibs-rollingops", +] + +[tool.uv.sources] +"charmlibs-rollingops" = { path = "library", editable = true } diff --git a/rollingops/tests/integration/conftest.py b/rollingops/tests/integration/conftest.py new file mode 100644 index 000000000..b12f1f01e --- /dev/null +++ b/rollingops/tests/integration/conftest.py @@ -0,0 +1,82 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Fixtures for Juju integration tests.""" + +import logging +import os +import pathlib +import sys +import time +import typing +from collections.abc import Iterator + +import jubilant +import pytest + +logger = logging.getLogger(__name__) + + +def pytest_addoption(parser: pytest.OptionGroup): + parser.addoption( + '--keep-models', + action='store_true', + default=False, + help='keep temporarily-created models', + ) + + +@pytest.fixture(scope='session') +def app_name() -> str: + """Return the default application name.""" + return 'test' # determined by test charms' charmcraft.yaml + + +@pytest.fixture(scope='session') +def charm() -> pathlib.Path: + """Return the packed charm path.""" + substrate = os.environ['CHARMLIBS_SUBSTRATE'] + # tag = os.environ.get('CHARMLIBS_TAG', '') # get the tag if needed + return pathlib.Path(__file__).parent / '.packed' / f'{substrate}.charm' # set by pack.sh + + +@pytest.fixture(scope='module') +def juju( + request: pytest.FixtureRequest, charm: pathlib.Path, 
app_name: str +) -> Iterator[jubilant.Juju]: + """Pytest fixture that wraps :meth:`jubilant.with_model`. + + This adds command line parameter ``--keep-models`` (see help for details). + """ + keep_models = typing.cast('bool', request.config.getoption('--keep-models')) + with jubilant.temp_model(keep=keep_models) as juju: + juju.model_config({'logging-config': '=INFO;unit=DEBUG'}) + _deploy(juju, charm=charm, app_name=app_name) + juju.wait(jubilant.all_active) + yield juju + if request.session.testsfailed: + logger.info('Collecting Juju logs ...') + time.sleep(0.5) # Wait for Juju to process logs. + log = juju.debug_log(limit=1000) + print(log, end='', file=sys.stderr) + + +def _deploy(juju: jubilant.Juju, charm: pathlib.Path, app_name: str, num_units: int = 1) -> None: + substrate = os.environ['CHARMLIBS_SUBSTRATE'] + if substrate == 'k8s': + juju.deploy( + charm, app=app_name, num_units=num_units, resources={'workload': 'ubuntu:latest'} + ) + else: + juju.deploy(charm, app=app_name, num_units=num_units) diff --git a/rollingops/tests/integration/pack.sh b/rollingops/tests/integration/pack.sh new file mode 100755 index 000000000..b5c7113ff --- /dev/null +++ b/rollingops/tests/integration/pack.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# This script is executed in this directory via `just pack-k8s` or `just pack-machine`. +# Extra args are passed to this script, e.g. `just pack-k8s foo` -> $1 is 'foo'. +# In CI, the `just pack-` commands are invoked: +# - If this file exists and `just integration-` would execute any tests +# - Before running integration tests +# - With no additional arguments +# +# Environment variables: +# $CHARMLIBS_SUBSTRATE will have the value 'k8s' or 'machine' (set by pack-k8s or pack-machine) +# In CI, $CHARMLIBS_TAG is set based on pyproject.toml:tool.charmlibs.integration.tags +# For local testing, set $CHARMLIBS_TAG directly or use the tag variable. 
For example: +# just tag=24.04 pack-k8s some extra args +set -xueo pipefail + +TMP_DIR=".tmp" # clean temporary directory where charms will be packed +PACKED_DIR=".packed" # where packed charms will be placed with name expected in conftest.py + +: copy charm files to temporary directory for packing, dereferencing symlinks +rm -rf "$TMP_DIR" +cp --recursive --dereference "charms/$CHARMLIBS_SUBSTRATE/" "$TMP_DIR" + +: pack charm +cd "$TMP_DIR" +uv lock # required by uv charm plugin +charmcraft pack +cd - + +: place packed charm in expected location +mkdir -p "$PACKED_DIR" # -p means create parents and don't complain if dir already exists +mv "$TMP_DIR"/*.charm "$PACKED_DIR/$CHARMLIBS_SUBSTRATE.charm" # read by conftest.py diff --git a/rollingops/tests/integration/test_etcd_rolling_ops.py b/rollingops/tests/integration/test_etcd_rolling_ops.py new file mode 100644 index 000000000..3e68d2c0b --- /dev/null +++ b/rollingops/tests/integration/test_etcd_rolling_ops.py @@ -0,0 +1,182 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Integration tests using real Juju and pre-packed charm(s).""" + +import json +import logging +from datetime import datetime +from pathlib import Path + +import jubilant +from tenacity import retry, stop_after_delay, wait_fixed + +TRACE_FILE = '/var/lib/charm-rolling-ops/transitions.log' +logger = logging.getLogger(__name__) + + +@retry(wait=wait_fixed(10), stop=stop_after_delay(60), reraise=True) +def wait_for_etcdctl_env(juju: jubilant.Juju, unit: str) -> None: + task = juju.exec('test -f /var/lib/rollingops/etcd/etcdctl.env', unit=unit) + if task.status != 'completed' or task.return_code != 0: + raise RuntimeError('etcdctl env file not ready') + + +def get_unit_events(juju: jubilant.Juju, unit: str) -> list[dict[str, str]]: + task = juju.exec(f'cat {TRACE_FILE}', unit=unit) + + if not task.stdout.strip(): + return [] + + return [json.loads(line) for line in task.stdout.strip().splitlines()] + + +def parse_ts(event: dict[str, str]) -> datetime: + return datetime.fromisoformat(event['ts']) + + +def test_deploy(juju: jubilant.Juju, app_name: str): + """The deployment takes place in the module scoped `juju` fixture.""" + assert app_name in juju.status().apps + + +def test_restart_action_one_unit(juju: jubilant.Juju, app_name: str): + """Verify that restart action runs through the expected workflow.""" + + juju.deploy( + 'self-signed-certificates', + app='self-signed-certificates', + channel='1/stable', + ) + juju.deploy( + 'charmed-etcd', + app='etcd', + channel='3.6/stable', + ) + + juju.integrate( + 'etcd:client-certificates', + 'self-signed-certificates:certificates', + ) + juju.wait(jubilant.all_active, error=jubilant.any_error) + + juju.integrate(f'{app_name}:etcd', 'etcd:etcd-client') + juju.wait(jubilant.all_active, error=jubilant.any_error) + + wait_for_etcdctl_env(juju, f'{app_name}/0') + + juju.run(f'{app_name}/0', 'restart', {'delay': 1}, wait=300) + + juju.wait( + jubilant.all_active, + error=jubilant.any_error, + timeout=300, + ) + + events = 
get_unit_events(juju, f'{app_name}/0') + restart_events = [e['event'] for e in events] + + expected = [ + 'action:restart', + '_restart:start', + '_restart:done', + ] + + assert expected == restart_events + + +def test_all_units_can_connect_to_etcd(juju: jubilant.Juju, app_name: str): + juju.add_unit(app_name, num_units=2) + juju.wait( + lambda status: jubilant.all_active(status, app_name), + error=jubilant.any_error, + ) + + status = juju.status() + units = sorted(status.apps[app_name].units) + + for unit in units: + juju.exec(f'rm -f {TRACE_FILE}', unit=unit) + + for unit in units: + juju.run(unit, 'restart', {'delay': 2}, wait=300) + + juju.wait( + lambda status: jubilant.all_active(status, app_name, 'etcd', 'self-signed-certificates'), + error=jubilant.any_error, + timeout=600, + ) + + expected = [ + 'action:restart', + '_restart:start', + '_restart:done', + ] + + for unit in units: + events = get_unit_events(juju, unit) + restart_events = [e['event'] for e in events] + assert restart_events == expected + + +def test_all_units_can_connect_to_etcd_multi_app(juju: jubilant.Juju, charm: Path, app_name: str): + second_app = f'{app_name}-secondary' + + juju.deploy(charm, app=second_app, num_units=3) + juju.wait( + lambda status: jubilant.all_active(status, second_app), + error=jubilant.any_error, + timeout=600, + ) + juju.integrate(f'{second_app}:etcd', 'etcd:etcd-client') + + juju.wait( + lambda status: jubilant.all_active( + status, app_name, second_app, 'etcd', 'self-signed-certificates' + ), + error=jubilant.any_error, + timeout=600, + ) + + primary_units = sorted(juju.status().apps[app_name].units.keys()) + secondary_units = sorted(juju.status().apps[second_app].units.keys()) + all_units: list[str] = primary_units + secondary_units + + for unit in all_units: + juju.exec(f'rm -f {TRACE_FILE}', unit=unit) + + for unit in all_units: + wait_for_etcdctl_env(juju, unit) + + for unit in all_units: + juju.run(unit, 'restart', {'delay': 2}, wait=300) + + juju.wait( + 
lambda status: jubilant.all_active( + status, app_name, second_app, 'etcd', 'self-signed-certificates' + ), + error=jubilant.any_error, + timeout=600, + ) + + expected = [ + 'action:restart', + '_restart:start', + '_restart:done', + ] + + for unit in all_units: + events = get_unit_events(juju, unit) + restart_events = [e['event'] for e in events] + assert restart_events == expected diff --git a/rollingops/tests/unit/conftest.py b/rollingops/tests/unit/conftest.py new file mode 100644 index 000000000..bfdd0a14f --- /dev/null +++ b/rollingops/tests/unit/conftest.py @@ -0,0 +1,121 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Fixtures for unit tests, typically mocking out parts of the external system.""" + +from collections.abc import Generator +from pathlib import Path +from typing import Any +from unittest.mock import MagicMock, patch + +import ops +import pytest +from ops.testing import Context + +from charmlibs import rollingops + + +@pytest.fixture +def temp_cert_manager(tmp_path: Path) -> type[rollingops.CertificatesManager]: + class TestCertificatesManager(rollingops.CertificatesManager): + BASE_DIR = tmp_path / 'tls' + CA_CERT = BASE_DIR / 'client-ca.pem' + CLIENT_KEY = BASE_DIR / 'client.key' + CLIENT_CERT = BASE_DIR / 'client.pem' + + TestCertificatesManager.BASE_DIR.mkdir(parents=True, exist_ok=True) + return TestCertificatesManager + + +@pytest.fixture +def temp_etcdctl(tmp_path: Path) -> type[rollingops.EtcdCtl]: + class TestEtcdCtl(rollingops.EtcdCtl): + BASE_DIR = tmp_path / 'etcd' + SERVER_CA = BASE_DIR / 'server-ca.pem' + ENV_FILE = BASE_DIR / 'etcdctl.env' + + return TestEtcdCtl + + +@pytest.fixture +def etcdctl_patch() -> Generator[MagicMock, None, None]: + with patch('charmlibs.rollingops.EtcdCtl') as mock_etcdctl: + yield mock_etcdctl + + +@pytest.fixture +def certificates_manager_patches() -> Generator[dict[str, MagicMock], None, None]: + with ( + patch( + 'charmlibs.rollingops.CertificatesManager._exists', + return_value=False, + ), + patch( + 'charmlibs.rollingops.CertificatesManager.generate', + return_value=('CERT_PEM', 'KEY_PEM', 'CA_PEM'), + ) as mock_generate, + patch( + 'charmlibs.rollingops.CertificatesManager.persist_client_cert_key_and_ca', + return_value=None, + ) as mock_persit, + ): + yield { + 'generate': mock_generate, + 'persist': mock_persit, + } + + +class RollingOpsCharm(ops.CharmBase): + def __init__(self, framework: ops.Framework): + super().__init__(framework) + + callback_targets = { + '_restart': self.restart, + } + + self.restart_manager = rollingops.EtcdRollingOpsManager( + charm=self, + peer_relation_name='restart', + 
etcd_relation_name='etcd', + cluster_id='cluster-12345', + callback_targets=callback_targets, + ) + + def restart(self) -> None: + pass + + +@pytest.fixture +def charm_test() -> type[RollingOpsCharm]: + return RollingOpsCharm + + +meta: dict[str, Any] = { + 'name': 'charm', + 'peers': { + 'restart': { + 'interface': 'rolling_op', + }, + }, + 'requires': { + 'etcd': { + 'interface': 'etcd_client', + }, + }, +} + + +@pytest.fixture +def ctx(charm_test: type[RollingOpsCharm]) -> Context[RollingOpsCharm]: + return Context(charm_test, meta=meta) diff --git a/rollingops/tests/unit/test_certificates.py b/rollingops/tests/unit/test_certificates.py new file mode 100644 index 000000000..042759d61 --- /dev/null +++ b/rollingops/tests/unit/test_certificates.py @@ -0,0 +1,112 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Learn more about testing at: https://juju.is/docs/sdk/testing +from charmlibs.rollingops import CertificatesManager + + +def test_certificates_manager_exists_returns_false_when_no_files( + temp_cert_manager: CertificatesManager, +) -> None: + assert temp_cert_manager._exists() is False + + +def test_certificates_manager_exists_returns_false_when_cert_does_not_exist( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.CLIENT_KEY.write_text('client-key') + + assert temp_cert_manager._exists() is False + + +def test_certificates_manager_exists_returns_false_when_key_does_not_exist( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.CLIENT_CERT.write_text('client-cert') + + assert temp_cert_manager._exists() is False + + +def test_certificates_manager_exists_returns_true_when_all_files_exist( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.CLIENT_KEY.write_text('client-key') + temp_cert_manager.CLIENT_CERT.write_text('client-cert') + temp_cert_manager.CA_CERT.write_text('ca-cert') + + assert temp_cert_manager._exists() is True + + +def test_certificates_manager_persist_client_cert_and_key_writes_files( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.persist_client_cert_key_and_ca('cert-pem', 'key-pem', 'ca-pem') + + assert temp_cert_manager.CLIENT_CERT.read_text() == 'cert-pem' + assert temp_cert_manager.CLIENT_KEY.read_text() == 'key-pem' + + +def test_certificates_manager_has_client_cert_and_key_returns_false_when_files_missing( + temp_cert_manager: CertificatesManager, +) -> None: + assert temp_cert_manager.has_client_cert_key_and_ca('cert', 'key', 'ca') is False + + +def test_certificates_manager_has_client_cert_and_key_returns_true_when_material_matches( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.CLIENT_CERT.write_text('cert-pem') + temp_cert_manager.CLIENT_KEY.write_text('key-pem') + temp_cert_manager.CA_CERT.write_text('ca-pem') + 
+ assert temp_cert_manager.has_client_cert_key_and_ca('cert-pem', 'key-pem', 'ca-pem') is True + + +def test_certificates_manager_has_client_cert_and_key_returns_false_when_material_differs( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.CLIENT_CERT.write_text('cert-pem') + temp_cert_manager.CLIENT_KEY.write_text('key-pem') + temp_cert_manager.CA_CERT.write_text('ca-pem') + + assert temp_cert_manager.has_client_cert_key_and_ca('other-cert', 'key-pem', 'ca-pem') is False + assert temp_cert_manager.has_client_cert_key_and_ca('cert-pem', 'other-key', 'ca-pem') is False + assert ( + temp_cert_manager.has_client_cert_key_and_ca('cert-pem', 'key-pem', 'other-pem') is False + ) + + +def test_certificates_manager_generate_does_nothing_when_files_already_exist( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.CA_CERT.write_text('existing-ca-cert') + temp_cert_manager.CLIENT_KEY.write_text('existing-client-key') + temp_cert_manager.CLIENT_CERT.write_text('existing-client-cert') + + temp_cert_manager.generate(common_name='unit-1') + + assert temp_cert_manager.CA_CERT.read_text() == 'existing-ca-cert' + assert temp_cert_manager.CLIENT_KEY.read_text() == 'existing-client-key' + assert temp_cert_manager.CLIENT_CERT.read_text() == 'existing-client-cert' + + +def test_certificates_manager_generate_creates_all_files( + temp_cert_manager: CertificatesManager, +) -> None: + temp_cert_manager.generate(common_name='unit-1') + assert temp_cert_manager._exists() is True + + assert temp_cert_manager.CA_CERT.read_text().startswith('-----BEGIN CERTIFICATE-----') + assert temp_cert_manager.CLIENT_KEY.read_text().startswith('-----BEGIN RSA PRIVATE KEY-----') + assert temp_cert_manager.CLIENT_CERT.read_text().startswith('-----BEGIN CERTIFICATE-----') diff --git a/rollingops/tests/unit/test_etcd_rollingops_in_charm.py b/rollingops/tests/unit/test_etcd_rollingops_in_charm.py new file mode 100644 index 000000000..3bb96b6b8 --- /dev/null +++ 
b/rollingops/tests/unit/test_etcd_rollingops_in_charm.py @@ -0,0 +1,110 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Learn more about testing at: https://juju.is/docs/sdk/testing + + +from unittest.mock import MagicMock + +from ops.testing import Context, PeerRelation, Secret, State +from tests.unit.conftest import RollingOpsCharm + +from charmlibs.rollingops import ( + SECRET_FIELD, +) + + +def test_leader_elected_creates_shared_secret_and_stores_id( + certificates_manager_patches: dict[str, MagicMock], + etcdctl_patch: MagicMock, + ctx: Context[RollingOpsCharm], +): + peer_relation = PeerRelation(endpoint='restart') + + state_in = State(leader=True, relations={peer_relation}) + state_out = ctx.run(ctx.on.leader_elected(), state_in) + + peer_out = next(r for r in state_out.relations if r.endpoint == 'restart') + assert SECRET_FIELD in peer_out.local_app_data + assert peer_out.local_app_data[SECRET_FIELD].startswith('secret:') + + certificates_manager_patches['generate'].assert_called_once() + + +def test_leader_elected_does_not_regenerate_when_secret_already_exists( + certificates_manager_patches: dict[str, MagicMock], + etcdctl_patch: MagicMock, + ctx: Context[RollingOpsCharm], +): + peer_relation = PeerRelation( + endpoint='restart', local_app_data={SECRET_FIELD: 'secret:existing'} + ) + secret = Secret( + id='secret:existing', + owner='app', + tracked_content={ + 'client-cert': 'CERT_PEM', + 'client-key': 'KEY_PEM', + 
'client-ca': 'CA_PEM', + }, + ) + + state_in = State(leader=True, relations={peer_relation}, secrets=[secret]) + + state_out = ctx.run(ctx.on.leader_elected(), state_in) + + peer_out = next(r for r in state_out.relations if r.endpoint == 'restart') + assert peer_out.local_app_data[SECRET_FIELD] == 'secret:existing' + certificates_manager_patches['generate'].assert_not_called() + + +def test_non_leader_does_not_create_shared_secret( + certificates_manager_patches: dict[str, MagicMock], + etcdctl_patch: MagicMock, + ctx: Context[RollingOpsCharm], +): + peer_relation = PeerRelation(endpoint='restart') + state_in = State(leader=False, relations={peer_relation}) + + state_out = ctx.run(ctx.on.relation_changed(peer_relation, remote_unit=1), state_in) + + peer_out = next(r for r in state_out.relations if r.endpoint == 'restart') + assert SECRET_FIELD not in peer_out.local_app_data + certificates_manager_patches['generate'].assert_not_called() + + +def test_relation_changed_syncs_local_certificate_from_secret( + certificates_manager_patches: dict[str, MagicMock], + etcdctl_patch: MagicMock, + ctx: Context[RollingOpsCharm], +): + peer_relation = PeerRelation( + endpoint='restart', local_app_data={SECRET_FIELD: 'secret:rollingops-cert'} + ) + + secret = Secret( + id='secret:rollingops-cert', + tracked_content={ + 'client-cert': 'CERT_PEM', + 'client-key': 'KEY_PEM', + 'client-ca': 'CA_PEM', + }, + ) + + state_in = State(leader=False, relations={peer_relation}, secrets=[secret]) + + ctx.run(ctx.on.relation_changed(peer_relation, remote_unit=1), state_in) + certificates_manager_patches['persist'].assert_called_once_with( + 'CERT_PEM', 'KEY_PEM', 'CA_PEM' + ) diff --git a/rollingops/tests/unit/test_etcdctl.py b/rollingops/tests/unit/test_etcdctl.py new file mode 100644 index 000000000..8dd0b219f --- /dev/null +++ b/rollingops/tests/unit/test_etcdctl.py @@ -0,0 +1,94 @@ +# Copyright 2026 Canonical Ltd. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Learn more about testing at: https://juju.is/docs/sdk/testing + +from pathlib import Path +from unittest.mock import patch + +import pytest + +from charmlibs.rollingops import EtcdCtl, RollingOpsEtcdNotConfiguredError + + +def test_etcdctl_write_env(temp_etcdctl: EtcdCtl) -> None: + temp_etcdctl.write_env_file( + endpoints='https://10.0.0.1:2379,https://10.0.0.2:2379', + client_cert_path=Path('PATH1'), + client_key_path=Path('PATH2'), + ) + + assert temp_etcdctl.BASE_DIR.exists() + + env_text = temp_etcdctl.ENV_FILE.read_text() + assert 'export ETCDCTL_API="3"' in env_text + assert 'export ETCDCTL_ENDPOINTS="https://10.0.0.1:2379,https://10.0.0.2:2379"' in env_text + assert f'export ETCDCTL_CACERT="{temp_etcdctl.SERVER_CA}"' in env_text + assert 'export ETCDCTL_CERT="PATH1"' in env_text + assert 'export ETCDCTL_KEY="PATH2"' in env_text + + +def test_etcdctl_ensure_initialized_raises_when_env_missing(temp_etcdctl: EtcdCtl) -> None: + with pytest.raises(RollingOpsEtcdNotConfiguredError): + temp_etcdctl.ensure_initialized() + + +def test_etcdctl_cleanup_removes_env_file_and_server_ca(temp_etcdctl: EtcdCtl) -> None: + temp_etcdctl.BASE_DIR.mkdir(parents=True, exist_ok=True) + temp_etcdctl.ENV_FILE.write_text('env') + temp_etcdctl.SERVER_CA.write_text('ca') + + assert temp_etcdctl.ENV_FILE.exists() + assert temp_etcdctl.SERVER_CA.exists() + + temp_etcdctl.cleanup() + + assert not temp_etcdctl.ENV_FILE.exists() + assert 
not temp_etcdctl.SERVER_CA.exists() + + +def test_etcdctl_cleanup_is_noop_when_files_do_not_exist(temp_etcdctl: EtcdCtl) -> None: + assert not temp_etcdctl.ENV_FILE.exists() + assert not temp_etcdctl.SERVER_CA.exists() + + temp_etcdctl.cleanup() + + assert not temp_etcdctl.ENV_FILE.exists() + assert not temp_etcdctl.SERVER_CA.exists() + + +def test_etcdctl_load_env_parses_exported_vars(temp_etcdctl: EtcdCtl) -> None: + temp_etcdctl.BASE_DIR.mkdir(parents=True, exist_ok=True) + temp_etcdctl.SERVER_CA.write_text('SERVER CA') + temp_etcdctl.ENV_FILE.write_text( + '\n'.join([ + '# comment', + 'export ETCDCTL_API="3"', + 'export ETCDCTL_ENDPOINTS="https://10.0.0.1:2379"', + "export ETCDCTL_CERT='/a-path/client.pem'", + 'export ETCDCTL_KEY="/a-path/client.key"', + 'export ETCDCTL_CACERT="/a-path/server-ca.pem"', + '', + ]) + ) + + with patch.dict('os.environ', {'EXISTING_VAR': 'present'}, clear=True): + env = temp_etcdctl.load_env() + + assert env['EXISTING_VAR'] == 'present' + assert env['ETCDCTL_API'] == '3' + assert env['ETCDCTL_ENDPOINTS'] == 'https://10.0.0.1:2379' + assert env['ETCDCTL_CERT'] == '/a-path/client.pem' + assert env['ETCDCTL_KEY'] == '/a-path/client.key' + assert env['ETCDCTL_CACERT'] == '/a-path/server-ca.pem' diff --git a/rollingops/tests/unit/test_models.py b/rollingops/tests/unit/test_models.py new file mode 100644 index 000000000..40d1d0ecb --- /dev/null +++ b/rollingops/tests/unit/test_models.py @@ -0,0 +1,41 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Learn more about testing at: https://juju.is/docs/sdk/testing + + +from charmlibs.rollingops import ( + RollingOpsKeys, +) + + +def test_rollingopskeys_paths() -> None: + keys = RollingOpsKeys.for_owner('cluster-a', 'unit-1') + + assert keys.cluster_prefix == '/rollingops/cluster-a/' + assert keys._owner_prefix == '/rollingops/cluster-a/unit-1' + assert keys.lock_key == '/rollingops/cluster-a/granted-unit' + assert keys.pending == '/rollingops/cluster-a/unit-1/pending/' + assert keys.inprogress == '/rollingops/cluster-a/unit-1/inprogress/' + assert keys.completed == '/rollingops/cluster-a/unit-1/completed/' + + +def test_rollingopskeys_lock_key_is_shared_within_cluster() -> None: + k1 = RollingOpsKeys.for_owner('cluster-a', 'unit-1') + k2 = RollingOpsKeys.for_owner('cluster-a', 'unit-2') + + assert k1.lock_key == k2.lock_key + assert k1.pending != k2.pending + assert k1.inprogress != k2.inprogress + assert k1.completed != k2.completed diff --git a/rollingops/tests/unit/test_version.py b/rollingops/tests/unit/test_version.py new file mode 100644 index 000000000..23f6c4f6b --- /dev/null +++ b/rollingops/tests/unit/test_version.py @@ -0,0 +1,21 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit tests for library code, not involving charm code.""" + +from charmlibs import rollingops + + +def test_version(): + assert isinstance(rollingops.__version__, str) diff --git a/rollingops/tests/unit/test_version_in_charm.py b/rollingops/tests/unit/test_version_in_charm.py new file mode 100644 index 000000000..3ef98de07 --- /dev/null +++ b/rollingops/tests/unit/test_version_in_charm.py @@ -0,0 +1,38 @@ +# Copyright 2026 Canonical Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Light weight state-transition tests of the library in a charming context.""" + +import ops +import ops.testing + +from charmlibs import rollingops + + +class Charm(ops.CharmBase): + package_version: str + + def __init__(self, framework: ops.Framework): + super().__init__(framework) + framework.observe(self.on.start, self._on_start) + + def _on_start(self, event: ops.StartEvent): + self.package_version = rollingops.__version__ + + +def test_version(): + ctx = ops.testing.Context(Charm, meta={'name': 'charm'}) + with ctx(ctx.on.start(), ops.testing.State()) as manager: + manager.run() + assert isinstance(manager.charm.package_version, str) diff --git a/rollingops/uv.lock b/rollingops/uv.lock new file mode 100644 index 000000000..553fb6a91 --- /dev/null +++ b/rollingops/uv.lock @@ -0,0 +1,423 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", 
size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, 
upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash 
= "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "charmlibs-interfaces-tls-certificates" +version = "1.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "ops" }, + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/7e/166af1e71f2bf96482845a1806dc345cbc5507134a99ccbbae297f174e4b/charmlibs_interfaces_tls_certificates-1.8.1.tar.gz", hash = "sha256:f2bfabf3a3b4c18034941771733177b30e4742c06d7742d4bb30da6ead953f43", size = 148059, upload-time = "2026-02-27T13:46:50.086Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/17/1d1b0083800f4cc20f42e5d2763521d93975376499565c62da5276a80629/charmlibs_interfaces_tls_certificates-1.8.1-py3-none-any.whl", hash = "sha256:8e8fe047e02515d76f57a1d019056d72ce8c859c2ffb39a1e379cfc11fc048e6", size = 28208, upload-time = "2026-02-27T13:46:48.959Z" }, +] + +[[package]] +name = "charmlibs-rollingops" +source = { editable = "." 
} +dependencies = [ + { name = "charmlibs-interfaces-tls-certificates" }, +] + +[package.dev-dependencies] +integration = [ + { name = "jubilant" }, + { name = "tenacity" }, +] +unit = [ + { name = "ops", extra = ["testing"] }, +] + +[package.metadata] +requires-dist = [{ name = "charmlibs-interfaces-tls-certificates", specifier = ">=1.8.1" }] + +[package.metadata.requires-dev] +functional = [] +integration = [ + { name = "jubilant" }, + { name = "tenacity" }, +] +lint = [] +unit = [{ name = "ops", extras = ["testing"] }] + +[[package]] +name = "cryptography" +version = "46.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, 
upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + +[[package]] +name = "jubilant" +version = "1.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/0b/275edac8b57b0aac34f84073997660ebf536f97d2fa0d85a2cc3321047b6/jubilant-1.7.0.tar.gz", hash = 
"sha256:46b7c29a4f3336ab16d77d88418dbf8c9d0746e3f80ef42ee4c2d103eff79650", size = 32455, upload-time = "2026-01-29T02:40:10.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/d5/5b95ae9ab5abf283e33c802d286045abda7d826396ba417d5d3a20201b24/jubilant-1.7.0-py3-none-any.whl", hash = "sha256:1dcd70eb10299a95ae9fab405a3ce5f01a15513776b7f8eb4cf7b02808c93cdf", size = 33396, upload-time = "2026-01-29T02:40:09.222Z" }, +] + +[[package]] +name = "opentelemetry-api" +version = "1.40.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2c/1d/4049a9e8698361cc1a1aa03a6c59e4fa4c71e0c0f94a30f988a6876a2ae6/opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f", size = 70851, upload-time = "2026-03-04T14:17:21.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/bf/93795954016c522008da367da292adceed71cca6ee1717e1d64c83089099/opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9", size = 68676, upload-time = "2026-03-04T14:17:01.24Z" }, +] + +[[package]] +name = "ops" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "pyyaml" }, + { name = "websocket-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/bb/79b7efdb1243cbad11b6568c51ba4fb7358cd2c4d13bfd971a77c0aa7440/ops-3.6.0.tar.gz", hash = "sha256:a1c3361049c66759840a436143b07c74c2a46dcc44cbfd1177a9051f849c7971", size = 579236, upload-time = "2026-02-26T04:19:12.689Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/b6/d7daab4f841566d3cb0402d3463f7c1a00626724d6d7c02d7bf934ae6c86/ops-3.6.0-py3-none-any.whl", hash = "sha256:341c6688684446cc4b42860738898683feb271175bb9c4775ae68c81e4e0976a", size = 
211856, upload-time = "2026-02-26T04:19:08.012Z" }, +] + +[package.optional-dependencies] +testing = [ + { name = "ops-scenario" }, +] + +[[package]] +name = "ops-scenario" +version = "8.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ops" }, + { name = "pyyaml" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/c8/15d9f91eafa46d1dfa7f580be3274c22399f941724b74e274334de9468bb/ops_scenario-8.6.0.tar.gz", hash = "sha256:5a40a91fd5e9b6c8249933944dfc6e807ad2ddbd36a68c800746b9bb8a0eabfb", size = 71728, upload-time = "2026-02-26T04:19:15.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/d2/fb3176805339d3aa95b9d6e43478d0e34355c6c46f27723249f46bb8d19d/ops_scenario-8.6.0-py3-none-any.whl", hash = "sha256:469490a042dc45eca24eef7aa1b9214704d97d67503ad8465414ab68dc989d30", size = 64241, upload-time = "2026-02-26T04:19:09.579Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, 
upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = 
"2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", 
size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = 
"2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = 
"sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = 
"2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = 
"2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "tenacity" +version = "9.1.4" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/c6/ee486fd809e357697ee8a44d3d69222b344920433d3b6666ccd9b374630c/tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a", size = 49413, upload-time = "2026-02-07T10:45:33.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/c1/eb8f9debc45d3b7918a32ab756658a0904732f75e555402972246b0b8e71/tenacity-9.1.4-py3-none-any.whl", hash = "sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55", size = 28926, upload-time = "2026-02-07T10:45:32.24Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", 
hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "websocket-client" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]