From b3c03d1238ab69e46f331d4bdc87cea2431bba36 Mon Sep 17 00:00:00 2001 From: arturo-seijas <102022572+arturo-seijas@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:30:54 +0100 Subject: [PATCH] Support for auth_proxy integration (#109) --- .github/workflows/integration_test.yaml | 2 +- .licenserc.yaml | 1 + .trivyignore | 4 + docs/how-to/integrate-with-iam.md | 39 ++ lib/charms/oathkeeper/v0/auth_proxy.py | 481 ++++++++++++++++++++++++ metadata.yaml | 4 + src-docs/auth_proxy.py.md | 59 +++ src-docs/charm.py.md | 4 +- src-docs/jenkins.py.md | 48 ++- src/agent.py | 4 +- src/auth_proxy.py | 91 +++++ src/charm.py | 70 +--- src/jenkins.py | 10 + src/pebble.py | 95 +++++ src/state.py | 20 + templates/jenkins-auth-proxy-config.xml | 11 + tests/integration/conftest.py | 245 ++++++++++-- tests/integration/dex.py | 225 +++++++++++ tests/integration/files/dex.yaml | 129 +++++++ tests/integration/requirements.txt | 4 +- tests/integration/test_auth_proxy.py | 83 ++++ tests/integration/test_ingress.py | 2 - tests/unit/test_auth_proxy.py | 101 +++++ tests/unit/test_charm.py | 43 --- tests/unit/test_jenkins.py | 30 ++ tests/unit/test_pebble.py | 75 ++++ tests/unit/test_state.py | 25 ++ tox.ini | 1 + 28 files changed, 1751 insertions(+), 155 deletions(-) create mode 100644 docs/how-to/integrate-with-iam.md create mode 100644 lib/charms/oathkeeper/v0/auth_proxy.py create mode 100644 src-docs/auth_proxy.py.md create mode 100644 src/auth_proxy.py create mode 100644 src/pebble.py create mode 100644 templates/jenkins-auth-proxy-config.xml create mode 100644 tests/integration/dex.py create mode 100644 tests/integration/files/dex.yaml create mode 100644 tests/integration/test_auth_proxy.py create mode 100644 tests/unit/test_auth_proxy.py create mode 100644 tests/unit/test_pebble.py diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 5d45e194..d1ba3bcb 100644 --- a/.github/workflows/integration_test.yaml +++ 
b/.github/workflows/integration_test.yaml @@ -11,7 +11,7 @@ jobs: channel: 1.28-strict/stable extra-arguments: | --kube-config ${GITHUB_WORKSPACE}/kube-config - modules: '["test_ingress.py", "test_jenkins.py", "test_k8s_agent.py", "test_machine_agent.py", "test_plugins.py", "test_proxy.py", "test_cos.py", "test_upgrade.py"]' + modules: '["test_auth_proxy.py", "test_cos.py", "test_ingress.py", "test_jenkins.py", "test_k8s_agent.py", "test_machine_agent.py", "test_plugins.py", "test_proxy.py", "test_upgrade.py"]' pre-run-script: | -c "sudo microk8s config > ${GITHUB_WORKSPACE}/kube-config chmod +x tests/integration/pre_run_script.sh diff --git a/.licenserc.yaml b/.licenserc.yaml index ec0bd39a..7ab44caf 100644 --- a/.licenserc.yaml +++ b/.licenserc.yaml @@ -34,4 +34,5 @@ header: - 'trivy.yaml' - 'zap_rules.tsv' - 'lib/**' + - tests/integration/files/dex.yaml comment: on-failure diff --git a/.trivyignore b/.trivyignore index 59e673ee..0d23ce13 100644 --- a/.trivyignore +++ b/.trivyignore @@ -13,3 +13,7 @@ CVE-2024-26308 # Fixed in 5.3.32 CVE-2024-22243 CVE-2024-22201 +# Fixed in 5.3.33 +CVE-2024-22259 +# Fixed in 5.7.12 +CVE-2024-22257 diff --git a/docs/how-to/integrate-with-iam.md b/docs/how-to/integrate-with-iam.md new file mode 100644 index 00000000..ebfc185f --- /dev/null +++ b/docs/how-to/integrate-with-iam.md @@ -0,0 +1,39 @@ +# How to integrate with IAM + +This charm supports integration with the [IAM bundle](https://charmhub.io/iam) via [Oathkeeper](https://charmhub.io/oathkeeper), adding an authentication layer that will front the Jenkins applications. When enabled, Jenkins authentication will be disabled. + +The steps to enable this mechanism are described below. 
+
+## Deploy the IAM bundle
+
+To deploy the IAM bundle, follow [the corresponding section of the tutorial](https://charmhub.io/topics/canonical-identity-platform/tutorials/e2e-tutorial#heading--0001) and configure it with the Identity Provider of your choice, as described in [the documentation](https://charmhub.io/topics/canonical-identity-platform/tutorials/e2e-tutorial#heading--0002).
+
+## Deploy Oathkeeper
+
+Oathkeeper will interface between Jenkins and the IAM bundle. You will need to deploy the charm and issue and configure TLS certificates for in-cluster communication. Note that the [self-signed-certificates charm](https://charmhub.io/self-signed-certificates) is already deployed as part of the IAM bundle.
+
+```
+juju deploy oathkeeper --channel edge --trust
+juju integrate oathkeeper:certificates self-signed-certificates
+```
+
+To leverage proxy authentication, enable traefik's `enable_experimental_forward_auth` feature and integrate the traefik charm instance with Oathkeeper. As earlier, traefik-public is already deployed as part of the bundle.
+```
+juju config traefik-public enable_experimental_forward_auth=True
+juju integrate oathkeeper traefik-public:experimental-forward-auth
+```
+
+Finally, integrate Oathkeeper with [Kratos](https://charmhub.io/kratos), the User Management system, also part of the IAM bundle.
+```
+juju integrate oathkeeper kratos
+```
+
+## Configure the Jenkins charm
+
+Jenkins needs to be accessible via the same ingress on which Oathkeeper has been configured so that requests can be redirected; once Jenkins is integrated with that ingress and with Oathkeeper itself, authentication is set up in place.
+```
+juju integrate jenkins-k8s:ingress traefik-public
+juju integrate oathkeeper jenkins-k8s:auth-proxy
+```
+
+Now Jenkins will be reachable at https://[public_ip]/[model_name]-jenkins-k8s, where `public_ip` is the load balancer IP assigned to the traefik charm and `model_name`, the model where Jenkins is deployed.
diff --git a/lib/charms/oathkeeper/v0/auth_proxy.py b/lib/charms/oathkeeper/v0/auth_proxy.py new file mode 100644 index 00000000..da6ff0ee --- /dev/null +++ b/lib/charms/oathkeeper/v0/auth_proxy.py @@ -0,0 +1,481 @@ +#!/usr/bin/env python3 +# Copyright 2023 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Interface library for providing Oathkeeper with downstream charms' auth-proxy information. + +It is required to integrate a charm into an Identity and Access Proxy (IAP). + +## Getting Started + +To get started using the library, you need to fetch the library using `charmcraft`. +**Note that you also need to add `jsonschema` to your charm's `requirements.txt`.** + +```shell +cd some-charm +charmcraft fetch-lib charms.oathkeeper.v0.auth_proxy +``` + +To use the library from the requirer side, add the following to the `metadata.yaml` of the charm: + +```yaml +requires: + auth-proxy: + interface: auth_proxy + limit: 1 +``` + +Then, to initialise the library: +```python +from charms.oathkeeper.v0.auth_proxy import AuthProxyConfig, AuthProxyRequirer + +AUTH_PROXY_ALLOWED_ENDPOINTS = ["welcome", "about/app"] +AUTH_PROXY_HEADERS = ["X-User", "X-Some-Header"] + +class SomeCharm(CharmBase): + def __init__(self, *args): + # ... + self.auth_proxy = AuthProxyRequirer(self, self._auth_proxy_config) + + @property + def external_urls(self) -> list: + # Get ingress-per-unit or externally-configured web urls + # ... 
+ return ["https://example.com/unit-0", "https://example.com/unit-1"] + + @property + def _auth_proxy_config(self) -> AuthProxyConfig: + return AuthProxyConfig( + protected_urls=self.external_urls, + allowed_endpoints=AUTH_PROXY_ALLOWED_ENDPOINTS, + headers=AUTH_PROXY_HEADERS + ) + + def _on_ingress_ready(self, event): + self._configure_auth_proxy() + + def _configure_auth_proxy(self): + self.auth_proxy.update_auth_proxy_config(auth_proxy_config=self._auth_proxy_config) +``` +""" + +import json +import logging +import re +from dataclasses import asdict, dataclass, field +from typing import Dict, List, Mapping, Optional + +import jsonschema +from ops.charm import CharmBase, RelationBrokenEvent, RelationChangedEvent, RelationCreatedEvent +from ops.framework import EventBase, EventSource, Handle, Object, ObjectEvents +from ops.model import Relation, TooManyRelatedAppsError + +# The unique Charmhub library identifier, never change it +LIBID = "0e67a205d1c14d7a86d89f099d19c541" + +# Increment this major API version when introducing breaking changes +LIBAPI = 0 + +# Increment this PATCH version before using `charmcraft publish-lib` or reset +# to 0 if you are raising the major API version +LIBPATCH = 4 + +RELATION_NAME = "auth-proxy" +INTERFACE_NAME = "auth_proxy" + +logger = logging.getLogger(__name__) + +ALLOWED_HEADERS = ["X-User"] + +url_regex = re.compile( + r"(^http://)|(^https://)" # http:// or https:// + r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|" + r"[A-Z0-9-]{2,}\.?)|" # domain... + r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip + r"(?::\d+)?" 
# optional port + r"(?:/?|[/?]\S+)$", + re.IGNORECASE, +) + +AUTH_PROXY_REQUIRER_JSON_SCHEMA = { + "$schema": "http://json-schema.org/draft-07/schema", + "$id": "https://canonical.github.io/charm-relation-interfaces/docs/json_schemas/auth_proxy/v0/requirer.json", + "type": "object", + "properties": { + "protected_urls": {"type": "array", "default": None, "items": {"type": "string"}}, + "allowed_endpoints": {"type": "array", "default": [], "items": {"type": "string"}}, + "headers": { + "type": "array", + "default": ["X-User"], + "items": { + "enum": ALLOWED_HEADERS, + "type": "string", + }, + }, + }, + "required": ["protected_urls", "allowed_endpoints", "headers"], +} + + +class AuthProxyConfigError(Exception): + """Emitted when invalid auth proxy config is provided.""" + + +class DataValidationError(RuntimeError): + """Raised when data validation fails on relation data.""" + + +def _load_data(data: Mapping, schema: Optional[Dict] = None) -> Dict: + """Parses nested fields and checks whether `data` matches `schema`.""" + ret = {} + for k, v in data.items(): + try: + ret[k] = json.loads(v) + except json.JSONDecodeError: + ret[k] = v + + if schema: + _validate_data(ret, schema) + return ret + + +def _dump_data(data: Dict, schema: Optional[Dict] = None) -> Dict: + if schema: + _validate_data(data, schema) + + ret = {} + for k, v in data.items(): + if isinstance(v, (list, dict)): + try: + ret[k] = json.dumps(v) + except json.JSONDecodeError as e: + raise DataValidationError(f"Failed to encode relation json: {e}") + else: + ret[k] = v + return ret + + +class AuthProxyRelation(Object): + """A class containing helper methods for auth-proxy relation.""" + + def _pop_relation_data(self, relation_id: Relation) -> None: + if not self.model.unit.is_leader(): + return + + if not self._charm.model.relations[self._relation_name]: + return + + relation = self.model.get_relation(self._relation_name, relation_id=relation_id) + if not relation or not relation.app: + return + + try: + 
for data in list(relation.data[self.model.app]): + relation.data[self.model.app].pop(data, "") + except Exception as e: + logger.info(f"Failed to pop the relation data: {e}") + + +def _validate_data(data: Dict, schema: Dict) -> None: + """Checks whether `data` matches `schema`. + + Will raise DataValidationError if the data is not valid, else return None. + """ + try: + jsonschema.validate(instance=data, schema=schema) + except jsonschema.ValidationError as e: + raise DataValidationError(data, schema) from e + + +@dataclass +class AuthProxyConfig: + """Helper class containing a configuration for the charm related with Oathkeeper.""" + + protected_urls: List[str] + headers: List[str] + allowed_endpoints: List[str] = field(default_factory=lambda: []) + + def validate(self) -> None: + """Validate the auth proxy configuration.""" + # Validate protected_urls + for url in self.protected_urls: + if not re.match(url_regex, url): + raise AuthProxyConfigError(f"Invalid URL {url}") + + for url in self.protected_urls: + if url.startswith("http://"): + logger.warning( + f"Provided URL {url} uses http scheme. In order to make the Identity Platform work with the Proxy, run kratos in dev mode: `juju config kratos dev=True`. 
Don't do this in production" + ) + + # Validate headers + for header in self.headers: + if header not in ALLOWED_HEADERS: + raise AuthProxyConfigError( + f"Unsupported header {header}, it must be one of {ALLOWED_HEADERS}" + ) + + def to_dict(self) -> Dict: + """Convert object to dict.""" + return {k: v for k, v in asdict(self).items() if v is not None} + + +class AuthProxyConfigChangedEvent(EventBase): + """Event to notify the Provider charm that the auth proxy config has changed.""" + + def __init__( + self, + handle: Handle, + protected_urls: List[str], + headers: List[str], + allowed_endpoints: List[str], + relation_id: int, + relation_app_name: str, + ) -> None: + super().__init__(handle) + self.protected_urls = protected_urls + self.allowed_endpoints = allowed_endpoints + self.headers = headers + self.relation_id = relation_id + self.relation_app_name = relation_app_name + + def snapshot(self) -> Dict: + """Save event.""" + return { + "protected_urls": self.protected_urls, + "headers": self.headers, + "allowed_endpoints": self.allowed_endpoints, + "relation_id": self.relation_id, + "relation_app_name": self.relation_app_name, + } + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + self.protected_urls = snapshot["protected_urls"] + self.headers = snapshot["headers"] + self.allowed_endpoints = snapshot["allowed_endpoints"] + self.relation_id = snapshot["relation_id"] + self.relation_app_name = snapshot["relation_app_name"] + + def to_auth_proxy_config(self) -> AuthProxyConfig: + """Convert the event information to an AuthProxyConfig object.""" + return AuthProxyConfig( + self.protected_urls, + self.allowed_endpoints, + self.headers, + ) + + +class AuthProxyConfigRemovedEvent(EventBase): + """Event to notify the provider charm that the auth proxy config was removed.""" + + def __init__( + self, + handle: Handle, + relation_id: int, + ) -> None: + super().__init__(handle) + self.relation_id = relation_id + + def snapshot(self) -> Dict: + 
"""Save event.""" + return {"relation_id": self.relation_id} + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + self.relation_id = snapshot["relation_id"] + + +class AuthProxyProviderEvents(ObjectEvents): + """Event descriptor for events raised by `AuthProxyProvider`.""" + + proxy_config_changed = EventSource(AuthProxyConfigChangedEvent) + config_removed = EventSource(AuthProxyConfigRemovedEvent) + + +class AuthProxyProvider(AuthProxyRelation): + """Provider side of the auth-proxy relation.""" + + on = AuthProxyProviderEvents() + + def __init__(self, charm: CharmBase, relation_name: str = RELATION_NAME): + super().__init__(charm, relation_name) + + self._charm = charm + self._relation_name = relation_name + + events = self._charm.on[relation_name] + self.framework.observe(events.relation_changed, self._on_relation_changed_event) + self.framework.observe(events.relation_broken, self._on_relation_broken_event) + + def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: + """Get the auth-proxy config and emit a custom config-changed event.""" + if not self.model.unit.is_leader(): + return + + data = event.relation.data[event.app] + if not data: + logger.info("No requirer relation data available.") + return + + try: + auth_proxy_data = _load_data(data, AUTH_PROXY_REQUIRER_JSON_SCHEMA) + except DataValidationError as e: + logger.error( + f"Received invalid config from the requirer: {e}. 
Config-changed will not be emitted" + ) + return + + protected_urls = auth_proxy_data.get("protected_urls") + allowed_endpoints = auth_proxy_data.get("allowed_endpoints") + headers = auth_proxy_data.get("headers") + + relation_id = event.relation.id + relation_app_name = event.relation.app.name + + # Notify Oathkeeper to create access rules + self.on.proxy_config_changed.emit( + protected_urls, headers, allowed_endpoints, relation_id, relation_app_name + ) + + def _on_relation_broken_event(self, event: RelationBrokenEvent) -> None: + """Wipe the relation databag and notify Oathkeeper that the relation is broken.""" + # Workaround for https://github.com/canonical/operator/issues/888 + self._pop_relation_data(event.relation.id) + + self.on.config_removed.emit(event.relation.id) + + def get_headers(self) -> List[str]: + """Returns the list of headers from all relations.""" + if not self._charm.model.relations[self._relation_name]: + return [] + + headers = set() + for relation in self._charm.model.relations[self._relation_name]: + if relation.data[relation.app]: + for header in json.loads(relation.data[relation.app]["headers"]): + headers.add(header) + + return list(headers) + + def get_app_names(self) -> List[str]: + """Returns the list of all related app names.""" + if not self._charm.model.relations[self._relation_name]: + return [] + + app_names = list() + for relation in self._charm.model.relations[self._relation_name]: + if relation.data[relation.app]: + app_names.append(relation.app.name) + + return app_names + + +class InvalidAuthProxyConfigEvent(EventBase): + """Event to notify the charm that the auth proxy configuration is invalid.""" + + def __init__(self, handle: Handle, error: str): + super().__init__(handle) + self.error = error + + def snapshot(self) -> Dict: + """Save event.""" + return { + "error": self.error, + } + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + self.error = snapshot["error"] + + +class 
AuthProxyRelationRemovedEvent(EventBase): + """Custom event to notify the charm that the relation was removed.""" + + def snapshot(self) -> Dict: + """Save event.""" + return {} + + def restore(self, snapshot: Dict) -> None: + """Restore event.""" + pass + + +class AuthProxyRequirerEvents(ObjectEvents): + """Event descriptor for events raised by `AuthProxyRequirer`.""" + + invalid_auth_proxy_config = EventSource(InvalidAuthProxyConfigEvent) + auth_proxy_relation_removed = EventSource(AuthProxyRelationRemovedEvent) + + +class AuthProxyRequirer(AuthProxyRelation): + """Requirer side of the auth-proxy relation.""" + + on = AuthProxyRequirerEvents() + + def __init__( + self, + charm: CharmBase, + auth_proxy_config: Optional[AuthProxyConfig] = None, + relation_name: str = RELATION_NAME, + ) -> None: + super().__init__(charm, relation_name) + self._charm = charm + self._relation_name = relation_name + self._auth_proxy_config = auth_proxy_config + + events = self._charm.on[relation_name] + self.framework.observe(events.relation_created, self._on_relation_created_event) + self.framework.observe(events.relation_broken, self._on_relation_broken_event) + + def _on_relation_created_event(self, event: RelationCreatedEvent) -> None: + """Update the relation with auth proxy config when a relation is created.""" + if not self.model.unit.is_leader(): + return + + try: + self._update_relation_data(self._auth_proxy_config, event.relation.id) + except AuthProxyConfigError as e: + self.on.invalid_auth_proxy_config.emit(e.args[0]) + + def _on_relation_broken_event(self, event: RelationBrokenEvent) -> None: + """Wipe the relation databag and notify the charm when the relation is broken.""" + # Workaround for https://github.com/canonical/operator/issues/888 + self._pop_relation_data(event.relation.id) + + self.on.auth_proxy_relation_removed.emit() + + def _update_relation_data( + self, auth_proxy_config: Optional[AuthProxyConfig], relation_id: Optional[int] = None + ) -> None: + 
"""Validate the auth-proxy config and update the relation databag.""" + if not self.model.unit.is_leader(): + return + + if not auth_proxy_config: + logger.info("Auth proxy config is missing") + return + + if not isinstance(auth_proxy_config, AuthProxyConfig): + raise ValueError(f"Unexpected auth_proxy_config type: {type(auth_proxy_config)}") + + auth_proxy_config.validate() + + try: + relation = self.model.get_relation( + relation_name=self._relation_name, relation_id=relation_id + ) + except TooManyRelatedAppsError: + raise RuntimeError("More than one relations are defined. Please provide a relation_id") + + if not relation or not relation.app: + return + + data = _dump_data(auth_proxy_config.to_dict(), AUTH_PROXY_REQUIRER_JSON_SCHEMA) + relation.data[self.model.app].update(data) + + def update_auth_proxy_config( + self, auth_proxy_config: AuthProxyConfig, relation_id: Optional[int] = None + ) -> None: + """Update the auth proxy config stored in the object.""" + self._update_relation_data(auth_proxy_config, relation_id=relation_id) diff --git a/metadata.yaml b/metadata.yaml index 9ad2e782..8545b080 100644 --- a/metadata.yaml +++ b/metadata.yaml @@ -55,6 +55,10 @@ requires: interface: jenkins_agent_v0 optional: true limit: 2 + auth-proxy: + interface: auth_proxy + optional: true + limit: 1 ingress: interface: ingress optional: true diff --git a/src-docs/auth_proxy.py.md b/src-docs/auth_proxy.py.md new file mode 100644 index 00000000..46023be2 --- /dev/null +++ b/src-docs/auth_proxy.py.md @@ -0,0 +1,59 @@ + + + + +# module `auth_proxy.py` +Observer module for Jenkins to auth_proxy integration. + +**Global Variables** +--------------- +- **AUTH_PROXY_ALLOWED_ENDPOINTS** +- **AUTH_PROXY_HEADERS** + + +--- + +## class `Observer` +The Jenkins Auth Proxy integration observer. + + + +### function `__init__` + +```python +__init__(charm: CharmBase, ingress: IngressPerAppRequirer) +``` + +Initialize the observer and register event handlers. 
+ + + +**Args:** + + - `charm`: the parent charm to attach the observer to. + - `ingress`: the ingress object from which to extract the necessary settings. + + +--- + +#### property model + +Shortcut for more simple access the model. + + + +--- + + + +### function `has_relation` + +```python +has_relation() → bool +``` + +Check if there's a relation with data for auth proxy. + +Returns: True if there's a relation. + + diff --git a/src-docs/charm.py.md b/src-docs/charm.py.md index 48cd68cb..1b7ca527 100644 --- a/src-docs/charm.py.md +++ b/src-docs/charm.py.md @@ -17,7 +17,7 @@ Charm Jenkins. ## class `JenkinsK8sOperatorCharm` Charmed Jenkins. - + ### function `__init__` @@ -80,7 +80,7 @@ Unit that this execution is responsible for. --- - + ### function `calculate_env` diff --git a/src-docs/jenkins.py.md b/src-docs/jenkins.py.md index eb4aa109..a7ea1fc4 100644 --- a/src-docs/jenkins.py.md +++ b/src-docs/jenkins.py.md @@ -18,6 +18,7 @@ Functions to operate Jenkins. - **WAR_DOWNLOAD_URL** - **SYSTEM_PROPERTY_HEADLESS** - **SYSTEM_PROPERTY_LOGGING** +- **AUTH_PROXY_JENKINS_CONFIG** - **DEFAULT_JENKINS_CONFIG** - **JENKINS_LOGGING_CONFIG** - **PLUGIN_NAME_GROUP** @@ -29,7 +30,7 @@ Functions to operate Jenkins. --- - + ## function `get_admin_credentials` @@ -53,7 +54,7 @@ Retrieve admin credentials. --- - + ## function `is_storage_ready` @@ -83,7 +84,7 @@ Return whether the Jenkins home directory is mounted and owned by jenkins. --- - + ## function `install_default_config` @@ -102,7 +103,26 @@ Install default jenkins-config.xml. --- - + + +## function `install_auth_proxy_config` + +```python +install_auth_proxy_config(container: Container) → None +``` + +Install jenkins-config.xml for auth_proxy. + + + +**Args:** + + - `container`: The Jenkins workload container. + + +--- + + ## function `get_agent_name` @@ -163,7 +183,7 @@ Wrapper for Jenkins functionality. Attrs: environment: the Jenkins environment configuration. web_url: the Jenkins web URL. 
login_url: the Jenkins login URL. version: the Jenkins version. - + ### function `__init__` @@ -217,7 +237,7 @@ Returns: the web URL. --- - + ### function `add_agent_node` @@ -242,7 +262,7 @@ Add a Jenkins agent node. --- - + ### function `bootstrap` @@ -272,7 +292,7 @@ Initialize and install Jenkins. --- - + ### function `get_node_secret` @@ -302,7 +322,7 @@ Get node secret from jenkins. --- - + ### function `remove_agent_node` @@ -327,7 +347,7 @@ Remove a Jenkins agent node. --- - + ### function `remove_unlisted_plugins` @@ -357,7 +377,7 @@ Remove plugins that are not in the list of desired plugins. --- - + ### function `rotate_credentials` @@ -386,7 +406,7 @@ Invalidate all Jenkins sessions and create new password for admin account. --- - + ### function `safe_restart` @@ -410,7 +430,7 @@ Safely restart Jenkins server after all jobs are done executing. --- - + ### function `wait_ready` @@ -472,7 +492,7 @@ Represents an error probing for Jenkins storage mount. - `msg`: Explanation of the error. - + ### function `__init__` diff --git a/src/agent.py b/src/agent.py index 9b2732a4..be5df699 100644 --- a/src/agent.py +++ b/src/agent.py @@ -122,7 +122,7 @@ def _on_deprecated_agent_relation_joined(self, event: ops.RelationJoinedEvent) - container = self.charm.unit.get_container(JENKINS_SERVICE_NAME) if not jenkins.is_storage_ready(container): logger.warning("Service not yet ready. Deferring.") - event.defer() # The event needs to be handled after Jenkins has started(pebble ready). + event.defer() return # The relation is joined, it cannot be None, hence the type casting. deprecated_agent_relation_meta = typing.cast( @@ -159,7 +159,7 @@ def _on_agent_relation_joined(self, event: ops.RelationJoinedEvent) -> None: container = self.charm.unit.get_container(JENKINS_SERVICE_NAME) if not jenkins.is_storage_ready(container): logger.warning("Service not yet ready. Deferring.") - event.defer() # The event needs to be handled after Jenkins has started(pebble ready). 
+ event.defer() return # The relation is joined, it cannot be None, hence the type casting. agent_relation_meta = typing.cast( diff --git a/src/auth_proxy.py b/src/auth_proxy.py new file mode 100644 index 00000000..d352edaa --- /dev/null +++ b/src/auth_proxy.py @@ -0,0 +1,91 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Observer module for Jenkins to auth_proxy integration.""" + +import logging +from typing import List + +import ops +from charms.oathkeeper.v0.auth_proxy import AuthProxyConfig, AuthProxyRequirer +from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer + +import jenkins +import pebble +from state import AUTH_PROXY_RELATION, JENKINS_SERVICE_NAME, State + +AUTH_PROXY_ALLOWED_ENDPOINTS: List[str] = [] +AUTH_PROXY_HEADERS = ["X-User"] + + +logger = logging.getLogger(__name__) + + +class Observer(ops.Object): + """The Jenkins Auth Proxy integration observer.""" + + def __init__( + self, + charm: ops.CharmBase, + ingress: IngressPerAppRequirer, + jenkins_instance: jenkins.Jenkins, + state: State, + ): + """Initialize the observer and register event handlers. + + Args: + charm: the parent charm to attach the observer to. + ingress: the ingress object from which to extract the necessary settings. + jenkins_instance: the Jenkins instance. + state: the charm state. + """ + super().__init__(charm, "auth-proxy-observer") + self.charm = charm + self.ingress = ingress + self.jenkins = jenkins_instance + self.state = state + + self.auth_proxy = AuthProxyRequirer(self.charm) + + self.charm.framework.observe( + self.charm.on[AUTH_PROXY_RELATION].relation_joined, + self._on_auth_proxy_relation_joined, + ) + self.charm.framework.observe( + self.charm.on[AUTH_PROXY_RELATION].relation_departed, + self._auth_proxy_relation_departed, + ) + + def _on_auth_proxy_relation_joined(self, event: ops.RelationCreatedEvent) -> None: + """Configure the auth proxy. + + Args: + event: the event triggering the handler. 
+ """ + container = self.charm.unit.get_container(JENKINS_SERVICE_NAME) + if not jenkins.is_storage_ready(container) or not self.ingress.url: + logger.warning("Service not yet ready. Deferring.") + event.defer() + return + + auth_proxy_config = AuthProxyConfig( + protected_urls=[self.ingress.url], + allowed_endpoints=AUTH_PROXY_ALLOWED_ENDPOINTS, + headers=AUTH_PROXY_HEADERS, + ) + self.auth_proxy.update_auth_proxy_config(auth_proxy_config=auth_proxy_config) + pebble.replan_jenkins(container, self.jenkins, self.state) + + # pylint: disable=duplicate-code + def _auth_proxy_relation_departed(self, event: ops.RelationDepartedEvent) -> None: + """Unconfigure the auth proxy. + + Args: + event: the event triggering the handler. + """ + container = self.charm.unit.get_container(JENKINS_SERVICE_NAME) + if not jenkins.is_storage_ready(container): + logger.warning("Service not yet ready. Deferring.") + event.defer() + return + pebble.replan_jenkins(container, self.jenkins, self.state) diff --git a/src/charm.py b/src/charm.py index c103755c..fe6b547b 100755 --- a/src/charm.py +++ b/src/charm.py @@ -5,6 +5,8 @@ """Charm Jenkins.""" +# pylint: disable=too-many-instance-attributes + import logging import typing @@ -12,9 +14,11 @@ import actions import agent +import auth_proxy import cos import ingress import jenkins +import pebble import timerange from state import ( JENKINS_SERVICE_NAME, @@ -24,9 +28,6 @@ State, ) -if typing.TYPE_CHECKING: - from ops.pebble import LayerDict # pragma: no cover - AGENT_DISCOVERY_INGRESS_RELATION_NAME = "agent-discovery-ingress" INGRESS_RELATION_NAME = "ingress" logger = logging.getLogger(__name__) @@ -64,6 +65,9 @@ def __init__(self, *args: typing.Any): self, self.state, self.agent_discovery_ingress_observer, self.jenkins ) self.cos_observer = cos.Observer(self) + self.auth_proxy_observer = auth_proxy.Observer( + self, self.ingress_observer.ingress, self.jenkins, self.state + ) self.framework.observe( self.on.jenkins_home_storage_attached, 
self._on_jenkins_home_storage_attached ) @@ -71,43 +75,6 @@ def __init__(self, *args: typing.Any): self.framework.observe(self.on.update_status, self._on_update_status) self.framework.observe(self.on.upgrade_charm, self._upgrade_charm) - def _get_pebble_layer(self) -> ops.pebble.Layer: - """Return a dictionary representing a Pebble layer. - - Returns: - The pebble layer defining Jenkins service layer. - """ - # TypedDict and Dict[str,str] are not compatible. - env_dict = typing.cast(typing.Dict[str, str], self.jenkins.environment) - layer: LayerDict = { - "summary": "jenkins layer", - "description": "pebble config layer for jenkins", - "services": { - JENKINS_SERVICE_NAME: { - "override": "replace", - "summary": "jenkins", - "command": f"java -D{jenkins.SYSTEM_PROPERTY_HEADLESS} " - f"-D{jenkins.SYSTEM_PROPERTY_LOGGING} " - f"-jar {jenkins.EXECUTABLES_PATH}/jenkins.war " - f"--prefix={env_dict['JENKINS_PREFIX']}", - "startup": "enabled", - "environment": env_dict, - "user": jenkins.USER, - "group": jenkins.GROUP, - }, - }, - "checks": { - "online": { - "override": "replace", - "level": "ready", - "http": {"url": self.jenkins.login_url}, - "period": "30s", - "threshold": 5, - } - }, - } - return ops.pebble.Layer(layer) - def calculate_env(self) -> jenkins.Environment: """Return a dictionary for Jenkins Pebble layer. @@ -138,28 +105,7 @@ def _on_jenkins_pebble_ready(self, event: ops.PebbleReadyEvent) -> None: self.unit.status = ops.MaintenanceStatus("Installing Jenkins.") # First Jenkins server start installs Jenkins server. - container.add_layer( - "jenkins", - self._get_pebble_layer(), - combine=True, - ) - container.replan() - try: - self.jenkins.wait_ready() - self.unit.status = ops.MaintenanceStatus("Configuring Jenkins.") - self.jenkins.bootstrap( - container, jenkins.DEFAULT_JENKINS_CONFIG, self.state.proxy_config - ) - # Second Jenkins server start restarts Jenkins to bypass Wizard setup. 
- container.restart(JENKINS_SERVICE_NAME) - self.jenkins.wait_ready() - except TimeoutError as exc: - logger.error("Timed out waiting for Jenkins, %s", exc) - raise - except jenkins.JenkinsBootstrapError as exc: - logger.error("Error installing plugins, %s", exc) - raise - + pebble.replan_jenkins(container, self.jenkins, self.state) try: version = self.jenkins.version except jenkins.JenkinsError as exc: diff --git a/src/jenkins.py b/src/jenkins.py index 824b4e5f..f38bee59 100644 --- a/src/jenkins.py +++ b/src/jenkins.py @@ -65,6 +65,7 @@ SYSTEM_PROPERTY_HEADLESS = "java.awt.headless=true" # Java system property to load logging configuration from file SYSTEM_PROPERTY_LOGGING = f"java.util.logging.config.file={LOGGING_CONFIG_PATH}" +AUTH_PROXY_JENKINS_CONFIG = "templates/jenkins-auth-proxy-config.xml" DEFAULT_JENKINS_CONFIG = "templates/jenkins-config.xml" JENKINS_LOGGING_CONFIG = "templates/logging.properties" @@ -711,6 +712,15 @@ def install_default_config(container: ops.Container) -> None: _install_config(container, DEFAULT_JENKINS_CONFIG, CONFIG_FILE_PATH) +def install_auth_proxy_config(container: ops.Container) -> None: + """Install jenkins-config.xml for auth_proxy. + + Args: + container: The Jenkins workload container. + """ + _install_config(container, AUTH_PROXY_JENKINS_CONFIG, CONFIG_FILE_PATH) + + def _get_groovy_proxy_args(proxy_config: state.ProxyConfig) -> typing.Iterable[str]: """Get proxy arguments for proxy configuration Groovy script. diff --git a/src/pebble.py b/src/pebble.py new file mode 100644 index 00000000..a591ced0 --- /dev/null +++ b/src/pebble.py @@ -0,0 +1,95 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
+
+"""Pebble functionality."""
+
+import logging
+import typing
+
+import ops
+
+import jenkins
+from state import JENKINS_SERVICE_NAME, State
+
+if typing.TYPE_CHECKING:
+ from ops.pebble import LayerDict # pragma: no cover
+
+logger = logging.getLogger(__name__)
+
+
+def replan_jenkins(
+ container: ops.Container, jenkins_instance: jenkins.Jenkins, state: State
+) -> None:
+ """Replan the jenkins services.
+
+ Args:
+ container: the container for which to replan the services.
+ jenkins_instance: the Jenkins instance.
+ state: the charm state.
+
+ Raises:
+ JenkinsBootstrapError: if an error occurs while bootstrapping Jenkins.
+ """
+ container.add_layer("jenkins", _get_pebble_layer(jenkins_instance), combine=True)
+ container.replan()
+ try:
+ jenkins_instance.wait_ready()
+ # Tested in integration
+ if state.auth_proxy_integrated: # pragma: no cover
+ jenkins_instance.bootstrap(
+ container, jenkins.AUTH_PROXY_JENKINS_CONFIG, state.proxy_config
+ )
+ else: # pragma: no cover
+ jenkins_instance.bootstrap(
+ container, jenkins.DEFAULT_JENKINS_CONFIG, state.proxy_config
+ )
+ # Second Jenkins server start restarts Jenkins to bypass Wizard setup.
+ container.restart(JENKINS_SERVICE_NAME)
+ jenkins_instance.wait_ready()
+ except TimeoutError as exc:
+ logger.error("Timed out waiting for Jenkins, %s", exc)
+ raise jenkins.JenkinsBootstrapError from exc
+ except jenkins.JenkinsBootstrapError as exc:
+ logger.error("Error installing Jenkins, %s", exc)
+ raise
+
+
+def _get_pebble_layer(jenkins_instance: jenkins.Jenkins) -> ops.pebble.Layer:
+ """Return a dictionary representing a Pebble layer.
+
+ Args:
+ jenkins_instance: the Jenkins instance.
+
+ Returns:
+ The pebble layer defining Jenkins service layer.
+ """
+ # TypedDict and Dict[str,str] are not compatible.
+ env_dict = typing.cast(typing.Dict[str, str], jenkins_instance.environment)
+ layer: LayerDict = {
+ "summary": "jenkins layer",
+ "description": "pebble config layer for jenkins",
+ "services": {
+ JENKINS_SERVICE_NAME: {
+ "override": "replace",
+ "summary": "jenkins",
+ "command": f"java -D{jenkins.SYSTEM_PROPERTY_HEADLESS} "
+ f"-D{jenkins.SYSTEM_PROPERTY_LOGGING} "
+ f"-jar {jenkins.EXECUTABLES_PATH}/jenkins.war "
+ f"--prefix={env_dict['JENKINS_PREFIX']}",
+ "startup": "enabled",
+ "environment": env_dict,
+ "user": jenkins.USER,
+ "group": jenkins.GROUP,
+ },
+ },
+ "checks": {
+ "online": {
+ "override": "replace",
+ "level": "ready",
+ "http": {"url": jenkins_instance.login_url},
+ "period": "30s",
+ "threshold": 5,
+ }
+ },
+ }
+ return ops.pebble.Layer(layer)
diff --git a/src/state.py b/src/state.py
index 2e630ae2..de731656 100644
--- a/src/state.py
+++ b/src/state.py
@@ -17,6 +17,7 @@
 AGENT_RELATION = "agent"
 DEPRECATED_AGENT_RELATION = "agent-deprecated"
+AUTH_PROXY_RELATION = "auth-proxy"
 JENKINS_SERVICE_NAME = "jenkins"
 JENKINS_HOME_STORAGE_NAME = "jenkins-home"
@@ -185,6 +186,19 @@ def _get_agent_meta_map_from_relation(
 return unit_metadata_mapping
+def _is_auth_proxy_integrated(relation: typing.Optional[ops.Relation]) -> bool:
+ """Check if there is an auth proxy integration.
+
+ Args:
+ relation: The auth-proxy relation.
+
+ Returns:
+ True if an integration for auth proxy exists.
+ """
+ # No relation data is written by the provider, so checking the existence suffices.
+ return bool(relation)
+
+
 class ProxyConfig(BaseModel):
 """Configuration for accessing Jenkins through proxy.
@@ -227,6 +241,7 @@ class State:
 deprecated agent relation.
 proxy_config: Proxy configuration to access Jenkins upstream through.
 plugins: The list of allowed plugins to install.
+ auth_proxy_integrated: if an auth proxy integration has been set.
""" @@ -237,6 +252,7 @@ class State: ] proxy_config: typing.Optional[ProxyConfig] plugins: typing.Optional[typing.Iterable[str]] + auth_proxy_integrated: bool @classmethod def from_charm(cls, charm: ops.CharmBase) -> "State": @@ -270,6 +286,9 @@ def from_charm(cls, charm: ops.CharmBase) -> "State": deprecated_agent_meta_map = _get_agent_meta_map_from_relation( charm.model.relations[DEPRECATED_AGENT_RELATION], charm.app.name ) + is_auth_proxy_integrated = _is_auth_proxy_integrated( + charm.model.get_relation(AUTH_PROXY_RELATION) + ) except ValidationError as exc: logger.error("Invalid agent relation data received, %s", exc) raise CharmRelationDataInvalidError( @@ -296,4 +315,5 @@ def from_charm(cls, charm: ops.CharmBase) -> "State": deprecated_agent_relation_meta=deprecated_agent_meta_map, plugins=plugins, proxy_config=proxy_config, + auth_proxy_integrated=is_auth_proxy_integrated, ) diff --git a/templates/jenkins-auth-proxy-config.xml b/templates/jenkins-auth-proxy-config.xml new file mode 100644 index 00000000..be195876 --- /dev/null +++ b/templates/jenkins-auth-proxy-config.xml @@ -0,0 +1,11 @@ + + + false + 0 + + + 50000 + + hudson.model.UpdateCenter$CoreUpdateMonitor + + diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 832bc2b5..b8558dba 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -3,13 +3,11 @@ """Fixtures for Jenkins-k8s-operator charm integration tests.""" -# pylint: disable=too-many-lines - import os import random import secrets import string -from typing import AsyncGenerator, Iterable, Optional +from typing import Any, AsyncGenerator, Callable, Coroutine, Generator, Iterable, Optional import jenkinsapi.jenkins import kubernetes.config @@ -22,12 +20,24 @@ from juju.model import Controller, Model from juju.unit import Unit from keycloak import KeycloakAdmin, KeycloakOpenIDConnection +from lightkube import Client, KubeConfig +from lightkube.core.exceptions import ApiError +from 
playwright.async_api import async_playwright +from playwright.async_api._generated import Browser, BrowserContext, BrowserType, Page +from playwright.async_api._generated import Playwright as AsyncPlaywright from pytest import FixtureRequest from pytest_operator.plugin import OpsTest import state from .constants import ALLOWED_PLUGINS +from .dex import ( + apply_dex_resources, + create_dex_resources, + get_dex_manifest, + get_dex_service_url, + update_redirect_uri, +) from .helpers import generate_jenkins_client_from_application, get_pod_ip from .types_ import KeycloakOIDCMetadata, LDAPSettings, ModelAppUnit, UnitWebClient @@ -71,7 +81,6 @@ async def charm_fixture(request: FixtureRequest, ops_test: OpsTest) -> str: charm = await ops_test.build_charm(".") else: charm = f"./{charm}" - return charm @@ -248,7 +257,6 @@ async def machine_model_fixture( model = await machine_controller.add_model(machine_model_name) await model.connect(f"localhost:admin/{model.name}") yield model - await machine_controller.destroy_models( model.name, destroy_storage=True, force=True, max_wait=10 * 60 ) @@ -454,13 +462,9 @@ async def jenkins_with_proxy_fixture( ) -> AsyncGenerator[Application, None]: """Jenkins server charm deployed under model with proxy configuration.""" resources = {"jenkins-image": jenkins_image} - # Deploy the charm and wait for active/idle status application = await model_with_proxy.deploy( - charm, - resources=resources, - series="jammy", - application_name="jenkins-proxy-k8s", + charm, resources=resources, series="jammy", application_name="jenkins-proxy-k8s" ) await model_with_proxy.wait_for_idle( apps=[application.name], @@ -469,11 +473,9 @@ async def jenkins_with_proxy_fixture( timeout=20 * 60, idle_period=30, ) - # slow down update-status so that it doesn't intervene currently running tests async with ops_test.fast_forward(fast_interval="5h"): yield application - await model_with_proxy.remove_application(application.name, block_until_done=True) @@ -527,11 +529,7 
@@ async def app_with_allowed_plugins_fixture( @pytest.fixture(scope="module", name="ldap_settings") def ldap_settings_fixture() -> LDAPSettings: """LDAP user for testing.""" - return LDAPSettings( - container_port=1389, - username="customuser", - password=secrets.token_hex(16), - ) + return LDAPSettings(container_port=1389, username="customuser", password=secrets.token_hex(16)) @pytest_asyncio.fixture(scope="module", name="ldap_server") @@ -774,21 +772,212 @@ def external_hostname_fixture() -> str: async def traefik_application_fixture(model: Model): """The application related to Jenkins via ingress v2 relation.""" traefik = await model.deploy( - "traefik-k8s", - channel="edge", - trust=True, - config={"routing_mode": "path"}, + "traefik-k8s", channel="edge", trust=True, config={"routing_mode": "path"} ) - await model.wait_for_idle( - status="active", - apps=[traefik.name], - timeout=20 * 60, - idle_period=30, - raise_on_error=False, + status="active", apps=[traefik.name], timeout=20 * 60, idle_period=30, raise_on_error=False ) status = await model.get_status(filters=[traefik.name]) unit = next(iter(status.applications[traefik.name].units)) traefik_address = status["applications"][traefik.name]["units"][unit]["address"] - return (traefik, traefik_address) + + +@pytest_asyncio.fixture(scope="module", name="oathkeeper_related") +async def oathkeeper_application_related_fixture( + application: Application, client: Client, ext_idp_service: str +): + """The application related to Jenkins via auth_proxy v0 relation.""" + oathkeeper = await application.model.deploy("oathkeeper", channel="edge", trust=True) + identity_platform = await application.model.deploy( + "identity-platform", channel="edge", trust=True + ) + await application.model.applications["kratos-external-idp-integrator"].set_config( + { + "client_id": "client_id", + "client_secret": "client_secret", + "provider": "generic", + "issuer_url": ext_idp_service, + "scope": "profile email", + "provider_id": "Dex", 
+ } + ) + + # See https://github.com/canonical/kratos-operator/issues/182 + await application.model.wait_for_idle( + status="active", + apps=[application.name, oathkeeper.name] + [app.name for app in identity_platform], + raise_on_error=False, + timeout=30 * 60, + idle_period=5, + ) + + await application.model.add_relation( + f"{oathkeeper.name}:certificates", "self-signed-certificates" + ) + await application.model.add_relation( + "traefik-public:receive-ca-cert", "self-signed-certificates" + ) + await application.model.applications["traefik-public"].set_config( + {"enable_experimental_forward_auth": "True"} + ) + await application.model.add_relation( + f"{oathkeeper.name}", "traefik-public:experimental-forward-auth" + ) + await application.model.add_relation(f"{oathkeeper.name}:kratos-info", "kratos") + # Needed per https://github.com/canonical/oathkeeper-operator/issues/49 + await application.model.applications["kratos"].set_config({"dev": "True"}) + await application.model.add_relation(f"{application.name}:ingress", "traefik-public") + await application.model.add_relation(f"{application.name}:auth-proxy", oathkeeper.name) + + await application.model.wait_for_idle( + status="active", + apps=[application.name, oathkeeper.name] + [app.name for app in identity_platform], + raise_on_error=False, + timeout=30 * 60, + idle_period=5, + ) + + get_redirect_uri_action = ( + await application.model.applications["kratos-external-idp-integrator"] + .units[0] + .run_action("get-redirect-uri") + ) + action_output = await get_redirect_uri_action.wait() + update_redirect_uri(client, action_output.results["redirect-uri"]) + return oathkeeper + + +@pytest.fixture(scope="session", name="client") +def client_fixture() -> Client: + """k8s client.""" + return Client(config=KubeConfig.from_file(KUBECONFIG), field_manager="dex-test") + + +@pytest.fixture(scope="module", name="ext_idp_service") +def ext_idp_service_fixture(ops_test: OpsTest, client: Client) -> Generator[str, None, None]: 
+ """Deploy a DEX service on top of k8s for authentication.""" + try: + create_dex_resources(client) + # We need to set the dex issuer_url to be the IP that was assigned to + # the dex service by metallb. We can't know that before hand, so we + # reapply the dex manifests. + apply_dex_resources(client) + yield get_dex_service_url(client) + finally: + if not ops_test.keep_model: + for obj in get_dex_manifest(): + try: + # mypy doesn't work well with lightkube + client.delete( + type(obj), + obj.metadata.name, # type: ignore + namespace=obj.metadata.namespace, # type: ignore + ) + except ApiError: + pass + + +@pytest.fixture() +def external_user_email() -> str: + """Username for testing proxy authentication.""" + return "admin@example.com" + + +@pytest.fixture() +def external_user_password() -> str: + """Password for testing proxy authentication.""" + return "password" + + +# The playwright fixtures are taken from: +# https://github.com/microsoft/playwright-python/blob/main/tests/async/conftest.py +@pytest_asyncio.fixture(scope="module", name="playwright") +async def playwright_fixture() -> AsyncGenerator[AsyncPlaywright, None]: + """Playwright object.""" + async with async_playwright() as playwright_object: + yield playwright_object + + +@pytest_asyncio.fixture(scope="module", name="browser_type") +async def browser_type_fixture(playwright: AsyncPlaywright) -> AsyncGenerator[BrowserType, None]: + """Browser type for playwright.""" + yield playwright.firefox + + +@pytest_asyncio.fixture(scope="module", name="browser_factory") +async def browser_factory_fixture( + browser_type: BrowserType, +) -> AsyncGenerator[Callable[..., Coroutine[Any, Any, Browser]], None]: + """Browser factory.""" + browsers = [] + + async def launch(**kwargs: Any) -> Browser: + """Launch browser. + + Args: + kwargs: kwargs. + + Returns: + a browser instance. 
+ """ + browser = await browser_type.launch(**kwargs) + browsers.append(browser) + return browser + + yield launch + for browser in browsers: + await browser.close() + + +@pytest_asyncio.fixture(scope="module", name="browser") +async def browser_fixture( + browser_factory: Callable[..., Coroutine[Any, Any, Browser]] +) -> AsyncGenerator[Browser, None]: + """Browser.""" + browser = await browser_factory() + yield browser + await browser.close() + + +@pytest_asyncio.fixture(name="context_factory") +async def context_factory_fixture( + browser: Browser, +) -> AsyncGenerator[Callable[..., Coroutine[Any, Any, BrowserContext]], None]: + """Playwright context factory.""" + contexts = [] + + async def launch(**kwargs: Any) -> BrowserContext: + """Launch browser. + + Args: + kwargs: kwargs. + + Returns: + the browser context. + """ + context = await browser.new_context(**kwargs) + contexts.append(context) + return context + + yield launch + for context in contexts: + await context.close() + + +@pytest_asyncio.fixture(name="context") +async def context_fixture( + context_factory: Callable[..., Coroutine[Any, Any, BrowserContext]] +) -> AsyncGenerator[BrowserContext, None]: + """Playwright context.""" + context = await context_factory(ignore_https_errors=True) + yield context + await context.close() + + +@pytest_asyncio.fixture +async def page(context: BrowserContext) -> AsyncGenerator[Page, None]: + """Playwright page.""" + new_page = await context.new_page() + yield new_page + await new_page.close() diff --git a/tests/integration/dex.py b/tests/integration/dex.py new file mode 100644 index 00000000..eaadd414 --- /dev/null +++ b/tests/integration/dex.py @@ -0,0 +1,225 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""DEX deployment and utilities for testing.""" + +import logging +from os.path import join +from pathlib import Path +from time import sleep +from typing import List, Optional + +import requests +from lightkube import Client, codecs +from lightkube.core.exceptions import ApiError, ObjectDeleted +from lightkube.resources.apps_v1 import Deployment +from lightkube.resources.core_v1 import Pod, Service +from requests.exceptions import RequestException + +logger = logging.getLogger(__name__) + + +DEX_MANIFESTS = Path(__file__).parent / "files" / "dex.yaml" + + +def get_dex_manifest( + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + redirect_uri: Optional[str] = None, + issuer_url: Optional[str] = None, +) -> List[codecs.AnyResource]: + """Get the DEX manifest interpolating the needed variables. + + Args: + client_id: client ID. + client_secret: client secret. + redirect_uri: redirect URI. + issuer_url: issuer URL. + + Returns: + the list of created resources. + """ + with open(DEX_MANIFESTS, "r", encoding="utf-8") as file: + return codecs.load_all_yaml( + file, + context={ + "client_id": client_id, + "client_secret": client_secret, + "redirect_uri": redirect_uri, + "issuer_url": issuer_url, + }, + ) + + +def _restart_dex(client: Client) -> None: + """Restart the DEX pods. + + Args: + client: k8s client. + """ + for pod in client.list(Pod, namespace="dex", labels={"app": "dex"}): + # mypy doesn't work well with lightkube + client.delete(Pod, pod.metadata.name, namespace="dex") # type: ignore + + +def _wait_until_dex_is_ready(client: Client, issuer_url: Optional[str] = None) -> None: + """Wait for DEX to be up. + + Args: + client: k8s client. + issuer_url: issuer URL. + + Raises: + RuntimeError: if DEX fails to start. 
+ """ + for pod in client.list(Pod, namespace="dex", labels={"app": "dex"}): + # Some pods may be deleted, if we are restarting + try: + # mypy doesn't work well with lightkube + client.wait( + Pod, + pod.metadata.name, # type: ignore + for_conditions=["Ready", "Deleted"], + namespace="dex", + ) + except ObjectDeleted: + pass + client.wait(Deployment, "dex", namespace="dex", for_conditions=["Available"]) + if not issuer_url: + issuer_url = get_dex_service_url(client) + + resp = requests.get(join(issuer_url, ".well-known/openid-configuration"), timeout=5) + if resp.status_code != 200: + raise RuntimeError("Failed to deploy dex") + + +def wait_until_dex_is_ready(client: Client, issuer_url: Optional[str] = None) -> None: + """Wait for DEX to be up. + + Args: + client: k8s client. + issuer_url: issuer URL. + """ + try: + _wait_until_dex_is_ready(client, issuer_url) + except (RuntimeError, RequestException): + # It may take some time for dex to restart, so we sleep a little + # and try again + sleep(3) + _wait_until_dex_is_ready(client, issuer_url) + + +def _apply_dex_manifests( + client: Client, + client_id: str, + client_secret: str, + redirect_uri: str, + issuer_url: Optional[str], +) -> None: + """Apply the DEX manifest definitions. + + Args: + client: k8s client. + client_id: client ID. + client_secret: client secret. + redirect_uri: redirect URI. + issuer_url: issuer URL. + """ + objs = get_dex_manifest( + client_id=client_id, + client_secret=client_secret, + redirect_uri=redirect_uri, + issuer_url=issuer_url, + ) + + for obj in objs: + client.apply(obj, force=True) + + +def create_dex_resources( + client: Client, + client_id: str = "client_id", + client_secret: str = "client_secret", # nosec + redirect_uri: str = "", + issuer_url: Optional[str] = None, +): + """Apply the DEX manifest definitions and wait for DEX to be up. + + Args: + client: k8s client. + client_id: client ID. + client_secret: client secret. + redirect_uri: redirect URI. + issuer_url: issuer URL. 
+ """
+ _apply_dex_manifests(
+ client,
+ client_id=client_id,
+ client_secret=client_secret,
+ redirect_uri=redirect_uri,
+ issuer_url=issuer_url,
+ )
+
+ logger.info("Waiting for dex to be ready")
+ wait_until_dex_is_ready(client, issuer_url)
+
+
+def apply_dex_resources(
+ client: Client,
+ client_id: str = "client_id",
+ client_secret: str = "client_secret", # nosec
+ redirect_uri: str = "",
+ issuer_url: Optional[str] = None,
+) -> None:
+ """Apply the DEX manifest definitions and wait for DEX to start up.
+
+ Args:
+ client: k8s client.
+ client_id: client ID.
+ client_secret: client secret.
+ redirect_uri: redirect URI.
+ issuer_url: issuer URL.
+ """
+ if not issuer_url:
+ try:
+ issuer_url = get_dex_service_url(client)
+ except ApiError:
+ logger.info("No service found for dex")
+
+ _apply_dex_manifests(
+ client,
+ client_id=client_id,
+ client_secret=client_secret,
+ redirect_uri=redirect_uri,
+ issuer_url=issuer_url,
+ )
+
+ logger.info("Restarting dex")
+ _restart_dex(client)
+
+ logger.info("Waiting for dex to be ready")
+ wait_until_dex_is_ready(client, issuer_url)
+
+
+def update_redirect_uri(client: Client, redirect_uri: str) -> None:
+ """Update DEX's redirect URI.
+
+ Args:
+ client: k8s client.
+ redirect_uri: the new redirect URI.
+ """
+ apply_dex_resources(client, redirect_uri=redirect_uri)
+
+
+def get_dex_service_url(client: Client) -> str:
+ """Get the DEX service URL.
+
+ Args:
+ client: k8s client.
+
+ Returns:
+ the service URL.
+ """ + service = client.get(Service, "dex", namespace="dex") + # mypy doesn't work well with lightkube + return f"http://{service.status.loadBalancer.ingress[0].ip}:5556/" # type: ignore diff --git a/tests/integration/files/dex.yaml b/tests/integration/files/dex.yaml new file mode 100644 index 00000000..cb74cbc8 --- /dev/null +++ b/tests/integration/files/dex.yaml @@ -0,0 +1,129 @@ +# Taken from https://github.com/dexidp/dex/blob/master/examples/k8s/dex.yaml +--- +apiVersion: v1 +kind: Namespace +metadata: + name: dex +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: dex + name: dex + namespace: dex +spec: + replicas: 1 + selector: + matchLabels: + app: dex + template: + metadata: + labels: + app: dex + spec: + serviceAccountName: dex # This is created below + containers: + - image: ghcr.io/dexidp/dex:v2.32.0 + name: dex + command: ["/usr/local/bin/dex", "serve", "/etc/dex/cfg/config.yaml"] + + ports: + - name: http + containerPort: 5556 + + volumeMounts: + - name: config + mountPath: /etc/dex/cfg + + readinessProbe: + httpGet: + path: /healthz + port: 5556 + scheme: HTTP + volumes: + - name: config + configMap: + name: dex + items: + - key: config.yaml + path: config.yaml +--- +kind: ConfigMap +apiVersion: v1 +metadata: + name: dex + namespace: dex +data: + config.yaml: | + issuer: {{ issuer_url | d("http://dex.dex.svc.cluster.local:5556", true) }} + storage: + type: kubernetes + config: + inCluster: true + web: + http: 0.0.0.0:5556 + oauth2: + skipApprovalScreen: true + + staticClients: + - id: {{ client_id }} + redirectURIs: + - '{{ redirect_uri | d("http://example.com/redirect", true) }}' + name: 'Test App' + secret: {{ client_secret }} + + enablePasswordDB: true + staticPasswords: + - email: {{ email | d("admin@example.com", true) }} + # bcrypt hash of the string "password": $(echo password | htpasswd -BinC 10 admin | cut -d: -f2) + hash: "$2a$10$2b2cU8CPhOTaGrs1HRQuAueS7JTT5ZHsHSzYiFPm1leZck7Mc8T4W" + username: {{ username | d("admin", 
true) }} +--- +apiVersion: v1 +kind: Service +metadata: + name: dex + namespace: dex +spec: + type: LoadBalancer + ports: + - name: dex + port: 5556 + protocol: TCP + targetPort: 5556 + selector: + app: dex +--- +apiVersion: v1 +kind: ServiceAccount +metadata: + labels: + app: dex + name: dex + namespace: dex +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRole +metadata: + name: dex +rules: +- apiGroups: ["dex.coreos.com"] # API group created by dex + resources: ["*"] + verbs: ["*"] +- apiGroups: ["apiextensions.k8s.io"] + resources: ["customresourcedefinitions"] + verbs: ["create"] # To manage its own resources, dex must be able to create customresourcedefinitions +--- +apiVersion: rbac.authorization.k8s.io/v1 +kind: ClusterRoleBinding +metadata: + name: dex +roleRef: + apiGroup: rbac.authorization.k8s.io + kind: ClusterRole + name: dex +subjects: +- kind: ServiceAccount + name: dex # Service account assigned to the dex pod, created above + namespace: dex # The namespace dex is running in diff --git a/tests/integration/requirements.txt b/tests/integration/requirements.txt index cd32e088..8416585e 100644 --- a/tests/integration/requirements.txt +++ b/tests/integration/requirements.txt @@ -1,2 +1,4 @@ Jinja2>=3,<4 -python-keycloak>=3,<4 \ No newline at end of file +lightkube==0.15.1 +pytest-playwright==0.4.4 +python-keycloak>=3,<4 diff --git a/tests/integration/test_auth_proxy.py b/tests/integration/test_auth_proxy.py new file mode 100644 index 00000000..b086b4a8 --- /dev/null +++ b/tests/integration/test_auth_proxy.py @@ -0,0 +1,83 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Integration tests for jenkins-k8s-operator with auth_proxy.""" + +import re +import time + +import pytest +import requests +from juju.application import Application +from juju.model import Model +from playwright.async_api import expect +from playwright.async_api._generated import Page + + +@pytest.mark.abort_on_fail +@pytest.mark.asyncio +@pytest.mark.usefixtures("oathkeeper_related") +async def test_auth_proxy_integration_returns_not_authorized( + model: Model, + application: Application, +) -> None: + """ + arrange: deploy the Jenkins charm and establish auth_proxy relations. + act: send a request Jenkins. + assert: a 401 is returned. + """ + status = await model.get_status() + address = status["applications"]["traefik-public"]["public-address"] + # The certificate is self signed, so verification is disabled. + response = requests.get( # nosec + f"https://{address}/{application.model.name}-{application.name}/", + verify=False, + timeout=5, + ) + + assert response.status_code == 401 + + +@pytest.mark.abort_on_fail +@pytest.mark.asyncio +@pytest.mark.usefixtures("oathkeeper_related") +async def test_auth_proxy_integration_authorized( + ext_idp_service: str, + external_user_email: str, + external_user_password: str, + page: Page, + application: Application, +) -> None: + """ + arrange: Deploy jenkins, the authentication bundle and DEX. 
+ act: log into via DEX + assert: the browser is redirected to the Jenkins URL with response code 200 + """ + status = await application.model.get_status() + address = status["applications"]["traefik-public"]["public-address"] + jenkins_url = f"https://{address}/{application.model.name}-{application.name}/" + + await page.goto(jenkins_url) + + expected_url = ( + f"https://{address}/{application.model.name}" + "-identity-platform-login-ui-operator/ui/login" + ) + # Dex might take a bit to be ready + time.sleep(5) + await expect(page).to_have_url(re.compile(rf"{expected_url}*")) + + # Choose provider + async with page.expect_navigation(): + await page.get_by_role("button", name="Dex").click() + + await expect(page).to_have_url(re.compile(rf"{ext_idp_service}*")) + + # Login + await page.get_by_placeholder("email address").click() + await page.get_by_placeholder("email address").fill(external_user_email) + await page.get_by_placeholder("password").click() + await page.get_by_placeholder("password").fill(external_user_password) + await page.get_by_role("button", name="Login").click() + + await expect(page).to_have_url(re.compile(rf"{jenkins_url}*")) diff --git a/tests/integration/test_ingress.py b/tests/integration/test_ingress.py index b24711d4..233b0551 100644 --- a/tests/integration/test_ingress.py +++ b/tests/integration/test_ingress.py @@ -3,8 +3,6 @@ """Integration tests for jenkins-k8s-operator with ingress.""" -# pylint: disable=unused-argument - import typing import pytest diff --git a/tests/unit/test_auth_proxy.py b/tests/unit/test_auth_proxy.py new file mode 100644 index 00000000..f877a1b1 --- /dev/null +++ b/tests/unit/test_auth_proxy.py @@ -0,0 +1,101 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. 
+ +"""Jenkins-k8s auth_proxy unit tests.""" + +# pylint:disable=protected-access + +from unittest.mock import MagicMock, patch + +import ops +from charms.oathkeeper.v0.auth_proxy import AuthProxyRequirer +from charms.traefik_k8s.v2.ingress import IngressPerAppRequirer +from ops.testing import Harness + +from charm import JenkinsK8sOperatorCharm + + +@patch("jenkins.is_storage_ready", return_value=False) +def test_on_auth_proxy_relation_joined_when_jenkins_storage_not_ready(_): + """ + arrange: given a charm with no connectable container. + act: when auth_proxy relation joined event is fired. + assert: the event is deferred. + """ + harness = Harness(JenkinsK8sOperatorCharm) + harness.begin() + harness.set_can_connect(harness.model.unit.containers["jenkins"], True) + mock_event = MagicMock(spec=ops.RelationCreatedEvent) + harness.charm.auth_proxy_observer._on_auth_proxy_relation_joined(mock_event) + + assert mock_event.defer.to_be_called_once() + + +@patch("jenkins.is_storage_ready", return_value=False) +def test_on_auth_proxy_relation_joined_when_ingress_not_ready(_): + """ + arrange: given a charm with ready storage but no ingress related. + act: when auth_proxy relation joined event is fired. + assert: the event is deferred. + """ + harness = Harness(JenkinsK8sOperatorCharm) + harness.begin() + harness.set_can_connect(harness.model.unit.containers["jenkins"], True) + mock_event = MagicMock(spec=ops.RelationCreatedEvent) + harness.charm.auth_proxy_observer._on_auth_proxy_relation_joined(mock_event) + + assert mock_event.defer.to_be_called_once() + + +@patch("jenkins.is_storage_ready", return_value=True) +@patch("pebble.replan_jenkins") +def test_on_auth_proxy_relation_joined(replan_mock, _): + """ + arrange: given a charm with ready storage and ingress related. + act: when auth_proxy relation joined event is fired. + assert: the pebble service is replaned. 
+ """ + harness = Harness(JenkinsK8sOperatorCharm) + harness.begin() + harness.set_can_connect(harness.model.unit.containers["jenkins"], True) + mock_event = MagicMock(spec=ops.RelationCreatedEvent) + mock_ingress = MagicMock(spec=IngressPerAppRequirer) + mock_ingress.url.return_value = "https://example.com" + harness.charm.auth_proxy_observer.ingress = mock_ingress + harness.charm.auth_proxy_observer.auth_proxy = MagicMock(spec=AuthProxyRequirer) + harness.charm.auth_proxy_observer._on_auth_proxy_relation_joined(mock_event) + + replan_mock.assert_called_once() + + +@patch("jenkins.is_storage_ready", return_value=False) +def test_auth_proxy_relation_departed_when_jenkins_storage_not_ready(_): + """ + arrange: given a charm with no connectable container. + act: when auth_proxy departed joined event is fired. + assert: the event is deferred. + """ + harness = Harness(JenkinsK8sOperatorCharm) + harness.begin() + harness.set_can_connect(harness.model.unit.containers["jenkins"], True) + mock_event = MagicMock(spec=ops.RelationCreatedEvent) + harness.charm.auth_proxy_observer._auth_proxy_relation_departed(mock_event) + + assert mock_event.defer.to_be_called_once() + + +@patch("jenkins.is_storage_ready", return_value=True) +@patch("pebble.replan_jenkins") +def test_auth_proxy_relation_departed(replan_mock, _): + """ + arrange: given a charm with ready storage and ingress related. + act: when auth_proxy relation departed event is fired. + assert: the pebble service is replaned. 
+ """ + harness = Harness(JenkinsK8sOperatorCharm) + harness.begin() + harness.set_can_connect(harness.model.unit.containers["jenkins"], True) + mock_event = MagicMock(spec=ops.RelationDepartedEvent) + harness.charm.auth_proxy_observer._auth_proxy_relation_departed(mock_event) + + replan_mock.assert_called_once() diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 7a78dbcb..e97e1a59 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -94,31 +94,6 @@ def test_storage_not_ready(harness: Harness, event_handler: str): assert jenkins_charm.unit.status.name == WAITING_STATUS_NAME -def test__on_jenkins_pebble_ready_error( - harness_container: HarnessWithContainer, - mocked_get_request: typing.Callable[[str, int, typing.Any, typing.Any], requests.Response], - monkeypatch: pytest.MonkeyPatch, -): - """ - arrange: given a patched jenkins bootstrap method that raises an exception. - act: when the jenkins_pebble_ready event is fired. - assert: the charm raises an error. 
- """ - # speed up waiting by changing default argument values - monkeypatch.setattr(requests, "get", functools.partial(mocked_get_request, status_code=200)) - with ( - patch.object(jenkins.Jenkins, "wait_ready"), - patch.object(jenkins.Jenkins, "bootstrap") as bootstrap_mock, - ): - bootstrap_mock.side_effect = jenkins.JenkinsBootstrapError - harness = harness_container.harness - harness.begin() - - jenkins_charm = typing.cast(JenkinsK8sOperatorCharm, harness.charm) - with pytest.raises(jenkins.JenkinsBootstrapError): - jenkins_charm._on_jenkins_pebble_ready(MagicMock(spec=ops.PebbleReadyEvent)) - - def test__on_jenkins_pebble_ready_get_version_error( harness_container: HarnessWithContainer, mocked_get_request: typing.Callable[[str, int, typing.Any, typing.Any], requests.Response], @@ -145,24 +120,6 @@ def test__on_jenkins_pebble_ready_get_version_error( jenkins_charm._on_jenkins_pebble_ready(MagicMock(spec=ops.PebbleReadyEvent)) -@pytest.mark.usefixtures("patch_os_environ") -def test__on_jenkins_pebble_jenkins_not_ready(harness_container: HarnessWithContainer): - """ - arrange: given a mocked jenkins version raises TimeoutError on the second call. - act: when the Jenkins pebble ready event is fired. - assert: the charm raises an error. 
- """ - harness = harness_container.harness - harness.begin() - with patch.object(jenkins.Jenkins, "wait_ready") as wait_ready_mock: - wait_ready_mock.side_effect = TimeoutError - - jenkins_charm = typing.cast(JenkinsK8sOperatorCharm, harness.charm) - - with pytest.raises(TimeoutError): - jenkins_charm._on_jenkins_pebble_ready(MagicMock(spec=ops.PebbleReadyEvent)) - - @pytest.mark.usefixtures("patch_os_environ") def test__on_jenkins_pebble_ready(harness_container: HarnessWithContainer): """ diff --git a/tests/unit/test_jenkins.py b/tests/unit/test_jenkins.py index 1720ca58..2d539dde 100644 --- a/tests/unit/test_jenkins.py +++ b/tests/unit/test_jenkins.py @@ -366,6 +366,36 @@ def test_install_config_raises_exception(): jenkins._install_configs(mock_container, jenkins.DEFAULT_JENKINS_CONFIG) +def test_install_auth_proxy_config(harness_container: HarnessWithContainer): + """ + arrange: given a mocked uninitialized container. + act: when install_auth_proxy_config is called. + assert: jenkins configuration file is generated. + """ + jenkins.install_auth_proxy_config(harness_container.container) + + config_xml = str( + harness_container.container.pull(jenkins.CONFIG_FILE_PATH, encoding="utf-8").read() + ) + + assert "false" in config_xml + + +def test_install_auth_proxy_config_raises_exception(): + """ + arrange: set up a container raising an exception. + act: when install_auth_proxy_config is called. + assert: a JenkinsBootstrapError is raised. + """ + mock_container = MagicMock(ops.Container) + mock_container.push = MagicMock( + side_effect=ops.pebble.PathError(kind="not-found", message="Path not found.") + ) + + with pytest.raises(jenkins.JenkinsBootstrapError): + jenkins.install_auth_proxy_config(mock_container) + + def test_install_defalt_config(harness_container: HarnessWithContainer): """ arrange: given a mocked uninitialized container. 
diff --git a/tests/unit/test_pebble.py b/tests/unit/test_pebble.py new file mode 100644 index 00000000..3d010c20 --- /dev/null +++ b/tests/unit/test_pebble.py @@ -0,0 +1,75 @@ +# Copyright 2024 Canonical Ltd. +# See LICENSE file for licensing details. + +"""Unit tests for the pebble module.""" + +import functools +import typing +from unittest.mock import patch + +import pytest +import requests + +import jenkins +import pebble +import state + +from .types_ import HarnessWithContainer + + +def test_replan_jenkins_pebble_error( + harness_container: HarnessWithContainer, + mocked_get_request: typing.Callable[[str, int, typing.Any, typing.Any], requests.Response], + monkeypatch: pytest.MonkeyPatch, +): + """ + arrange: given a patched jenkins bootstrap method that raises an exception. + act: when a replan is executed. + assert: an error is raised. + """ + # speed up waiting by changing default argument values + monkeypatch.setattr(requests, "get", functools.partial(mocked_get_request, status_code=200)) + with ( + patch.object(jenkins.Jenkins, "wait_ready"), + patch.object(jenkins.Jenkins, "bootstrap") as bootstrap_mock, + ): + bootstrap_mock.side_effect = jenkins.JenkinsBootstrapError + harness = harness_container.harness + harness.begin() + + env = jenkins.Environment( + JENKINS_HOME=str(jenkins.JENKINS_HOME_PATH), + JENKINS_PREFIX="/", + ) + + with pytest.raises(jenkins.JenkinsBootstrapError): + pebble.replan_jenkins( + harness_container.container, + jenkins.Jenkins(env), + state.State.from_charm(harness.charm), + ) + + +@pytest.mark.usefixtures("patch_os_environ") +def test_replan_jenkins_when_not_ready(harness_container: HarnessWithContainer): + """ + arrange: given a mocked jenkins version raises TimeoutError on the second call. + act: when a replan is executed. + assert: an error is raised.
+ """ + harness = harness_container.harness + harness.begin() + with patch.object(jenkins.Jenkins, "wait_ready") as wait_ready_mock: + wait_ready_mock.side_effect = TimeoutError + + env = jenkins.Environment( + JENKINS_HOME=str(jenkins.JENKINS_HOME_PATH), + JENKINS_PREFIX="/", + ) + + with pytest.raises(jenkins.JenkinsBootstrapError): + pebble.replan_jenkins( + harness_container.container, + jenkins.Jenkins(env), + state.State.from_charm(harness.charm), + ) diff --git a/tests/unit/test_state.py b/tests/unit/test_state.py index b497a6ab..8bc97bde 100644 --- a/tests/unit/test_state.py +++ b/tests/unit/test_state.py @@ -159,6 +159,31 @@ def test_plugins_config(mock_charm: MagicMock): assert tuple(config.plugins) == ("hello", "world") +def test_auth_proxy_integrated_false(mock_charm: MagicMock): + """ + arrange: given a charm with no auth proxy integration. + act: when state is initialized from charm. + assert: auth_proxy_integrated is False. + """ + mock_charm.config = {} + mock_charm.model.get_relation.return_value = {} + + config = state.State.from_charm(mock_charm) + assert not config.auth_proxy_integrated + + +def test_auth_proxy_integrated_true(mock_charm: MagicMock): + """ + arrange: given a charm with auth proxy integration. + act: when state is initialized from charm. + assert: auth_proxy_integrated is True. + """ + mock_charm.config = {} + + config = state.State.from_charm(mock_charm) + assert not config.auth_proxy_integrated + + def test_invalid_num_units(mock_charm: MagicMock): """ arrange: given a mock charm with more than 1 unit of deployment. diff --git a/tox.ini b/tox.ini index 56add3ff..7e218e39 100644 --- a/tox.ini +++ b/tox.ini @@ -124,6 +124,7 @@ deps = -r{toxinidir}/requirements.txt -r{[vars]tst_path}integration/requirements.txt commands = + playwright install --with-deps firefox pytest --tb native --ignore={[vars]tst_path}unit --log-cli-level=INFO -s {posargs} # uncomment the following lines to use with output of tox -e build. 
# --jenkins-image=localhost:32000/jenkins:test \