From 37622c38129be88f02d04cacdec5a27972be4ace Mon Sep 17 00:00:00 2001
From: Leon <82407168+sed-i@users.noreply.github.com>
Date: Mon, 25 Mar 2024 10:11:17 -0400
Subject: [PATCH] Fix client tls (#239)

---
 src/alertmanager_client.py                    |  2 +-
 src/charm.py                                  | 14 ++---
 tests/integration/test_persistence.py         |  5 +-
 tests/integration/test_templates.py           | 53 +++++++++----------
 tests/integration/test_upgrade_charm.py       |  5 +-
 tests/unit/test_charm.py                      |  6 +--
 tests/unit/test_external_url.py               |  5 +-
 ...test_push_config_to_workload_on_startup.py |  7 +--
 tox.ini                                       |  1 +
 9 files changed, 43 insertions(+), 55 deletions(-)

diff --git a/src/alertmanager_client.py b/src/alertmanager_client.py
index 4198dc37..802511af 100644
--- a/src/alertmanager_client.py
+++ b/src/alertmanager_client.py
@@ -19,7 +19,7 @@

 logger = logging.getLogger(__name__)


-class AlertmanagerBadResponse(RuntimeError):
+class AlertmanagerBadResponse(ConnectionError):
     """A catch-all exception type to indicate 'no reply', regardless the reason."""

diff --git a/src/charm.py b/src/charm.py
index e3e80018..d4588b25 100755
--- a/src/charm.py
+++ b/src/charm.py
@@ -19,7 +19,6 @@
     WorkloadManager,
     WorkloadManagerError,
 )
-from alertmanager_client import Alertmanager, AlertmanagerBadResponse
 from charms.alertmanager_k8s.v0.alertmanager_remote_configuration import (
     RemoteConfigurationRequirer,
 )
@@ -67,12 +66,7 @@
     ),
 )
 class AlertmanagerCharm(CharmBase):
-    """A Juju charm for alertmanager.
-
-    Attributes:
-        api: an API client instance for communicating with the alertmanager workload
-            server
-    """
+    """A Juju charm for alertmanager."""

     # Container name must match metadata.yaml
     # Layer name is used for the layer label argument in container.add_layer
@@ -124,8 +118,6 @@ def __init__(self, *args):
             external_url=self._internal_url,  # TODO See 'TODO' below, about external_url
         )

-        self.api = Alertmanager(endpoint_url=self._external_url)
-
         self.grafana_dashboard_provider = GrafanaDashboardProvider(charm=self)
         self.grafana_source_provider = GrafanaSourceProvider(
             charm=self,
@@ -498,7 +490,7 @@ def _on_update_status(self, _):
         Logs list of peers, uptime and version info.
         """
         try:
-            status = self.api.status()
+            status = self.alertmanager_workload.api.status()
             logger.info(
                 "alertmanager %s is up and running (uptime: %s); "
                 "cluster mode: %s, with %d peers",
@@ -507,7 +499,7 @@
                 status["cluster"]["status"],
                 len(status["cluster"]["peers"]),
             )
-        except AlertmanagerBadResponse as e:
+        except ConnectionError as e:
             logger.error("Failed to obtain status: %s", str(e))

         # Calling the common hook to make sure a single unit set its IP in case all events fired
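
Note on the charm.py hunks above: the charm used to construct a second API client of its own (`self.api = Alertmanager(endpoint_url=self._external_url)`), separate from the one `WorkloadManager` already holds; per the subject line, that second client is what misbehaved under TLS. Status polling now goes through `self.alertmanager_workload.api`, and `AlertmanagerBadResponse` is rebased onto `ConnectionError`, so the broadened `except ConnectionError` clause still catches the client's own error type. A minimal sketch of that exception-hierarchy point (illustrative only, not part of the patch):

    class AlertmanagerBadResponse(ConnectionError):
        """A catch-all exception type to indicate 'no reply'."""

    def poll_status():
        # Stand-in for self.alertmanager_workload.api.status()
        raise AlertmanagerBadResponse("no reply from server")

    try:
        poll_status()
    except ConnectionError as e:
        # AlertmanagerBadResponse is caught here via its new base class
        print(f"Failed to obtain status: {e}")
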
""" try: - status = self.api.status() + status = self.alertmanager_workload.api.status() logger.info( "alertmanager %s is up and running (uptime: %s); " "cluster mode: %s, with %d peers", @@ -507,7 +499,7 @@ def _on_update_status(self, _): status["cluster"]["status"], len(status["cluster"]["peers"]), ) - except AlertmanagerBadResponse as e: + except ConnectionError as e: logger.error("Failed to obtain status: %s", str(e)) # Calling the common hook to make sure a single unit set its IP in case all events fired diff --git a/tests/integration/test_persistence.py b/tests/integration/test_persistence.py index 6e21cac1..5e74ecb9 100644 --- a/tests/integration/test_persistence.py +++ b/tests/integration/test_persistence.py @@ -26,7 +26,10 @@ async def test_silences_persist_across_upgrades(ops_test: OpsTest, charm_under_t await ops_test.model.deploy( "ch:alertmanager-k8s", application_name=app_name, channel="edge", trust=True ) - await ops_test.model.wait_for_idle(apps=[app_name], status="active", timeout=1000) + await ops_test.model.wait_for_idle( + apps=[app_name], status="active", timeout=1000, raise_on_error=False + ) + await ops_test.model.wait_for_idle(apps=[app_name], status="active", timeout=30) # set a silencer for an alert and check it is set unit_address = await get_unit_address(ops_test, app_name, 0) diff --git a/tests/integration/test_templates.py b/tests/integration/test_templates.py index 1a879330..603eb77b 100644 --- a/tests/integration/test_templates.py +++ b/tests/integration/test_templates.py @@ -5,13 +5,12 @@ import json import logging import time -from datetime import datetime, timedelta, timezone from pathlib import Path import pytest +import sh import yaml -from alertmanager_client import Alertmanager -from helpers import get_unit_address, is_alertmanager_up +from helpers import is_alertmanager_up from pytest_operator.plugin import OpsTest from werkzeug.wrappers import Request, Response @@ -73,29 +72,6 @@ async def test_configure_alertmanager_with_templates(ops_test: OpsTest, httpserv @pytest.mark.abort_on_fail async def test_receiver_gets_alert(ops_test: OpsTest, httpserver): - # create an alert - start_time = datetime.now(timezone.utc) - end_time = start_time + timedelta(minutes=5) - alert_name = "fake-alert" - model_uuid = "1234" - alerts = [ - { - "startsAt": start_time.isoformat("T"), - "endsAt": end_time.isoformat("T"), - "status": "firing", - "annotations": { - "summary": "A fake alert", - }, - "labels": { - "juju_model_uuid": model_uuid, - "juju_application": app_name, - "juju_model": ops_test.model_name, - "alertname": alert_name, - }, - "generatorURL": f"http://localhost/{alert_name}", - } - ] - request_from_alertmanager = None def request_handler(request: Request): @@ -127,9 +103,28 @@ def request_handler(request: Request): with httpserver.wait(timeout=120) as waiting: # expect an alert to be forwarded to the receiver httpserver.expect_oneshot_request("/", method="POST").respond_with_handler(request_handler) - unit_address = await get_unit_address(ops_test, app_name, 0) - amanager = Alertmanager(f"http://{unit_address}:9093") - amanager.set_alerts(alerts) + + # Use amtool to fire a stand-in alert + sh.juju( + [ + "ssh", + "-m", + ops_test.model_name, + "--container", + "alertmanager", + f"{app_name}/0", + "amtool", + "alert", + "add", + "foo", + "node=bar", + "status=firing", + "juju_model_uuid=1234", + f"juju_application={app_name}", + "juju_model=model_name", + "--annotation=summary=summary", + ] + ) # check receiver got an alert assert waiting.result diff --git 
diff --git a/tests/integration/test_upgrade_charm.py b/tests/integration/test_upgrade_charm.py
index 77db1444..181e926e 100644
--- a/tests/integration/test_upgrade_charm.py
+++ b/tests/integration/test_upgrade_charm.py
@@ -71,7 +71,10 @@ async def test_upgrade_local_with_local_with_relations(ops_test: OpsTest, charm_

     # Refresh from path
     await ops_test.model.applications[app_name].refresh(path=charm_under_test, resources=resources)
     await ops_test.model.wait_for_idle(
-        apps=[app_name, "prom", "karma"], status="active", timeout=2500
+        apps=[app_name, "prom", "karma"],
+        status="active",
+        timeout=2500,
+        raise_on_error=False,
     )
     assert await is_alertmanager_up(ops_test, app_name)
diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py
index 18375ade..b04bce25 100644
--- a/tests/unit/test_charm.py
+++ b/tests/unit/test_charm.py
@@ -8,8 +8,8 @@
 import ops
 import yaml
 from alertmanager import WorkloadManager
-from charm import Alertmanager, AlertmanagerCharm
-from helpers import k8s_resource_multipatch, tautology
+from charm import AlertmanagerCharm
+from helpers import k8s_resource_multipatch
 from ops import pebble
 from ops.model import ActiveStatus, BlockedStatus
 from ops.testing import Harness
@@ -20,7 +20,6 @@
 class TestWithInitialHooks(unittest.TestCase):
     container_name: str = "alertmanager"

-    @patch.object(Alertmanager, "reload", tautology)
     @patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("ok", ""))
     @patch("socket.getfqdn", new=lambda *args: "fqdn")
     @k8s_resource_multipatch
@@ -151,7 +150,6 @@ def test_templates_section_added_if_user_provided_templates(self, *unused):
 class TestWithoutInitialHooks(unittest.TestCase):
     container_name: str = "alertmanager"

-    @patch.object(Alertmanager, "reload", tautology)
     @patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("ok", ""))
     @k8s_resource_multipatch
     @patch("lightkube.core.client.GenericSyncClient")
diff --git a/tests/unit/test_external_url.py b/tests/unit/test_external_url.py
index 82aa32b8..db177b44 100644
--- a/tests/unit/test_external_url.py
+++ b/tests/unit/test_external_url.py
@@ -9,8 +9,8 @@
 import ops
 import yaml
 from alertmanager import WorkloadManager
-from charm import Alertmanager, AlertmanagerCharm
-from helpers import cli_arg, k8s_resource_multipatch, tautology
+from charm import AlertmanagerCharm
+from helpers import cli_arg, k8s_resource_multipatch
 from ops.testing import Harness

 logger = logging.getLogger(__name__)
@@ -21,7 +21,6 @@

 class TestExternalUrl(unittest.TestCase):
-    @patch.object(Alertmanager, "reload", tautology)
     @patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("ok", ""))
     @patch("socket.getfqdn", new=lambda *args: "fqdn")
     @k8s_resource_multipatch
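
Note on the unit-test hunks: with the charm no longer owning an `Alertmanager` client, there is no HTTP `reload()` call to stub out during `Harness` setup, so every `@patch.object(Alertmanager, "reload", tautology)` decorator is dropped, along with the `tautology` import. For context, a sketch of the retired pattern; the body of `tautology` is an assumption here (the name and usage suggest a callable that accepts anything and returns True):

    def tautology(*args, **kwargs) -> bool:
        # Assumed shape of the helper from tests' helpers module
        return True

    # Each test class previously carried
    #     @patch.object(Alertmanager, "reload", tautology)
    # so that charm startup under ops.testing.Harness would not attempt a real
    # HTTP reload request. With the charm-owned client gone, there is nothing
    # left to stub, and the decorator is simply removed.
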
""" - @patch.object(Alertmanager, "reload", tautology) @patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("0.0.0", "")) @k8s_resource_multipatch @patch("lightkube.core.client.GenericSyncClient") @@ -118,7 +117,6 @@ def setUp(self): self.harness = Harness(AlertmanagerCharm) self.addCleanup(self.harness.cleanup) - @patch.object(Alertmanager, "reload", tautology) @k8s_resource_multipatch @patch("lightkube.core.client.GenericSyncClient") @patch.object(WorkloadManager, "_alertmanager_version", property(lambda *_: "0.0.0")) @@ -132,7 +130,6 @@ def test_charm_blocks_on_invalid_config_on_startup(self, *_): # THEN the charm goes into blocked status self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus) - @patch.object(Alertmanager, "reload", tautology) @k8s_resource_multipatch @patch("lightkube.core.client.GenericSyncClient") @patch.object(WorkloadManager, "_alertmanager_version", property(lambda *_: "0.0.0")) diff --git a/tox.ini b/tox.ini index 359b4c11..f535e137 100644 --- a/tox.ini +++ b/tox.ini @@ -123,6 +123,7 @@ deps = pytest pytest-operator pytest-httpserver + sh commands = pytest -v --tb native --log-cli-level=INFO -s {posargs} {toxinidir}/tests/integration