Skip to content

Commit

Permalink
Fix client TLS (#239)
Browse files Browse the repository at this point in the history
  • Loading branch information
sed-i authored Mar 25, 2024
1 parent af82fec commit 37622c3
Show file tree
Hide file tree
Showing 9 changed files with 43 additions and 55 deletions.
2 changes: 1 addition & 1 deletion src/alertmanager_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
logger = logging.getLogger(__name__)


class AlertmanagerBadResponse(RuntimeError):
class AlertmanagerBadResponse(ConnectionError):
"""A catch-all exception type to indicate 'no reply', regardless the reason."""


Expand Down
14 changes: 3 additions & 11 deletions src/charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
WorkloadManager,
WorkloadManagerError,
)
from alertmanager_client import Alertmanager, AlertmanagerBadResponse
from charms.alertmanager_k8s.v0.alertmanager_remote_configuration import (
RemoteConfigurationRequirer,
)
Expand Down Expand Up @@ -67,12 +66,7 @@
),
)
class AlertmanagerCharm(CharmBase):
"""A Juju charm for alertmanager.
Attributes:
api: an API client instance for communicating with the alertmanager workload
server
"""
"""A Juju charm for alertmanager."""

# Container name must match metadata.yaml
# Layer name is used for the layer label argument in container.add_layer
Expand Down Expand Up @@ -124,8 +118,6 @@ def __init__(self, *args):
external_url=self._internal_url, # TODO See 'TODO' below, about external_url
)

self.api = Alertmanager(endpoint_url=self._external_url)

self.grafana_dashboard_provider = GrafanaDashboardProvider(charm=self)
self.grafana_source_provider = GrafanaSourceProvider(
charm=self,
Expand Down Expand Up @@ -498,7 +490,7 @@ def _on_update_status(self, _):
Logs list of peers, uptime and version info.
"""
try:
status = self.api.status()
status = self.alertmanager_workload.api.status()
logger.info(
"alertmanager %s is up and running (uptime: %s); "
"cluster mode: %s, with %d peers",
Expand All @@ -507,7 +499,7 @@ def _on_update_status(self, _):
status["cluster"]["status"],
len(status["cluster"]["peers"]),
)
except AlertmanagerBadResponse as e:
except ConnectionError as e:
logger.error("Failed to obtain status: %s", str(e))

# Calling the common hook to make sure a single unit set its IP in case all events fired
Expand Down
5 changes: 4 additions & 1 deletion tests/integration/test_persistence.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,10 @@ async def test_silences_persist_across_upgrades(ops_test: OpsTest, charm_under_t
await ops_test.model.deploy(
"ch:alertmanager-k8s", application_name=app_name, channel="edge", trust=True
)
await ops_test.model.wait_for_idle(apps=[app_name], status="active", timeout=1000)
await ops_test.model.wait_for_idle(
apps=[app_name], status="active", timeout=1000, raise_on_error=False
)
await ops_test.model.wait_for_idle(apps=[app_name], status="active", timeout=30)

# set a silencer for an alert and check it is set
unit_address = await get_unit_address(ops_test, app_name, 0)
Expand Down
53 changes: 24 additions & 29 deletions tests/integration/test_templates.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,12 @@
import json
import logging
import time
from datetime import datetime, timedelta, timezone
from pathlib import Path

import pytest
import sh
import yaml
from alertmanager_client import Alertmanager
from helpers import get_unit_address, is_alertmanager_up
from helpers import is_alertmanager_up
from pytest_operator.plugin import OpsTest
from werkzeug.wrappers import Request, Response

Expand Down Expand Up @@ -73,29 +72,6 @@ async def test_configure_alertmanager_with_templates(ops_test: OpsTest, httpserv

@pytest.mark.abort_on_fail
async def test_receiver_gets_alert(ops_test: OpsTest, httpserver):
# create an alert
start_time = datetime.now(timezone.utc)
end_time = start_time + timedelta(minutes=5)
alert_name = "fake-alert"
model_uuid = "1234"
alerts = [
{
"startsAt": start_time.isoformat("T"),
"endsAt": end_time.isoformat("T"),
"status": "firing",
"annotations": {
"summary": "A fake alert",
},
"labels": {
"juju_model_uuid": model_uuid,
"juju_application": app_name,
"juju_model": ops_test.model_name,
"alertname": alert_name,
},
"generatorURL": f"http://localhost/{alert_name}",
}
]

request_from_alertmanager = None

def request_handler(request: Request):
Expand Down Expand Up @@ -127,9 +103,28 @@ def request_handler(request: Request):
with httpserver.wait(timeout=120) as waiting:
# expect an alert to be forwarded to the receiver
httpserver.expect_oneshot_request("/", method="POST").respond_with_handler(request_handler)
unit_address = await get_unit_address(ops_test, app_name, 0)
amanager = Alertmanager(f"http://{unit_address}:9093")
amanager.set_alerts(alerts)

# Use amtool to fire a stand-in alert
sh.juju(
[
"ssh",
"-m",
ops_test.model_name,
"--container",
"alertmanager",
f"{app_name}/0",
"amtool",
"alert",
"add",
"foo",
"node=bar",
"status=firing",
"juju_model_uuid=1234",
f"juju_application={app_name}",
"juju_model=model_name",
"--annotation=summary=summary",
]
)

# check receiver got an alert
assert waiting.result
Expand Down
5 changes: 4 additions & 1 deletion tests/integration/test_upgrade_charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,10 @@ async def test_upgrade_local_with_local_with_relations(ops_test: OpsTest, charm_
# Refresh from path
await ops_test.model.applications[app_name].refresh(path=charm_under_test, resources=resources)
await ops_test.model.wait_for_idle(
apps=[app_name, "prom", "karma"], status="active", timeout=2500
apps=[app_name, "prom", "karma"],
status="active",
timeout=2500,
raise_on_error=False,
)
assert await is_alertmanager_up(ops_test, app_name)

Expand Down
6 changes: 2 additions & 4 deletions tests/unit/test_charm.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
import ops
import yaml
from alertmanager import WorkloadManager
from charm import Alertmanager, AlertmanagerCharm
from helpers import k8s_resource_multipatch, tautology
from charm import AlertmanagerCharm
from helpers import k8s_resource_multipatch
from ops import pebble
from ops.model import ActiveStatus, BlockedStatus
from ops.testing import Harness
Expand All @@ -20,7 +20,6 @@
class TestWithInitialHooks(unittest.TestCase):
container_name: str = "alertmanager"

@patch.object(Alertmanager, "reload", tautology)
@patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("ok", ""))
@patch("socket.getfqdn", new=lambda *args: "fqdn")
@k8s_resource_multipatch
Expand Down Expand Up @@ -151,7 +150,6 @@ def test_templates_section_added_if_user_provided_templates(self, *unused):
class TestWithoutInitialHooks(unittest.TestCase):
container_name: str = "alertmanager"

@patch.object(Alertmanager, "reload", tautology)
@patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("ok", ""))
@k8s_resource_multipatch
@patch("lightkube.core.client.GenericSyncClient")
Expand Down
5 changes: 2 additions & 3 deletions tests/unit/test_external_url.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
import ops
import yaml
from alertmanager import WorkloadManager
from charm import Alertmanager, AlertmanagerCharm
from helpers import cli_arg, k8s_resource_multipatch, tautology
from charm import AlertmanagerCharm
from helpers import cli_arg, k8s_resource_multipatch
from ops.testing import Harness

logger = logging.getLogger(__name__)
Expand All @@ -21,7 +21,6 @@


class TestExternalUrl(unittest.TestCase):
@patch.object(Alertmanager, "reload", tautology)
@patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("ok", ""))
@patch("socket.getfqdn", new=lambda *args: "fqdn")
@k8s_resource_multipatch
Expand Down
7 changes: 2 additions & 5 deletions tests/unit/test_push_config_to_workload_on_startup.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,8 @@
import validators
import yaml
from alertmanager import WorkloadManager
from charm import Alertmanager, AlertmanagerCharm
from helpers import k8s_resource_multipatch, tautology
from charm import AlertmanagerCharm
from helpers import k8s_resource_multipatch
from hypothesis import given
from ops.model import ActiveStatus, BlockedStatus
from ops.testing import Harness
Expand All @@ -29,7 +29,6 @@ class TestPushConfigToWorkloadOnStartup(unittest.TestCase):
Background: Charm starts up with initial hooks.
"""

@patch.object(Alertmanager, "reload", tautology)
@patch.object(WorkloadManager, "check_config", lambda *a, **kw: ("0.0.0", ""))
@k8s_resource_multipatch
@patch("lightkube.core.client.GenericSyncClient")
Expand Down Expand Up @@ -118,7 +117,6 @@ def setUp(self):
self.harness = Harness(AlertmanagerCharm)
self.addCleanup(self.harness.cleanup)

@patch.object(Alertmanager, "reload", tautology)
@k8s_resource_multipatch
@patch("lightkube.core.client.GenericSyncClient")
@patch.object(WorkloadManager, "_alertmanager_version", property(lambda *_: "0.0.0"))
Expand All @@ -132,7 +130,6 @@ def test_charm_blocks_on_invalid_config_on_startup(self, *_):
# THEN the charm goes into blocked status
self.assertIsInstance(self.harness.charm.unit.status, BlockedStatus)

@patch.object(Alertmanager, "reload", tautology)
@k8s_resource_multipatch
@patch("lightkube.core.client.GenericSyncClient")
@patch.object(WorkloadManager, "_alertmanager_version", property(lambda *_: "0.0.0"))
Expand Down
1 change: 1 addition & 0 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ deps =
pytest
pytest-operator
pytest-httpserver
sh
commands =
pytest -v --tb native --log-cli-level=INFO -s {posargs} {toxinidir}/tests/integration

Expand Down

0 comments on commit 37622c3

Please sign in to comment.