From 26c34b0d5e0dd7ed349cad15f401bf5989436ce3 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Wed, 21 Aug 2024 13:58:38 -0400 Subject: [PATCH 01/44] Add scaffolding --- octopus_deploy/CHANGELOG.md | 4 + octopus_deploy/README.md | 60 ++++++++++++ octopus_deploy/assets/configuration/spec.yaml | 10 ++ .../dashboards/octopus_deploy_overview.json | 1 + octopus_deploy/assets/service_checks.json | 1 + octopus_deploy/changelog.d/1.added | 1 + octopus_deploy/datadog_checks/__init__.py | 4 + .../octopus_deploy/__about__.py | 4 + .../datadog_checks/octopus_deploy/__init__.py | 7 ++ .../datadog_checks/octopus_deploy/check.py | 98 +++++++++++++++++++ .../octopus_deploy/config_models/__init__.py | 25 +++++ .../octopus_deploy/config_models/defaults.py | 16 +++ .../octopus_deploy/config_models/instance.py | 51 ++++++++++ .../octopus_deploy/config_models/shared.py | 48 +++++++++ .../config_models/validators.py | 13 +++ .../octopus_deploy/data/conf.yaml.example | 44 +++++++++ octopus_deploy/hatch.toml | 4 + octopus_deploy/images/IMAGES_README.md | 41 ++++++++ octopus_deploy/manifest.json | 57 +++++++++++ octopus_deploy/metadata.csv | 1 + octopus_deploy/pyproject.toml | 60 ++++++++++++ octopus_deploy/tests/__init__.py | 3 + octopus_deploy/tests/conftest.py | 14 +++ octopus_deploy/tests/test_unit.py | 26 +++++ 24 files changed, 593 insertions(+) create mode 100644 octopus_deploy/CHANGELOG.md create mode 100644 octopus_deploy/README.md create mode 100644 octopus_deploy/assets/configuration/spec.yaml create mode 100644 octopus_deploy/assets/dashboards/octopus_deploy_overview.json create mode 100644 octopus_deploy/assets/service_checks.json create mode 100644 octopus_deploy/changelog.d/1.added create mode 100644 octopus_deploy/datadog_checks/__init__.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/__about__.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/__init__.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/check.py create mode 
100644 octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/config_models/validators.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example create mode 100644 octopus_deploy/hatch.toml create mode 100644 octopus_deploy/images/IMAGES_README.md create mode 100644 octopus_deploy/manifest.json create mode 100644 octopus_deploy/metadata.csv create mode 100644 octopus_deploy/pyproject.toml create mode 100644 octopus_deploy/tests/__init__.py create mode 100644 octopus_deploy/tests/conftest.py create mode 100644 octopus_deploy/tests/test_unit.py diff --git a/octopus_deploy/CHANGELOG.md b/octopus_deploy/CHANGELOG.md new file mode 100644 index 0000000000000..743bea7d151ee --- /dev/null +++ b/octopus_deploy/CHANGELOG.md @@ -0,0 +1,4 @@ +# CHANGELOG - Octopus Deploy + + + diff --git a/octopus_deploy/README.md b/octopus_deploy/README.md new file mode 100644 index 0000000000000..916b0a6b8a014 --- /dev/null +++ b/octopus_deploy/README.md @@ -0,0 +1,60 @@ +# Agent Check: Octopus Deploy + +## Overview + +This check monitors [Octopus Deploy][1] through the Datadog Agent. + +Include a high level overview of what this integration does: +- What does your product do (in 1-2 sentences)? +- What value will customers get from this integration, and why is it valuable to them? +- What specific data will your integration monitor, and what's the value of that data? + +## Setup + +Follow the instructions below to install and configure this check for an Agent running on a host. For containerized environments, see the [Autodiscovery Integration Templates][3] for guidance on applying these instructions. 
+ +### Installation + +The Octopus Deploy check is included in the [Datadog Agent][2] package. +No additional installation is needed on your server. + +### Configuration + +1. Edit the `octopus_deploy.d/conf.yaml` file, in the `conf.d/` folder at the root of your Agent's configuration directory to start collecting your octopus_deploy performance data. See the [sample octopus_deploy.d/conf.yaml][4] for all available configuration options. + +2. [Restart the Agent][5]. + +### Validation + +[Run the Agent's status subcommand][6] and look for `octopus_deploy` under the Checks section. + +## Data Collected + +### Metrics + +See [metadata.csv][7] for a list of metrics provided by this integration. + +### Events + +The Octopus Deploy integration does not include any events. + +### Service Checks + +The Octopus Deploy integration does not include any service checks. + +See [service_checks.json][8] for a list of service checks provided by this integration. + +## Troubleshooting + +Need help? Contact [Datadog support][9]. 
+ + +[1]: **LINK_TO_INTEGRATION_SITE** +[2]: https://app.datadoghq.com/account/settings/agent/latest +[3]: https://docs.datadoghq.com/agent/kubernetes/integrations/ +[4]: https://github.com/DataDog/integrations-core/blob/master/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +[5]: https://docs.datadoghq.com/agent/guide/agent-commands/#start-stop-and-restart-the-agent +[6]: https://docs.datadoghq.com/agent/guide/agent-commands/#agent-status-and-information +[7]: https://github.com/DataDog/integrations-core/blob/master/octopus_deploy/metadata.csv +[8]: https://github.com/DataDog/integrations-core/blob/master/octopus_deploy/assets/service_checks.json +[9]: https://docs.datadoghq.com/help/ diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml new file mode 100644 index 0000000000000..9efdf98a42a2a --- /dev/null +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -0,0 +1,10 @@ +name: Octopus Deploy +files: +- name: octopus_deploy.yaml + options: + - template: init_config + options: + - template: init_config/default + - template: instances + options: + - template: instances/default diff --git a/octopus_deploy/assets/dashboards/octopus_deploy_overview.json b/octopus_deploy/assets/dashboards/octopus_deploy_overview.json new file mode 100644 index 0000000000000..e9e23301af626 --- /dev/null +++ b/octopus_deploy/assets/dashboards/octopus_deploy_overview.json @@ -0,0 +1 @@ +Please build an out-of-the-box dashboard for your integration following our best practices here: https://datadoghq.dev/integrations-core/guidelines/dashboards/#best-practices \ No newline at end of file diff --git a/octopus_deploy/assets/service_checks.json b/octopus_deploy/assets/service_checks.json new file mode 100644 index 0000000000000..fe51488c7066f --- /dev/null +++ b/octopus_deploy/assets/service_checks.json @@ -0,0 +1 @@ +[] diff --git a/octopus_deploy/changelog.d/1.added b/octopus_deploy/changelog.d/1.added new file mode 
100644 index 0000000000000..aa949b47b7b41 --- /dev/null +++ b/octopus_deploy/changelog.d/1.added @@ -0,0 +1 @@ +Initial Release \ No newline at end of file diff --git a/octopus_deploy/datadog_checks/__init__.py b/octopus_deploy/datadog_checks/__init__.py new file mode 100644 index 0000000000000..1517d901c0aae --- /dev/null +++ b/octopus_deploy/datadog_checks/__init__.py @@ -0,0 +1,4 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore diff --git a/octopus_deploy/datadog_checks/octopus_deploy/__about__.py b/octopus_deploy/datadog_checks/octopus_deploy/__about__.py new file mode 100644 index 0000000000000..e9541ce83e9e5 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/__about__.py @@ -0,0 +1,4 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +__version__ = '0.0.1' diff --git a/octopus_deploy/datadog_checks/octopus_deploy/__init__.py b/octopus_deploy/datadog_checks/octopus_deploy/__init__.py new file mode 100644 index 0000000000000..5aa2a1a8ef33b --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/__init__.py @@ -0,0 +1,7 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from .__about__ import __version__ +from .check import OctopusDeployCheck + +__all__ = ['__version__', 'OctopusDeployCheck'] diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py new file mode 100644 index 0000000000000..dd6d75835109a --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -0,0 +1,98 @@ +# (C) Datadog, Inc. 
2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from typing import Any # noqa: F401 + +from datadog_checks.base import AgentCheck # noqa: F401 + +# from datadog_checks.base.utils.db import QueryManager +# from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout +# from json import JSONDecodeError + + +class OctopusDeployCheck(AgentCheck): + + # This will be the prefix of every metric and service check the integration sends + __NAMESPACE__ = 'octopus_deploy' + + def __init__(self, name, init_config, instances): + super(OctopusDeployCheck, self).__init__(name, init_config, instances) + + # Use self.instance to read the check configuration + # self.url = self.instance.get("url") + + # If the check is going to perform SQL queries you should define a query manager here. + # More info at + # https://datadoghq.dev/integrations-core/base/databases/#datadog_checks.base.utils.db.core.QueryManager + # sample_query = { + # "name": "sample", + # "query": "SELECT * FROM sample_table", + # "columns": [ + # {"name": "metric", "type": "gauge"} + # ], + # } + # self._query_manager = QueryManager(self, self.execute_query, queries=[sample_query]) + # self.check_initializations.append(self._query_manager.compile_queries) + + def check(self, _): + # type: (Any) -> None + # The following are useful bits of code to help new users get started. + + # Perform HTTP Requests with our HTTP wrapper. 
+ # More info at https://datadoghq.dev/integrations-core/base/http/ + # try: + # response = self.http.get(self.url) + # response.raise_for_status() + # response_json = response.json() + + # except Timeout as e: + # self.service_check( + # "can_connect", + # AgentCheck.CRITICAL, + # message="Request timeout: {}, {}".format(self.url, e), + # ) + # raise + + # except (HTTPError, InvalidURL, ConnectionError) as e: + # self.service_check( + # "can_connect", + # AgentCheck.CRITICAL, + # message="Request failed: {}, {}".format(self.url, e), + # ) + # raise + + # except JSONDecodeError as e: + # self.service_check( + # "can_connect", + # AgentCheck.CRITICAL, + # message="JSON Parse failed: {}, {}".format(self.url, e), + # ) + # raise + + # except ValueError as e: + # self.service_check( + # "can_connect", AgentCheck.CRITICAL, message=str(e) + # ) + # raise + + # This is how you submit metrics + # There are different types of metrics that you can submit (gauge, event). + # More info at https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck + # self.gauge("test", 1.23, tags=['foo:bar']) + + # Perform database queries using the Query Manager + # self._query_manager.execute() + + # This is how you use the persistent cache. This cache file based and persists across agent restarts. + # If you need an in-memory cache that is persisted across runs + # You can define a dictionary in the __init__ method. + # self.write_persistent_cache("key", "value") + # value = self.read_persistent_cache("key") + + # If your check ran successfully, you can send the status. 
+ # More info at + # https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck.service_check + # self.service_check("can_connect", AgentCheck.OK) + + # If it didn't then it should send a critical service check + self.service_check("can_connect", AgentCheck.CRITICAL) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py new file mode 100644 index 0000000000000..2e7eff5e94dd7 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py @@ -0,0 +1,25 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +# This file is autogenerated. +# To change this file you should edit assets/configuration/spec.yaml and then run the following commands: +# ddev -x validate config -s +# ddev -x validate models -s + + +from .instance import InstanceConfig +from .shared import SharedConfig + + +class ConfigMixin: + _config_model_instance: InstanceConfig + _config_model_shared: SharedConfig + + @property + def config(self) -> InstanceConfig: + return self._config_model_instance + + @property + def shared_config(self) -> SharedConfig: + return self._config_model_shared diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py new file mode 100644 index 0000000000000..7adf7c7b7abd3 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py @@ -0,0 +1,16 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +# This file is autogenerated. 
+# To change this file you should edit assets/configuration/spec.yaml and then run the following commands: +# ddev -x validate config -s +# ddev -x validate models -s + + +def instance_empty_default_hostname(): + return False + + +def instance_min_collection_interval(): + return 15 diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py new file mode 100644 index 0000000000000..b2d32d2e62ea8 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py @@ -0,0 +1,51 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +# This file is autogenerated. +# To change this file you should edit assets/configuration/spec.yaml and then run the following commands: +# ddev -x validate config -s +# ddev -x validate models -s + + +from __future__ import annotations + +from typing import Optional + +from pydantic import BaseModel, ConfigDict, field_validator, model_validator + +from datadog_checks.base.utils.functions import identity +from datadog_checks.base.utils.models import validation + +from . 
import defaults, validators + + +class InstanceConfig(BaseModel): + model_config = ConfigDict( + validate_default=True, + arbitrary_types_allowed=True, + frozen=True, + ) + empty_default_hostname: Optional[bool] = None + min_collection_interval: Optional[float] = None + service: Optional[str] = None + tags: Optional[tuple[str, ...]] = None + + @model_validator(mode='before') + def _initial_validation(cls, values): + return validation.core.initialize_config(getattr(validators, 'initialize_instance', identity)(values)) + + @field_validator('*', mode='before') + def _validate(cls, value, info): + field = cls.model_fields[info.field_name] + field_name = field.alias or info.field_name + if field_name in info.context['configured_fields']: + value = getattr(validators, f'instance_{info.field_name}', identity)(value, field=field) + else: + value = getattr(defaults, f'instance_{info.field_name}', lambda: value)() + + return validation.utils.make_immutable(value) + + @model_validator(mode='after') + def _final_validation(cls, model): + return validation.core.check_model(getattr(validators, 'check_instance', identity)(model)) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py new file mode 100644 index 0000000000000..656c417aa3849 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py @@ -0,0 +1,48 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +# This file is autogenerated. 
+# To change this file you should edit assets/configuration/spec.yaml and then run the following commands: +# ddev -x validate config -s +# ddev -x validate models -s + + +from __future__ import annotations + +from typing import Optional + +from pydantic import BaseModel, ConfigDict, field_validator, model_validator + +from datadog_checks.base.utils.functions import identity +from datadog_checks.base.utils.models import validation + +from . import defaults, validators + + +class SharedConfig(BaseModel): + model_config = ConfigDict( + validate_default=True, + arbitrary_types_allowed=True, + frozen=True, + ) + service: Optional[str] = None + + @model_validator(mode='before') + def _initial_validation(cls, values): + return validation.core.initialize_config(getattr(validators, 'initialize_shared', identity)(values)) + + @field_validator('*', mode='before') + def _validate(cls, value, info): + field = cls.model_fields[info.field_name] + field_name = field.alias or info.field_name + if field_name in info.context['configured_fields']: + value = getattr(validators, f'shared_{info.field_name}', identity)(value, field=field) + else: + value = getattr(defaults, f'shared_{info.field_name}', lambda: value)() + + return validation.utils.make_immutable(value) + + @model_validator(mode='after') + def _final_validation(cls, model): + return validation.core.check_model(getattr(validators, 'check_shared', identity)(model)) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/validators.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/validators.py new file mode 100644 index 0000000000000..70150e85e6124 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/validators.py @@ -0,0 +1,13 @@ +# (C) Datadog, Inc. 
2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +# Here you can include additional config validators or transformers +# +# def initialize_instance(values, **kwargs): +# if 'my_option' not in values and 'my_legacy_option' in values: +# values['my_option'] = values['my_legacy_option'] +# if values.get('my_number') > 10: +# raise ValueError('my_number max value is 10, got %s' % str(values.get('my_number'))) +# +# return values diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example new file mode 100644 index 0000000000000..8ee633b1335fc --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -0,0 +1,44 @@ +## All options defined here are available to all instances. +# +init_config: + + ## @param service - string - optional + ## Attach the tag `service:` to every metric, event, and service check emitted by this integration. + ## + ## Additionally, this sets the default `service` for every log source. + # + # service: + +## Every instance is scheduled independently of the others. +# +instances: + + - + ## @param tags - list of strings - optional + ## A list of tags to attach to every metric and service check emitted by this instance. + ## + ## Learn more about tagging at https://docs.datadoghq.com/tagging + # + # tags: + # - : + # - : + + ## @param service - string - optional + ## Attach the tag `service:` to every metric, event, and service check emitted by this integration. + ## + ## Overrides any `service` defined in the `init_config` section. + # + # service: + + ## @param min_collection_interval - number - optional - default: 15 + ## This changes the collection interval of the check. 
For more information, see: + ## https://docs.datadoghq.com/developers/write_agent_check/#collection-interval + # + # min_collection_interval: 15 + + ## @param empty_default_hostname - boolean - optional - default: false + ## This forces the check to send metrics with no hostname. + ## + ## This is useful for cluster-level checks. + # + # empty_default_hostname: false diff --git a/octopus_deploy/hatch.toml b/octopus_deploy/hatch.toml new file mode 100644 index 0000000000000..001e43ce25414 --- /dev/null +++ b/octopus_deploy/hatch.toml @@ -0,0 +1,4 @@ +[env.collectors.datadog-checks] + +[[envs.default.matrix]] +python = ["3.11"] diff --git a/octopus_deploy/images/IMAGES_README.md b/octopus_deploy/images/IMAGES_README.md new file mode 100644 index 0000000000000..443f3c45e3385 --- /dev/null +++ b/octopus_deploy/images/IMAGES_README.md @@ -0,0 +1,41 @@ +# Marketplace Media Carousel Guidelines + +## Using the media gallery + +Please upload images to use the media gallery. Integrations require a minimum of 3 images. Images should highlight your product, your integration, and a full image of the Datadog integration dashboard. The gallery +can hold a maximum of 8 pieces of media total, and one of these pieces of media +can be a video (guidelines and submission steps below). Images should be +added to your /images directory and referenced in the manifest.json file. + + +## Image and video requirements + +### Images + +``` +File type : .jpg or .png +File size : ~500 KB per image, with a max of 1 MB per image +File dimensions : The image must be between 1440px and 2880px width, with a 16:9 aspect ratio (for example: 1440x810) +File name : Use only letters, numbers, underscores, and hyphens +Color mode : RGB +Color profile : sRGB +Description : 300 characters maximum +``` + +### Video + +To display a video in your media gallery, please send our team the zipped file +or a link to download the video at `marketplace@datadog.com`. 
In addition, +please upload a thumbnail image for your video as a part of the pull request. +Once approved, we will upload the file to Vimeo and provide you with the +vimeo_id to add to your manifest.json file. Please note that the gallery can +only hold one video. + +``` +File type : MP4 H.264 +File size : Max 1 video; 1 GB maximum size +File dimensions : The aspect ratio must be exactly 16:9, and the resolution must be 1920x1080 or higher +File name : partnerName-appName.mp4 +Run time : Recommendation of 60 seconds or less +Description : 300 characters maximum +``` diff --git a/octopus_deploy/manifest.json b/octopus_deploy/manifest.json new file mode 100644 index 0000000000000..5f98881d944c0 --- /dev/null +++ b/octopus_deploy/manifest.json @@ -0,0 +1,57 @@ +{ + "manifest_version": "2.0.0", + "app_uuid": "821889b0-d4f9-4136-8059-7b7c42e6bd43", + "app_id": "octopus-deploy", + "display_on_public_website": false, + "tile": { + "overview": "README.md#Overview", + "configuration": "README.md#Setup", + "support": "README.md#Support", + "changelog": "CHANGELOG.md", + "description": "", + "title": "Octopus Deploy", + "media": [], + "classifier_tags": [ + "", + "Supported OS::Linux", + "Supported OS::Windows", + "Supported OS::macOS", + "Category::", + "Offering::", + "Queried Data Type::", + "Submitted Data Type::" + ] + }, + "assets": { + "integration": { + "auto_install": true, + "source_type_id": 24277387, + "source_type_name": "Octopus Deploy", + "configuration": { + "spec": "assets/configuration/spec.yaml" + }, + "events": { + "creates_events": false + }, + "metrics": { + "prefix": "octopus_deploy.", + "check": "", + "metadata_path": "metadata.csv" + }, + "service_checks": { + "metadata_path": "assets/service_checks.json" + } + }, + "dashboards": { + "": "assets/dashboards/.json" + }, + "monitors": {}, + "saved_views": {} + }, + "author": { + "support_email": "help@datadoghq.com", + "name": "Datadog", + "homepage": "https://www.datadoghq.com", + "sales_email": 
"info@datadoghq.com" + } +} diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv new file mode 100644 index 0000000000000..02cde5e98381e --- /dev/null +++ b/octopus_deploy/metadata.csv @@ -0,0 +1 @@ +metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags diff --git a/octopus_deploy/pyproject.toml b/octopus_deploy/pyproject.toml new file mode 100644 index 0000000000000..5a56841f9adfb --- /dev/null +++ b/octopus_deploy/pyproject.toml @@ -0,0 +1,60 @@ +[build-system] +requires = [ + "hatchling>=0.13.0", +] +build-backend = "hatchling.build" + +[project] +name = "datadog-octopus-deploy" +description = "The Octopus Deploy check" +readme = "README.md" +license = "BSD-3-Clause" +requires-python = ">=3.11" +keywords = [ + "datadog", + "datadog agent", + "datadog check", + "octopus_deploy", +] +authors = [ + { name = "Datadog", email = "packages@datadoghq.com" }, +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "License :: OSI Approved :: BSD License", + "Private :: Do Not Upload", + "Programming Language :: Python :: 3.11", + "Topic :: System :: Monitoring", +] +dependencies = [ + "datadog-checks-base>=32.6.0", +] +dynamic = [ + "version", +] + +[project.optional-dependencies] +deps = [] + +[project.urls] +Source = "https://github.com/DataDog/integrations-core" + +[tool.hatch.version] +path = "datadog_checks/octopus_deploy/__about__.py" + +[tool.hatch.build.targets.sdist] +include = [ + "/datadog_checks", + "/tests", + "/manifest.json", +] + +[tool.hatch.build.targets.wheel] +include = [ + "/datadog_checks/octopus_deploy", +] +dev-mode-dirs = [ + ".", +] diff --git a/octopus_deploy/tests/__init__.py b/octopus_deploy/tests/__init__.py new file mode 100644 index 0000000000000..9103122bf028d --- /dev/null +++ b/octopus_deploy/tests/__init__.py @@ -0,0 +1,3 @@ +# (C) Datadog, 
Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py new file mode 100644 index 0000000000000..c8ac597a9862c --- /dev/null +++ b/octopus_deploy/tests/conftest.py @@ -0,0 +1,14 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import pytest + + +@pytest.fixture(scope='session') +def dd_environment(): + yield + + +@pytest.fixture +def instance(): + return {} diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py new file mode 100644 index 0000000000000..aaeaef5a7af82 --- /dev/null +++ b/octopus_deploy/tests/test_unit.py @@ -0,0 +1,26 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +from typing import Any, Callable, Dict # noqa: F401 + +from datadog_checks.base import AgentCheck # noqa: F401 +from datadog_checks.base.stubs.aggregator import AggregatorStub # noqa: F401 +from datadog_checks.dev.utils import get_metadata_metrics +from datadog_checks.octopus_deploy import OctopusDeployCheck + + +def test_check(dd_run_check, aggregator, instance): + # type: (Callable[[AgentCheck, bool], None], AggregatorStub, Dict[str, Any]) -> None + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + dd_run_check(check) + + aggregator.assert_all_metrics_covered() + aggregator.assert_metrics_using_metadata(get_metadata_metrics()) + + +def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance): + # type: (Callable[[AgentCheck, bool], None], AggregatorStub, Dict[str, Any]) -> None + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + dd_run_check(check) + aggregator.assert_service_check('octopus_deploy.can_connect', OctopusDeployCheck.CRITICAL) From 8237b439a9e6d39ba5ef6e5a7426dc7683388979 Mon Sep 17 00:00:00 2001 From: Sarah Witt 
Date: Fri, 30 Aug 2024 13:53:14 -0400 Subject: [PATCH 02/44] add link to readme --- octopus_deploy/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/octopus_deploy/README.md b/octopus_deploy/README.md index 916b0a6b8a014..36c68fb632b90 100644 --- a/octopus_deploy/README.md +++ b/octopus_deploy/README.md @@ -49,7 +49,7 @@ See [service_checks.json][8] for a list of service checks provided by this integ Need help? Contact [Datadog support][9]. -[1]: **LINK_TO_INTEGRATION_SITE** +[1]: https://octopus.com/ [2]: https://app.datadoghq.com/account/settings/agent/latest [3]: https://docs.datadoghq.com/agent/kubernetes/integrations/ [4]: https://github.com/DataDog/integrations-core/blob/master/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example From 87f6280e3090cce23d660428e1765a0620daa945 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 12:37:02 -0400 Subject: [PATCH 03/44] clean up manifest --- .../dashboards/octopus_deploy_overview.json | 1 - octopus_deploy/images/IMAGES_README.md | 41 ------------------- octopus_deploy/manifest.json | 16 ++++---- 3 files changed, 7 insertions(+), 51 deletions(-) delete mode 100644 octopus_deploy/assets/dashboards/octopus_deploy_overview.json delete mode 100644 octopus_deploy/images/IMAGES_README.md diff --git a/octopus_deploy/assets/dashboards/octopus_deploy_overview.json b/octopus_deploy/assets/dashboards/octopus_deploy_overview.json deleted file mode 100644 index e9e23301af626..0000000000000 --- a/octopus_deploy/assets/dashboards/octopus_deploy_overview.json +++ /dev/null @@ -1 +0,0 @@ -Please build an out-of-the-box dashboard for your integration following our best practices here: https://datadoghq.dev/integrations-core/guidelines/dashboards/#best-practices \ No newline at end of file diff --git a/octopus_deploy/images/IMAGES_README.md b/octopus_deploy/images/IMAGES_README.md deleted file mode 100644 index 443f3c45e3385..0000000000000 --- a/octopus_deploy/images/IMAGES_README.md 
+++ /dev/null @@ -1,41 +0,0 @@ -# Marketplace Media Carousel Guidelines - -## Using the media gallery - -Please upload images to use the media gallery. Integrations require a minimum of 3 images. Images should highlight your product, your integration, and a full image of the Datadog integration dashboard. The gallery -can hold a maximum of 8 pieces of media total, and one of these pieces of media -can be a video (guidelines and submission steps below). Images should be -added to your /images directory and referenced in the manifest.json file. - - -## Image and video requirements - -### Images - -``` -File type : .jpg or .png -File size : ~500 KB per image, with a max of 1 MB per image -File dimensions : The image must be between 1440px and 2880px width, with a 16:9 aspect ratio (for example: 1440x810) -File name : Use only letters, numbers, underscores, and hyphens -Color mode : RGB -Color profile : sRGB -Description : 300 characters maximum -``` - -### Video - -To display a video in your media gallery, please send our team the zipped file -or a link to download the video at `marketplace@datadog.com`. In addition, -please upload a thumbnail image for your video as a part of the pull request. -Once approved, we will upload the file to Vimeo and provide you with the -vimeo_id to add to your manifest.json file. Please note that the gallery can -only hold one video. 
- -``` -File type : MP4 H.264 -File size : Max 1 video; 1 GB maximum size -File dimensions : The aspect ratio must be exactly 16:9, and the resolution must be 1920x1080 or higher -File name : partnerName-appName.mp4 -Run time : Recommendation of 60 seconds or less -Description : 300 characters maximum -``` diff --git a/octopus_deploy/manifest.json b/octopus_deploy/manifest.json index 5f98881d944c0..b3bf0acec9b91 100644 --- a/octopus_deploy/manifest.json +++ b/octopus_deploy/manifest.json @@ -8,18 +8,18 @@ "configuration": "README.md#Setup", "support": "README.md#Support", "changelog": "CHANGELOG.md", - "description": "", + "description": "Monitor your Octopus Deploy Server.", "title": "Octopus Deploy", "media": [], "classifier_tags": [ - "", "Supported OS::Linux", "Supported OS::Windows", "Supported OS::macOS", - "Category::", - "Offering::", - "Queried Data Type::", - "Submitted Data Type::" + "Category::Configuration & Deployment", + "Offering::Integration", + "Submitted Data Type::Metrics", + "Submitted Data Type::Logs", + "Submitted Data Type::Events" ] }, "assets": { @@ -42,9 +42,7 @@ "metadata_path": "assets/service_checks.json" } }, - "dashboards": { - "": "assets/dashboards/.json" - }, + "dashboards": {}, "monitors": {}, "saved_views": {} }, From 152e2bde1d0145e7d793dae571034027a844ddfd Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 12:45:43 -0400 Subject: [PATCH 04/44] validate ci --- .codecov.yml | 9 +++++++++ .github/workflows/test-all.yml | 20 ++++++++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/.codecov.yml b/.codecov.yml index 5edec7fafe83d..2d707cd952a03 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -422,6 +422,10 @@ coverage: target: 75 flags: - nvidia_triton + Octopus_Deploy: + target: 75 + flags: + - octopus_deploy OpenLDAP: target: 75 flags: @@ -1249,6 +1253,11 @@ flags: paths: - nvidia_triton/datadog_checks/nvidia_triton - nvidia_triton/tests + octopus_deploy: + carryforward: true + paths: + - 
octopus_deploy/datadog_checks/octopus_deploy + - octopus_deploy/tests openldap: carryforward: true paths: diff --git a/.github/workflows/test-all.yml b/.github/workflows/test-all.yml index de091d4ab8329..fbc68257102c9 100644 --- a/.github/workflows/test-all.yml +++ b/.github/workflows/test-all.yml @@ -2574,6 +2574,26 @@ jobs: minimum-base-package: ${{ inputs.minimum-base-package }} pytest-args: ${{ inputs.pytest-args }} secrets: inherit + jc2a0c60: + uses: ./.github/workflows/test-target.yml + with: + job-name: Octopus Deploy + target: octopus_deploy + platform: linux + runner: '["ubuntu-22.04"]' + repo: "${{ inputs.repo }}" + python-version: "${{ inputs.python-version }}" + standard: ${{ inputs.standard }} + latest: ${{ inputs.latest }} + agent-image: "${{ inputs.agent-image }}" + agent-image-py2: "${{ inputs.agent-image-py2 }}" + agent-image-windows: "${{ inputs.agent-image-windows }}" + agent-image-windows-py2: "${{ inputs.agent-image-windows-py2 }}" + test-py2: ${{ inputs.test-py2 }} + test-py3: ${{ inputs.test-py3 }} + minimum-base-package: ${{ inputs.minimum-base-package }} + pytest-args: ${{ inputs.pytest-args }} + secrets: inherit j2cf0a0a: uses: ./.github/workflows/test-target.yml with: From fcf2fdcb35cb94d3b3dca6abdc032da891dd604d Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 12:50:50 -0400 Subject: [PATCH 05/44] validate models and config; --- .../octopus_deploy/config_models/__init__.py | 1 - .../octopus_deploy/config_models/defaults.py | 4 ++++ .../octopus_deploy/config_models/instance.py | 12 +++++++++++- .../octopus_deploy/config_models/shared.py | 5 +---- .../octopus_deploy/data/conf.yaml.example | 11 +++++++++++ 5 files changed, 27 insertions(+), 6 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py index 2e7eff5e94dd7..106fff2032f68 100644 --- 
a/octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/__init__.py @@ -7,7 +7,6 @@ # ddev -x validate config -s # ddev -x validate models -s - from .instance import InstanceConfig from .shared import SharedConfig diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py index 7adf7c7b7abd3..4d46152df5d40 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py @@ -8,6 +8,10 @@ # ddev -x validate models -s +def instance_disable_generic_tags(): + return False + + def instance_empty_default_hostname(): return False diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py index b2d32d2e62ea8..56acae21ba432 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py @@ -7,7 +7,6 @@ # ddev -x validate config -s # ddev -x validate models -s - from __future__ import annotations from typing import Optional @@ -20,13 +19,24 @@ from . 
import defaults, validators +class MetricPatterns(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + frozen=True, + ) + exclude: Optional[tuple[str, ...]] = None + include: Optional[tuple[str, ...]] = None + + class InstanceConfig(BaseModel): model_config = ConfigDict( validate_default=True, arbitrary_types_allowed=True, frozen=True, ) + disable_generic_tags: Optional[bool] = None empty_default_hostname: Optional[bool] = None + metric_patterns: Optional[MetricPatterns] = None min_collection_interval: Optional[float] = None service: Optional[str] = None tags: Optional[tuple[str, ...]] = None diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py index 656c417aa3849..e39d447dfc4b9 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py @@ -7,7 +7,6 @@ # ddev -x validate config -s # ddev -x validate models -s - from __future__ import annotations from typing import Optional @@ -17,7 +16,7 @@ from datadog_checks.base.utils.functions import identity from datadog_checks.base.utils.models import validation -from . import defaults, validators +from . 
import validators class SharedConfig(BaseModel): @@ -38,8 +37,6 @@ def _validate(cls, value, info): field_name = field.alias or info.field_name if field_name in info.context['configured_fields']: value = getattr(validators, f'shared_{info.field_name}', identity)(value, field=field) - else: - value = getattr(defaults, f'shared_{info.field_name}', lambda: value)() return validation.utils.make_immutable(value) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index 8ee633b1335fc..57b46cc14ac44 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -42,3 +42,14 @@ instances: ## This is useful for cluster-level checks. # # empty_default_hostname: false + + ## @param metric_patterns - mapping - optional + ## A mapping of metrics to include or exclude, with each entry being a regular expression. + ## + ## Metrics defined in `exclude` will take precedence in case of overlap. 
+ # + # metric_patterns: + # include: + # - + # exclude: + # - From f77c3d7cbf6a3be7dbeccf088fa8a4e0f617a176 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 12:57:52 -0400 Subject: [PATCH 06/44] add labeler --- .github/workflows/config/labeler.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/config/labeler.yml b/.github/workflows/config/labeler.yml index 63203ce422d14..a8f1d4f72d166 100644 --- a/.github/workflows/config/labeler.yml +++ b/.github/workflows/config/labeler.yml @@ -331,6 +331,8 @@ integration/nvidia_jetson: - nvidia_jetson/**/* integration/nvidia_triton: - nvidia_triton/**/* +integration/octopus_deploy: +- octopus_deploy/**/* integration/oke: - oke/**/* integration/oom_kill: From fc9e61ca333c5e8fba3ad006b02da0a698adc545 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 13:23:20 -0400 Subject: [PATCH 07/44] remove dash --- octopus_deploy/manifest.json | 1 - 1 file changed, 1 deletion(-) diff --git a/octopus_deploy/manifest.json b/octopus_deploy/manifest.json index b3bf0acec9b91..9b7a1e06279bf 100644 --- a/octopus_deploy/manifest.json +++ b/octopus_deploy/manifest.json @@ -42,7 +42,6 @@ "metadata_path": "assets/service_checks.json" } }, - "dashboards": {}, "monitors": {}, "saved_views": {} }, From 23a626096b8c80f0934cb3d8f7b1ae23cde4afde Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 13:26:22 -0400 Subject: [PATCH 08/44] fix manifest --- octopus_deploy/manifest.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/octopus_deploy/manifest.json b/octopus_deploy/manifest.json index 9b7a1e06279bf..1241134d791e0 100644 --- a/octopus_deploy/manifest.json +++ b/octopus_deploy/manifest.json @@ -41,9 +41,7 @@ "service_checks": { "metadata_path": "assets/service_checks.json" } - }, - "monitors": {}, - "saved_views": {} + } }, "author": { "support_email": "help@datadoghq.com", From f720cc9d268f917876a539d2cf194c678ea8d91a Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 
3 Sep 2024 15:20:38 -0400 Subject: [PATCH 09/44] clean scaffolding and add base fixtures --- .../datadog_checks/octopus_deploy/check.py | 85 +------ .../ProjectGroups-1/projects/response.json | 214 ++++++++++++++++++ .../ProjectGroups-1/response.json | 12 + .../ProjectGroups-2/projects/response.json | 82 +++++++ .../ProjectGroups-2/response.json | 12 + .../ProjectGroups-3/projects/response.json | 15 ++ .../ProjectGroups-3/response.json | 12 + .../api/Spaces-1/projectgroups/response.json | 52 +++++ .../fixtures/GET/api/spaces/response.json | 41 ++++ 9 files changed, 441 insertions(+), 84 deletions(-) create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/spaces/response.json diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index dd6d75835109a..30a129375998e 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -1,98 +1,15 @@ # (C) Datadog, Inc. 
2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from typing import Any # noqa: F401 - -from datadog_checks.base import AgentCheck # noqa: F401 - -# from datadog_checks.base.utils.db import QueryManager -# from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout -# from json import JSONDecodeError +from datadog_checks.base import AgentCheck class OctopusDeployCheck(AgentCheck): - # This will be the prefix of every metric and service check the integration sends __NAMESPACE__ = 'octopus_deploy' def __init__(self, name, init_config, instances): super(OctopusDeployCheck, self).__init__(name, init_config, instances) - # Use self.instance to read the check configuration - # self.url = self.instance.get("url") - - # If the check is going to perform SQL queries you should define a query manager here. - # More info at - # https://datadoghq.dev/integrations-core/base/databases/#datadog_checks.base.utils.db.core.QueryManager - # sample_query = { - # "name": "sample", - # "query": "SELECT * FROM sample_table", - # "columns": [ - # {"name": "metric", "type": "gauge"} - # ], - # } - # self._query_manager = QueryManager(self, self.execute_query, queries=[sample_query]) - # self.check_initializations.append(self._query_manager.compile_queries) - def check(self, _): - # type: (Any) -> None - # The following are useful bits of code to help new users get started. - - # Perform HTTP Requests with our HTTP wrapper. 
- # More info at https://datadoghq.dev/integrations-core/base/http/ - # try: - # response = self.http.get(self.url) - # response.raise_for_status() - # response_json = response.json() - - # except Timeout as e: - # self.service_check( - # "can_connect", - # AgentCheck.CRITICAL, - # message="Request timeout: {}, {}".format(self.url, e), - # ) - # raise - - # except (HTTPError, InvalidURL, ConnectionError) as e: - # self.service_check( - # "can_connect", - # AgentCheck.CRITICAL, - # message="Request failed: {}, {}".format(self.url, e), - # ) - # raise - - # except JSONDecodeError as e: - # self.service_check( - # "can_connect", - # AgentCheck.CRITICAL, - # message="JSON Parse failed: {}, {}".format(self.url, e), - # ) - # raise - - # except ValueError as e: - # self.service_check( - # "can_connect", AgentCheck.CRITICAL, message=str(e) - # ) - # raise - - # This is how you submit metrics - # There are different types of metrics that you can submit (gauge, event). - # More info at https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck - # self.gauge("test", 1.23, tags=['foo:bar']) - - # Perform database queries using the Query Manager - # self._query_manager.execute() - - # This is how you use the persistent cache. This cache file based and persists across agent restarts. - # If you need an in-memory cache that is persisted across runs - # You can define a dictionary in the __init__ method. - # self.write_persistent_cache("key", "value") - # value = self.read_persistent_cache("key") - - # If your check ran successfully, you can send the status. 
- # More info at - # https://datadoghq.dev/integrations-core/base/api/#datadog_checks.base.checks.base.AgentCheck.service_check - # self.service_check("can_connect", AgentCheck.OK) - - # If it didn't then it should send a critical service check self.service_check("can_connect", AgentCheck.CRITICAL) diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json new file mode 100644 index 0000000000000..4f24d8fb90cfe --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json @@ -0,0 +1,214 @@ +{ + "ItemType": "Project", + "TotalResults": 3, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "Items": [ + { + "Id": "Projects-2", + "SpaceId": "Spaces-1", + "VariableSetId": "variableset-Projects-2", + "DeploymentProcessId": "deploymentprocess-Projects-2", + "ClonedFromProjectId": null, + "DiscreteChannelRelease": false, + "IncludedLibraryVariableSetIds": [], + "DefaultToSkipIfAlreadyInstalled": false, + "TenantedDeploymentMode": "Untenanted", + "DefaultGuidedFailureMode": "EnvironmentDefault", + "VersioningStrategy": null, + "ReleaseCreationStrategy": { + "ChannelId": null, + "ReleaseCreationPackage": null, + "ReleaseCreationPackageStepId": null + }, + "Templates": [], + "AutoDeployReleaseOverrides": [], + "ReleaseNotesTemplate": null, + "DeploymentChangesTemplate": null, + "ForcePackageDownload": false, + "Icon": null, + "ExtensionSettings": [], + "Name": "my-project", + "Slug": "my-project", + "Description": "", + "IsDisabled": false, + "ProjectGroupId": "ProjectGroups-1", + "LifecycleId": "Lifecycles-1", + "AutoCreateRelease": false, + "IsVersionControlled": false, + "PersistenceSettings": { + "Type": "Database" + }, + "ProjectConnectivityPolicy": { + "SkipMachineBehavior": "None", + "TargetRoles": [], + "AllowDeploymentsToNoTargets": false, 
+ "ExcludeUnhealthyTargets": false + }, + "Links": { + "Self": "/api/Spaces-1/projects/Projects-2", + "Variables": "/api/Spaces-1/projects/Projects-2/variables", + "Releases": "/api/Spaces-1/projects/Projects-2/releases{/version}{?skip,take,searchByVersion}", + "Channels": "/api/Spaces-1/projects/Projects-2/channels{/id}{?skip,take,partialName}", + "Triggers": "/api/Spaces-1/projects/Projects-2/triggers{?skip,take,partialName,triggerActionType,triggerActionCategory,runbooks}", + "ScheduledTriggers": "/api/Spaces-1/projects/Projects-2/triggers/scheduled{?skip,take,partialName,ids}", + "OrderChannels": "/api/Spaces-1/projects/Projects-2/channels/order", + "Progression": "/api/Spaces-1/projects/Projects-2/progression{?releaseHistoryCount}", + "RunbookTaskRunDashboardItemsTemplate": "/api/Spaces-1/progression/runbooks/taskRuns{?skip,take,ids,projectIds,runbookIds,environmentIds,tenantIds,taskIds}", + "DeploymentProcess": "/api/Spaces-1/projects/Projects-2/deploymentprocesses", + "DeploymentSettings": "/api/Spaces-1/projects/Projects-2/deploymentsettings", + "Web": "/app#/Spaces-1/projects/Projects-2", + "Logo": "/api/Spaces-1/projects/Projects-2/logo?cb=2024.2.9409", + "Metadata": "/api/Spaces-1/projects/Projects-2/metadata", + "Runbooks": "/api/Spaces-1/projects/Projects-2/runbooks{?skip,take,partialName}", + "RunbookSnapshots": "/api/Spaces-1/projects/Projects-2/runbookSnapshots{/name}{?skip,take,searchByName}", + "Summary": "/api/Spaces-1/projects/Projects-2/summary", + "GitConnectionTest": "/api/Spaces-1/projects/Projects-2/git/connectivity-test", + "InsightsMetrics": "/api/Spaces-1/projects/Projects-2/insights/metrics{?channelId,environmentId,tenantId,tenantFilter,timeRange,granularity,timeZone}", + "GitCompatibilityReport": "/api/Spaces-1/projects/Projects-2/git/compatibility-report", + "ConvertToGit": "/api/Spaces-1/projects/Projects-2/git/convert", + "ConvertToVcs": "/api/Spaces-1/projects/Projects-2/git/convert" + } + }, + { + "Id": "Projects-3", + "SpaceId": 
"Spaces-1", + "VariableSetId": "variableset-Projects-3", + "DeploymentProcessId": "deploymentprocess-Projects-3", + "ClonedFromProjectId": null, + "DiscreteChannelRelease": false, + "IncludedLibraryVariableSetIds": [], + "DefaultToSkipIfAlreadyInstalled": false, + "TenantedDeploymentMode": "Untenanted", + "DefaultGuidedFailureMode": "EnvironmentDefault", + "VersioningStrategy": null, + "ReleaseCreationStrategy": { + "ChannelId": null, + "ReleaseCreationPackage": null, + "ReleaseCreationPackageStepId": null + }, + "Templates": [], + "AutoDeployReleaseOverrides": [], + "ReleaseNotesTemplate": null, + "DeploymentChangesTemplate": null, + "ForcePackageDownload": false, + "Icon": null, + "ExtensionSettings": [], + "Name": "test", + "Slug": "test", + "Description": "", + "IsDisabled": false, + "ProjectGroupId": "ProjectGroups-1", + "LifecycleId": "Lifecycles-1", + "AutoCreateRelease": false, + "IsVersionControlled": false, + "PersistenceSettings": { + "Type": "Database" + }, + "ProjectConnectivityPolicy": { + "SkipMachineBehavior": "None", + "TargetRoles": [], + "AllowDeploymentsToNoTargets": false, + "ExcludeUnhealthyTargets": false + }, + "Links": { + "Self": "/api/Spaces-1/projects/Projects-3", + "Variables": "/api/Spaces-1/projects/Projects-3/variables", + "Releases": "/api/Spaces-1/projects/Projects-3/releases{/version}{?skip,take,searchByVersion}", + "Channels": "/api/Spaces-1/projects/Projects-3/channels{/id}{?skip,take,partialName}", + "Triggers": "/api/Spaces-1/projects/Projects-3/triggers{?skip,take,partialName,triggerActionType,triggerActionCategory,runbooks}", + "ScheduledTriggers": "/api/Spaces-1/projects/Projects-3/triggers/scheduled{?skip,take,partialName,ids}", + "OrderChannels": "/api/Spaces-1/projects/Projects-3/channels/order", + "Progression": "/api/Spaces-1/projects/Projects-3/progression{?releaseHistoryCount}", + "RunbookTaskRunDashboardItemsTemplate": 
"/api/Spaces-1/progression/runbooks/taskRuns{?skip,take,ids,projectIds,runbookIds,environmentIds,tenantIds,taskIds}", + "DeploymentProcess": "/api/Spaces-1/projects/Projects-3/deploymentprocesses", + "DeploymentSettings": "/api/Spaces-1/projects/Projects-3/deploymentsettings", + "Web": "/app#/Spaces-1/projects/Projects-3", + "Logo": "/api/Spaces-1/projects/Projects-3/logo?cb=2024.2.9409", + "Metadata": "/api/Spaces-1/projects/Projects-3/metadata", + "Runbooks": "/api/Spaces-1/projects/Projects-3/runbooks{?skip,take,partialName}", + "RunbookSnapshots": "/api/Spaces-1/projects/Projects-3/runbookSnapshots{/name}{?skip,take,searchByName}", + "Summary": "/api/Spaces-1/projects/Projects-3/summary", + "GitConnectionTest": "/api/Spaces-1/projects/Projects-3/git/connectivity-test", + "InsightsMetrics": "/api/Spaces-1/projects/Projects-3/insights/metrics{?channelId,environmentId,tenantId,tenantFilter,timeRange,granularity,timeZone}", + "GitCompatibilityReport": "/api/Spaces-1/projects/Projects-3/git/compatibility-report", + "ConvertToGit": "/api/Spaces-1/projects/Projects-3/git/convert", + "ConvertToVcs": "/api/Spaces-1/projects/Projects-3/git/convert" + } + }, + { + "Id": "Projects-1", + "SpaceId": "Spaces-1", + "VariableSetId": "variableset-Projects-1", + "DeploymentProcessId": "deploymentprocess-Projects-1", + "ClonedFromProjectId": null, + "DiscreteChannelRelease": false, + "IncludedLibraryVariableSetIds": [], + "DefaultToSkipIfAlreadyInstalled": false, + "TenantedDeploymentMode": "Untenanted", + "DefaultGuidedFailureMode": "EnvironmentDefault", + "VersioningStrategy": null, + "ReleaseCreationStrategy": { + "ChannelId": null, + "ReleaseCreationPackage": null, + "ReleaseCreationPackageStepId": null + }, + "Templates": [], + "AutoDeployReleaseOverrides": [], + "ReleaseNotesTemplate": null, + "DeploymentChangesTemplate": null, + "ForcePackageDownload": false, + "Icon": null, + "ExtensionSettings": [], + "Name": "test-api", + "Slug": "test-api", + "Description": "", + 
"IsDisabled": false, + "ProjectGroupId": "ProjectGroups-1", + "LifecycleId": "Lifecycles-1", + "AutoCreateRelease": false, + "IsVersionControlled": false, + "PersistenceSettings": { + "Type": "Database" + }, + "ProjectConnectivityPolicy": { + "SkipMachineBehavior": "None", + "TargetRoles": [], + "AllowDeploymentsToNoTargets": false, + "ExcludeUnhealthyTargets": false + }, + "Links": { + "Self": "/api/Spaces-1/projects/Projects-1", + "Variables": "/api/Spaces-1/projects/Projects-1/variables", + "Releases": "/api/Spaces-1/projects/Projects-1/releases{/version}{?skip,take,searchByVersion}", + "Channels": "/api/Spaces-1/projects/Projects-1/channels{/id}{?skip,take,partialName}", + "Triggers": "/api/Spaces-1/projects/Projects-1/triggers{?skip,take,partialName,triggerActionType,triggerActionCategory,runbooks}", + "ScheduledTriggers": "/api/Spaces-1/projects/Projects-1/triggers/scheduled{?skip,take,partialName,ids}", + "OrderChannels": "/api/Spaces-1/projects/Projects-1/channels/order", + "Progression": "/api/Spaces-1/projects/Projects-1/progression{?releaseHistoryCount}", + "RunbookTaskRunDashboardItemsTemplate": "/api/Spaces-1/progression/runbooks/taskRuns{?skip,take,ids,projectIds,runbookIds,environmentIds,tenantIds,taskIds}", + "DeploymentProcess": "/api/Spaces-1/projects/Projects-1/deploymentprocesses", + "DeploymentSettings": "/api/Spaces-1/projects/Projects-1/deploymentsettings", + "Web": "/app#/Spaces-1/projects/Projects-1", + "Logo": "/api/Spaces-1/projects/Projects-1/logo?cb=2024.2.9409", + "Metadata": "/api/Spaces-1/projects/Projects-1/metadata", + "Runbooks": "/api/Spaces-1/projects/Projects-1/runbooks{?skip,take,partialName}", + "RunbookSnapshots": "/api/Spaces-1/projects/Projects-1/runbookSnapshots{/name}{?skip,take,searchByName}", + "Summary": "/api/Spaces-1/projects/Projects-1/summary", + "GitConnectionTest": "/api/Spaces-1/projects/Projects-1/git/connectivity-test", + "InsightsMetrics": 
"/api/Spaces-1/projects/Projects-1/insights/metrics{?channelId,environmentId,tenantId,tenantFilter,timeRange,granularity,timeZone}", + "GitCompatibilityReport": "/api/Spaces-1/projects/Projects-1/git/compatibility-report", + "ConvertToGit": "/api/Spaces-1/projects/Projects-1/git/convert", + "ConvertToVcs": "/api/Spaces-1/projects/Projects-1/git/convert" + } + } + ], + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-1/projects?skip=0&take=30", + "Template": "/api/Spaces-1/projectgroups/ProjectGroups-1/projects{?skip,take}", + "Page.All": "/api/Spaces-1/projectgroups/ProjectGroups-1/projects?skip=0&take=2147483647", + "Page.Current": "/api/Spaces-1/projectgroups/ProjectGroups-1/projects?skip=0&take=30", + "Page.Last": "/api/Spaces-1/projectgroups/ProjectGroups-1/projects?skip=0&take=30" + } + } \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/response.json new file mode 100644 index 0000000000000..ec7a38c958cf9 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-1/response.json @@ -0,0 +1,12 @@ +{ + "Id": "ProjectGroups-1", + "Name": "Default Project Group", + "Description": "", + "EnvironmentIds": [], + "RetentionPolicyId": null, + "SpaceId": "Spaces-1", + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-1", + "Projects": "/api/Spaces-1/projectgroups/ProjectGroups-1/projects" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json new file mode 100644 index 0000000000000..9a931a124f728 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json @@ -0,0 +1,82 @@ +{ + "ItemType": 
"Project", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "Items": [ + { + "Id": "Projects-4", + "SpaceId": "Spaces-1", + "VariableSetId": "variableset-Projects-4", + "DeploymentProcessId": "deploymentprocess-Projects-4", + "ClonedFromProjectId": null, + "DiscreteChannelRelease": false, + "IncludedLibraryVariableSetIds": [], + "DefaultToSkipIfAlreadyInstalled": false, + "TenantedDeploymentMode": "Untenanted", + "DefaultGuidedFailureMode": "EnvironmentDefault", + "VersioningStrategy": null, + "ReleaseCreationStrategy": { + "ChannelId": null, + "ReleaseCreationPackage": null, + "ReleaseCreationPackageStepId": null + }, + "Templates": [], + "AutoDeployReleaseOverrides": [], + "ReleaseNotesTemplate": null, + "DeploymentChangesTemplate": null, + "ForcePackageDownload": false, + "Icon": null, + "ExtensionSettings": [], + "Name": "hi", + "Slug": "hi", + "Description": "", + "IsDisabled": false, + "ProjectGroupId": "ProjectGroups-2", + "LifecycleId": "Lifecycles-1", + "AutoCreateRelease": false, + "IsVersionControlled": false, + "PersistenceSettings": { + "Type": "Database" + }, + "ProjectConnectivityPolicy": { + "SkipMachineBehavior": "None", + "TargetRoles": [], + "AllowDeploymentsToNoTargets": false, + "ExcludeUnhealthyTargets": false + }, + "Links": { + "Self": "/api/Spaces-1/projects/Projects-4", + "Variables": "/api/Spaces-1/projects/Projects-4/variables", + "Releases": "/api/Spaces-1/projects/Projects-4/releases{/version}{?skip,take,searchByVersion}", + "Channels": "/api/Spaces-1/projects/Projects-4/channels{/id}{?skip,take,partialName}", + "Triggers": "/api/Spaces-1/projects/Projects-4/triggers{?skip,take,partialName,triggerActionType,triggerActionCategory,runbooks}", + "ScheduledTriggers": "/api/Spaces-1/projects/Projects-4/triggers/scheduled{?skip,take,partialName,ids}", + "OrderChannels": "/api/Spaces-1/projects/Projects-4/channels/order", + "Progression": 
"/api/Spaces-1/projects/Projects-4/progression{?releaseHistoryCount}", + "RunbookTaskRunDashboardItemsTemplate": "/api/Spaces-1/progression/runbooks/taskRuns{?skip,take,ids,projectIds,runbookIds,environmentIds,tenantIds,taskIds}", + "DeploymentProcess": "/api/Spaces-1/projects/Projects-4/deploymentprocesses", + "DeploymentSettings": "/api/Spaces-1/projects/Projects-4/deploymentsettings", + "Web": "/app#/Spaces-1/projects/Projects-4", + "Logo": "/api/Spaces-1/projects/Projects-4/logo?cb=2024.2.9409", + "Metadata": "/api/Spaces-1/projects/Projects-4/metadata", + "Runbooks": "/api/Spaces-1/projects/Projects-4/runbooks{?skip,take,partialName}", + "RunbookSnapshots": "/api/Spaces-1/projects/Projects-4/runbookSnapshots{/name}{?skip,take,searchByName}", + "Summary": "/api/Spaces-1/projects/Projects-4/summary", + "GitConnectionTest": "/api/Spaces-1/projects/Projects-4/git/connectivity-test", + "InsightsMetrics": "/api/Spaces-1/projects/Projects-4/insights/metrics{?channelId,environmentId,tenantId,tenantFilter,timeRange,granularity,timeZone}", + "GitCompatibilityReport": "/api/Spaces-1/projects/Projects-4/git/compatibility-report", + "ConvertToGit": "/api/Spaces-1/projects/Projects-4/git/convert", + "ConvertToVcs": "/api/Spaces-1/projects/Projects-4/git/convert" + } + } + ], + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-2/projects?skip=0&take=30", + "Template": "/api/Spaces-1/projectgroups/ProjectGroups-2/projects{?skip,take}", + "Page.All": "/api/Spaces-1/projectgroups/ProjectGroups-2/projects?skip=0&take=2147483647", + "Page.Current": "/api/Spaces-1/projectgroups/ProjectGroups-2/projects?skip=0&take=30", + "Page.Last": "/api/Spaces-1/projectgroups/ProjectGroups-2/projects?skip=0&take=30" + } + } \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/response.json new file mode 100644 index 
0000000000000..a3c8e8fd15078 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-2/response.json @@ -0,0 +1,12 @@ +{ + "Id": "ProjectGroups-2", + "Name": "test-group", + "Description": null, + "EnvironmentIds": [], + "RetentionPolicyId": null, + "SpaceId": "Spaces-1", + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-2", + "Projects": "/api/Spaces-1/projectgroups/ProjectGroups-2/projects" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json new file mode 100644 index 0000000000000..2365c6da7fed4 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json @@ -0,0 +1,15 @@ +{ + "ItemType": "Project", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "Items": [], + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-3/projects?skip=0&take=30", + "Template": "/api/Spaces-1/projectgroups/ProjectGroups-3/projects{?skip,take}", + "Page.All": "/api/Spaces-1/projectgroups/ProjectGroups-3/projects?skip=0&take=2147483647", + "Page.Current": "/api/Spaces-1/projectgroups/ProjectGroups-3/projects?skip=0&take=30", + "Page.Last": "/api/Spaces-1/projectgroups/ProjectGroups-3/projects?skip=0&take=30" + } + } \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/response.json new file mode 100644 index 0000000000000..9c133bfb27163 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/ProjectGroups-3/response.json @@ -0,0 +1,12 @@ +{ + "Id": "ProjectGroups-3", + "Name": "hello", + "Description": null, + "EnvironmentIds": [], + 
"RetentionPolicyId": null, + "SpaceId": "Spaces-1", + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-3", + "Projects": "/api/Spaces-1/projectgroups/ProjectGroups-3/projects" + } + } \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/response.json new file mode 100644 index 0000000000000..450cd44196348 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/projectgroups/response.json @@ -0,0 +1,52 @@ +{ + "ItemType": "ProjectGroup", + "TotalResults": 3, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "Items": [ + { + "Id": "ProjectGroups-1", + "Name": "Default Project Group", + "Description": "", + "EnvironmentIds": [], + "RetentionPolicyId": null, + "SpaceId": "Spaces-1", + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-1", + "Projects": "/api/Spaces-1/projectgroups/ProjectGroups-1/projects" + } + }, + { + "Id": "ProjectGroups-3", + "Name": "hello", + "Description": null, + "EnvironmentIds": [], + "RetentionPolicyId": null, + "SpaceId": "Spaces-1", + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-3", + "Projects": "/api/Spaces-1/projectgroups/ProjectGroups-3/projects" + } + }, + { + "Id": "ProjectGroups-2", + "Name": "test-group", + "Description": null, + "EnvironmentIds": [], + "RetentionPolicyId": null, + "SpaceId": "Spaces-1", + "Links": { + "Self": "/api/Spaces-1/projectgroups/ProjectGroups-2", + "Projects": "/api/Spaces-1/projectgroups/ProjectGroups-2/projects" + } + } + ], + "Links": { + "Self": "/api/Spaces-1/projectgroups?skip=0&take=30", + "Template": "/api/Spaces-1/projectgroups{?skip,take,ids,partialName}", + "Page.All": "/api/Spaces-1/projectgroups?skip=0&take=2147483647", + "Page.Current": "/api/Spaces-1/projectgroups?skip=0&take=30", + "Page.Last": "/api/Spaces-1/projectgroups?skip=0&take=30" + } + } \ No newline at end of file diff --git 
a/octopus_deploy/tests/fixtures/GET/api/spaces/response.json b/octopus_deploy/tests/fixtures/GET/api/spaces/response.json new file mode 100644 index 0000000000000..0ce44f236779a --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/spaces/response.json @@ -0,0 +1,41 @@ +{ + "ItemType": "Space", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "Items": [ + { + "Id": "Spaces-1", + "Name": "Default", + "Slug": "default", + "Description": null, + "IsDefault": true, + "IsPrivate": false, + "TaskQueueStopped": false, + "SpaceManagersTeams": [ + "teams-administrators", + "teams-managers", + "teams-spacemanagers-Spaces-1" + ], + "SpaceManagersTeamMembers": [], + "Icon": null, + "ExtensionSettings": [], + "LastModifiedOn": "0001-01-01T00:00:00.000+00:00", + "Links": { + "Self": "/api/spaces/Spaces-1", + "SpaceHome": "/api/Spaces-1", + "Web": "/app#/spaces/Spaces-1", + "Logo": "/api/spaces/Spaces-1/logo?cb=2024.2.9409", + "Search": "/api/spaces/Spaces-1/search" + } + } + ], + "Links": { + "Self": "/api/spaces?skip=0&take=30", + "Template": "/api/spaces{?skip,take,ids,partialName}", + "Page.All": "/api/spaces?skip=0&take=2147483647", + "Page.Current": "/api/spaces?skip=0&take=30", + "Page.Last": "/api/spaces?skip=0&take=30" + } +} \ No newline at end of file From c802b28ed4be6211f9f825c7d182ae5e48e7a7b7 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 16:00:48 -0400 Subject: [PATCH 10/44] add base code and test --- octopus_deploy/assets/configuration/spec.yaml | 7 +++++++ .../datadog_checks/octopus_deploy/check.py | 17 +++++++++++++++-- .../octopus_deploy/config_models/instance.py | 1 + .../octopus_deploy/data/conf.yaml.example | 6 +++++- octopus_deploy/metadata.csv | 1 + octopus_deploy/tests/conftest.py | 2 +- octopus_deploy/tests/test_unit.py | 12 +++++++++--- 7 files changed, 39 insertions(+), 7 deletions(-) diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml 
index 9efdf98a42a2a..6de6a3fe87b1c 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -7,4 +7,11 @@ files: - template: init_config/default - template: instances options: + - name: octopus_endpoint + description: | + Octopus API endpoint. + value: + example: http://localhost:80/api + type: string + required: true - template: instances/default diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 30a129375998e..6a8504c3f597b 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -1,10 +1,13 @@ # (C) Datadog, Inc. 2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout + from datadog_checks.base import AgentCheck +from datadog_checks.octopus_deploy.config_models import ConfigMixin -class OctopusDeployCheck(AgentCheck): +class OctopusDeployCheck(AgentCheck, ConfigMixin): __NAMESPACE__ = 'octopus_deploy' @@ -12,4 +15,14 @@ def __init__(self, name, init_config, instances): super(OctopusDeployCheck, self).__init__(name, init_config, instances) def check(self, _): - self.service_check("can_connect", AgentCheck.CRITICAL) + try: + response = self.http.get(self.config.octopus_endpoint) + response.raise_for_status() + except (Timeout, HTTPError, InvalidURL, ConnectionError) as e: + self.gauge("api.can_connect", 0, tags=self.config.tags) + self.log.warning( + "Failed to connect to Octopus Deploy endpoint %s: %s", self.config.octopus_endpoint, str(e) + ) + raise + + self.gauge("api.can_connect", 1, tags=self.config.tags) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py index 56acae21ba432..f3f28b127c613 100644 --- 
a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py @@ -38,6 +38,7 @@ class InstanceConfig(BaseModel): empty_default_hostname: Optional[bool] = None metric_patterns: Optional[MetricPatterns] = None min_collection_interval: Optional[float] = None + octopus_endpoint: str service: Optional[str] = None tags: Optional[tuple[str, ...]] = None diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index 57b46cc14ac44..456ae9bb1b357 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -13,7 +13,11 @@ init_config: # instances: - - + ## @param octopus_endpoint - string - required + ## Octopus API endpoint. + # + - octopus_endpoint: localhost:80/api + ## @param tags - list of strings - optional ## A list of tags to attach to every metric and service check emitted by this instance. 
## diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 02cde5e98381e..ff748a56df064 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1 +1,2 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags +octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index c8ac597a9862c..42e3bac6593aa 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -11,4 +11,4 @@ def dd_environment(): @pytest.fixture def instance(): - return {} + return {'octopus_endpoint': 'http://localhost:80/api'} diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index aaeaef5a7af82..0fc155f967388 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -4,6 +4,8 @@ from typing import Any, Callable, Dict # noqa: F401 +import pytest + from datadog_checks.base import AgentCheck # noqa: F401 from datadog_checks.base.stubs.aggregator import AggregatorStub # noqa: F401 from datadog_checks.dev.utils import get_metadata_metrics @@ -13,8 +15,10 @@ def test_check(dd_run_check, aggregator, instance): # type: (Callable[[AgentCheck, bool], None], AggregatorStub, Dict[str, Any]) -> None check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - dd_run_check(check) + with pytest.raises(Exception, match=r'Max retries exceeded with url: /api'): + dd_run_check(check) + aggregator.assert_metric('octopus_deploy.api.can_connect', 0) aggregator.assert_all_metrics_covered() aggregator.assert_metrics_using_metadata(get_metadata_metrics()) @@ -22,5 +26,7 @@ def test_check(dd_run_check, aggregator, instance): def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance): # type: 
(Callable[[AgentCheck, bool], None], AggregatorStub, Dict[str, Any]) -> None check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - dd_run_check(check) - aggregator.assert_service_check('octopus_deploy.can_connect', OctopusDeployCheck.CRITICAL) + with pytest.raises(Exception, match=r'Max retries exceeded with url: /api'): + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.api.can_connect', 0) From 29c0191105b5e78f6ad867bbdaa709c5e047ac73 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 16:03:24 -0400 Subject: [PATCH 11/44] fix changelog --- octopus_deploy/changelog.d/{1.added => 18488.added} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename octopus_deploy/changelog.d/{1.added => 18488.added} (100%) diff --git a/octopus_deploy/changelog.d/1.added b/octopus_deploy/changelog.d/18488.added similarity index 100% rename from octopus_deploy/changelog.d/1.added rename to octopus_deploy/changelog.d/18488.added From a77d7289f95680d1dcc334893e77ac4d87d9cc9f Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 16:09:31 -0400 Subject: [PATCH 12/44] update and validate config --- octopus_deploy/assets/configuration/spec.yaml | 1 + .../datadog_checks/octopus_deploy/data/conf.yaml.example | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index 6de6a3fe87b1c..ba068efa5a36a 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -10,6 +10,7 @@ files: - name: octopus_endpoint description: | Octopus API endpoint. + See https://octopus.com/docs/octopus-rest-api/getting-started#authentication for more details. 
value: example: http://localhost:80/api type: string diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index 456ae9bb1b357..add371ae84672 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -15,8 +15,9 @@ instances: ## @param octopus_endpoint - string - required ## Octopus API endpoint. + ## See https://octopus.com/docs/octopus-rest-api/getting-started#authentication for more details. # - - octopus_endpoint: localhost:80/api + - octopus_endpoint: http://localhost:80/api ## @param tags - list of strings - optional ## A list of tags to attach to every metric and service check emitted by this instance. From bbb52af37565a202a773e1f2347b7dcf8845ab66 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 16:16:22 -0400 Subject: [PATCH 13/44] add metric to check --- octopus_deploy/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/octopus_deploy/manifest.json b/octopus_deploy/manifest.json index 1241134d791e0..8085072eea20c 100644 --- a/octopus_deploy/manifest.json +++ b/octopus_deploy/manifest.json @@ -35,7 +35,7 @@ }, "metrics": { "prefix": "octopus_deploy.", - "check": "", + "check": "octopus_deploy.api.can_connect", "metadata_path": "metadata.csv" }, "service_checks": { From c180beffec0402a0c7dde112e7fdbb6879c3f5ae Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 3 Sep 2024 16:38:28 -0400 Subject: [PATCH 14/44] Add mocks and make true unit tests --- octopus_deploy/tests/conftest.py | 98 ++++++++++ .../tests/fixtures/GET/api/response.json | 169 ++++++++++++++++++ octopus_deploy/tests/test_unit.py | 34 ++-- 3 files changed, 290 insertions(+), 11 deletions(-) create mode 100644 octopus_deploy/tests/fixtures/GET/api/response.json diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index 
42e3bac6593aa..f4d1e8561f063 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -1,7 +1,16 @@ # (C) Datadog, Inc. 2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +import json +import os +from pathlib import Path +from urllib.parse import urlparse + +import mock import pytest +import requests + +from datadog_checks.dev.fs import get_here @pytest.fixture(scope='session') @@ -12,3 +21,92 @@ def dd_environment(): @pytest.fixture def instance(): return {'octopus_endpoint': 'http://localhost:80/api'} + + +def get_json_value_from_file(file_path): + with open(file_path, 'r') as file: + return json.load(file) + + +def get_url_path(url): + parsed_url = urlparse(url) + return parsed_url.path + "?" + parsed_url.query if parsed_url.query else parsed_url.path + + +@pytest.fixture +def mock_responses(): + responses_map = {} + + def process_files(dir, response_parent): + for file in dir.rglob('*'): + if file.is_file() and file.stem != ".slash": + relative_dir_path = ( + "/" + + (str(file.parent.relative_to(dir)) if str(file.parent.relative_to(dir)) != "." 
else "") + + ("/" if (file.parent / ".slash").is_file() else "") + ) + if relative_dir_path not in response_parent: + response_parent[relative_dir_path] = {} + json_data = get_json_value_from_file(file) + response_parent[relative_dir_path][file.stem] = json_data + + def process_dir(dir, response_parent): + response_parent[dir.name] = {} + process_files(dir, response_parent[dir.name]) + + def create_responses_tree(): + root_dir_path = os.path.join(get_here(), 'fixtures') + method_subdirs = [d for d in Path(root_dir_path).iterdir() if d.is_dir() and d.name == 'GET'] + for method_subdir in method_subdirs: + process_dir(method_subdir, responses_map) + + def method(method, url, file='response', headers=None, params=None): + filename = file + request_path = url + request_path = request_path.replace('?', '/') + if params: + param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items()) + request_path = f'{url}/{param_string}' + + response = responses_map.get(method, {}).get(request_path, {}).get(filename) + return response + + create_responses_tree() + yield method + + +@pytest.fixture +def mock_http_call(mock_responses): + def call(method, url, file='response', headers=None, params=None): + + response = mock_responses(method, url, file=file, headers=headers, params=params) + if response is not None: + return response + http_response = requests.models.Response() + http_response.status_code = 404 + http_response.reason = "Not Found" + http_response.url = url + raise requests.exceptions.HTTPError(response=http_response) + + yield call + + +@pytest.fixture +def mock_http_get(request, monkeypatch, mock_http_call): + param = request.param if hasattr(request, 'param') and request.param is not None else {} + http_error = param.pop('http_error', {}) + + def get(url, *args, **kwargs): + method = 'GET' + url = get_url_path(url) + if http_error and url in http_error: + return http_error[url] + mock_status_code = mock.MagicMock(return_value=200) + headers = 
kwargs.get('headers') + params = kwargs.get('params') + mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params)) + return mock.MagicMock(json=mock_json, status_code=mock_status_code) + + mock_get = mock.MagicMock(side_effect=get) + monkeypatch.setattr('requests.get', mock_get) + return mock_get diff --git a/octopus_deploy/tests/fixtures/GET/api/response.json b/octopus_deploy/tests/fixtures/GET/api/response.json new file mode 100644 index 0000000000000..bb58fc18d94d3 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/response.json @@ -0,0 +1,169 @@ +{ + "Application": "Octopus Deploy", + "Version": "2024.2.9409", + "ApiVersion": "3.0.0", + "InstallationId": "e4499012-07a6-4298-0035-038191047a34", + "HasLongTermSupport": true, + "Links": { + "Self": "/api", + "AccessToken": "/api/users/access-token", + "Accounts": "/api/Spaces-1/accounts{/id}{?skip,take,ids,partialName,accountType}", + "ActionTemplateLogo": "/api/Spaces-1/actiontemplates/{typeOrId}/logo{?cb}", + "ActionTemplates": "/api/Spaces-1/actiontemplates{/id}{?skip,take,ids,partialName}", + "ActionTemplatesCategories": "/api/Spaces-1/actiontemplates/categories", + "ActionTemplatesSearch": "/api/Spaces-1/actiontemplates/search{?type}", + "ActionTemplateVersionedLogo": "/api/Spaces-1/actiontemplates/{typeOrId}/versions/{version}/logo{?cb}", + "ArchivedEventFiles": "/api/events/archives{?skip,take}", + "Artifacts": "/api/Spaces-1/artifacts{/id}{?skip,take,regarding,ids,partialName,order}", + "AuditStreamConfiguration": "/api/audit-stream", + "Authentication": "/api/authentication", + "AzureDevOpsConnectivityCheck": "/api/azuredevopsissuetracker/connectivitycheck", + "AzureEnvironments": "/api/accounts/azureenvironments", + "BuildInformation": "/api/Spaces-1/build-information{/id}{?packageId,filter,latest,skip,take,overwriteMode}", + "BuildInformationBulk": "/api/Spaces-1/build-information/bulk{?ids}", + "BuiltInFeedStats": "/api/feeds/stats", + 
"CertificateConfiguration": "/api/configuration/certificates{/id}{?skip,take}", + "Certificates": "/api/Spaces-1/certificates{/id}{?skip,take,search,archived,tenant,firstResult,orderBy,ids,partialName}", + "Channels": "/api/Spaces-1/channels{/id}{?skip,take,ids,partialName}", + "CloudTemplate": "/api/cloudtemplate/{id}/metadata{?packageId,feedId}", + "CommunityActionTemplates": "/api/communityactiontemplates{/id}{?skip,take,ids}", + "Configuration": "/api/configuration{/id}", + "CurrentLicense": "/api/licenses/licenses-current", + "CurrentLicenseStatus": "/api/licenses/licenses-current-status", + "CurrentLicenseUsage": "/api/licenses/licenses-current-usage", + "CurrentUser": "/api/users/me", + "Dashboard": "/api/Spaces-1/dashboard{?projectId,releaseId,selectedTenants,selectedTags,showAll,highestLatestVersionPerProjectAndEnvironment}", + "DashboardConfiguration": "/api/Spaces-1/dashboardconfiguration", + "DashboardDynamic": "/api/Spaces-1/dashboard/dynamic{?projects,environments,includePrevious}", + "DeploymentProcesses": "/api/Spaces-1/deploymentprocesses{/id}{?skip,take,ids}", + "Deployments": "/api/Spaces-1/deployments{/id}{?skip,take,ids,projects,environments,tenants,channels,taskState,partialName}", + "DeploymentTargetTags": "/api/Spaces-1/deploymenttargettags{/id}", + "DiscoverMachine": "/api/Spaces-1/machines/discover{?host,port,type,proxyId}", + "DiscoverWorker": "/api/Spaces-1/workers/discover{?host,port,type,proxyId}", + "DynamicExtensionsFeaturesMetadata": "/api/dynamic-extensions/features/metadata", + "DynamicExtensionsFeaturesValues": "/api/dynamic-extensions/features/values", + "DynamicExtensionsScripts": "/api/dynamic-extensions/scripts", + "EnabledFeatureToggles": "/api/configuration/enabled-feature-toggles", + "Environments": "/api/Spaces-1/environments{/id}{?name,skip,ids,take,partialName}", + "EnvironmentSortOrder": "/api/Spaces-1/environments/sortorder", + "EnvironmentsSummary": 
"/api/Spaces-1/environments/summary{?ids,partialName,machinePartialName,roles,isDisabled,healthStatuses,commStyles,tenantIds,tenantTags,hideEmptyEnvironments,shellNames,deploymentTargetTypes}", + "EventAgents": "/api/events/agents", + "EventCategories": "/api/events/categories{?appliesTo}", + "EventDocumentTypes": "/api/events/documenttypes", + "EventGroups": "/api/events/groups{?appliesTo}", + "Events": "/api/events{/id}{?skip,regarding,regardingAny,user,users,projects,projectGroups,environments,eventGroups,eventCategories,eventAgents,tags,tenants,from,to,internal,fromAutoId,toAutoId,documentTypes,asCsv,take,ids,spaces,includeSystem,excludeDifference}", + "ExportProjects": "/api/Spaces-1/projects/import-export/export", + "ExternalSecurityGroupProviders": "/api/externalsecuritygroupproviders", + "ExternalUserSearch": "/api/users/external-search{?partialName}", + "FeaturesConfiguration": "/api/featuresconfiguration", + "Feeds": "/api/feeds{/id}{?skip,take,ids,partialName,feedType,name}", + "GitCredentials": "/api/Spaces-1/git-credentials{/id}{?skip,take,name}", + "GitHubConnectivityCheck": "/api/githubissuetracker/connectivitycheck", + "ImportProjects": "/api/Spaces-1/projects/import-export/import", + "InsightsReports": "/api/Spaces-1/insights/reports{/id}{?skip,take}", + "Interruptions": "/api/Spaces-1/interruptions{/id}{?skip,take,regarding,pendingOnly,ids}", + "Invitations": "/api/users/invitations", + "IssueTrackers": "/api/issuetrackers{?skip,take,ids,partialName}", + "JiraConnectAppCredentialsTest": "/api/jiraintegration/connectivitycheck/connectapp", + "JiraCredentialsTest": "/api/jiraintegration/connectivitycheck/jira", + "LibraryVariables": "/api/Spaces-1/libraryvariablesets{/id}{?skip,contentType,take,ids,partialName}", + "LifecyclePreviews": "/api/Spaces-1/lifecycles/previews{?ids}", + "Lifecycles": "/api/Spaces-1/lifecycles{/id}{?skip,take,ids,partialName}", + "LoginInitiated": "/api/authentication/checklogininitiated", + "LogoIconCategories": 
"/api/icons/categories?cb=2024.2.9409", + "LogoIcons": "/api/icons/all?cb=2024.2.9409", + "MachineOperatingSystems": "/api/Spaces-1/machines/operatingsystem/names/all", + "MachinePolicies": "/api/Spaces-1/machinepolicies{/id}{?skip,take,ids,partialName}", + "MachinePolicyTemplate": "/api/Spaces-1/machinepolicies/template", + "MachineRoles": "/api/Spaces-1/machineroles/all", + "Machines": "/api/Spaces-1/machines{/id}{?skip,take,name,ids,partialName,roles,isDisabled,healthStatuses,commStyles,tenantIds,tenantTags,environmentIds,thumbprint,deploymentId,shellNames,deploymentTargetTypes}", + "MachineShells": "/api/Spaces-1/machines/operatingsystem/shells/all", + "MaintenanceConfiguration": "/api/maintenanceconfiguration", + "MigrationsImport": "/api/migrations/import", + "MigrationsPartialExport": "/api/migrations/partialexport", + "OctopusServerClusterSummary": "/api/octopusservernodes/summary", + "OctopusServerNodes": "/api/octopusservernodes{/id}{?skip,take,ids,partialName}", + "PackageDeltaSignature": "/api/Spaces-1/packages/{packageId}/{version}/delta-signature", + "PackageDeltaUpload": "/api/Spaces-1/packages/{packageId}/{baseVersion}/delta{?replace,overwriteMode}", + "PackageNotesList": "/api/Spaces-1/packages/notes{?packageIds}", + "Packages": "/api/Spaces-1/packages{/id}{?nuGetPackageId,filter,latest,skip,take,includeNotes}", + "PackagesBulk": "/api/Spaces-1/packages/bulk{?ids}", + "PackageUpload": "/api/Spaces-1/packages/raw{?replace,overwriteMode}", + "PerformanceConfiguration": "/api/performanceconfiguration", + "PermissionDescriptions": "/api/permissions/all", + "ProjectGroups": "/api/Spaces-1/projectgroups{/id}{?skip,take,ids,partialName}", + "ProjectImportFiles": "/api/Spaces-1/projects/import-export/import-files", + "ProjectImportPreview": "/api/Spaces-1/projects/import-export/import/preview", + "ProjectPulse": "/api/Spaces-1/projects/pulse{?projectIds}", + "Projects": 
"/api/Spaces-1/projects{/id}{?name,skip,ids,clone,take,partialName,clonedFromProjectId}", + "ProjectsExperimentalSummaries": "/api/Spaces-1/projects/experimental/summaries{?ids,isVersionControlled}", + "ProjectTriggers": "/api/Spaces-1/projecttriggers{/id}{?skip,take,ids,runbooks}", + "Proxies": "/api/Spaces-1/proxies{/id}{?skip,take,ids,partialName}", + "Register": "/api/users/register", + "Releases": "/api/Spaces-1/releases{/id}{?skip,ignoreChannelRules,take,ids}", + "Reporting/DeploymentsCountedByWeek": "/api/Spaces-1/reporting/deployments-counted-by-week{?projectIds}", + "RetentionDefaultConfiguration": "/api/configuration/retention-default", + "RevokeUserSessions": "/api/users/{id}/revoke-sessions", + "RunbookProcesses": "/api/Spaces-1/runbookProcesses{/id}{?skip,take,ids}", + "RunbookRuns": "/api/Spaces-1/runbookRuns{/id}{?skip,take,ids,projects,environments,tenants,runbooks,taskState,partialName}", + "Runbooks": "/api/Spaces-1/runbooks{/id}{?skip,take,ids,partialName,clone,projectIds}", + "RunbookSnapshots": "/api/Spaces-1/runbookSnapshots{/id}{?skip,take,ids,publish}", + "Scheduler": "/api/scheduler/{name}/logs{?verbose,tail}", + "ScopedUserRoles": "/api/scopeduserroles{/id}{?skip,take,ids,partialName,spaces,includeSystem}", + "ServerConfiguration": "/api/serverconfiguration", + "ServerConfigurationSettings": "/api/serverconfiguration/settings", + "ServerHealthStatus": "/api/serverstatus/health", + "ServerStatus": "/api/serverstatus", + "SignIn": "/api/users/login{?returnUrl}", + "SigningKeyConfiguration": "/api/signingkeyconfiguration", + "SigningKeyRevoke": "/api/signingkeys/{id}/revoke/v1", + "SigningKeyRotate": "/api/signingkeys/rotate/v1", + "SigningKeys": "/api/signingkeys/v1", + "SignOut": "/api/users/logout", + "SmtpConfiguration": "/api/smtpconfiguration", + "SmtpIsConfigured": "/api/smtpconfiguration/isconfigured", + "SpaceHome": "/api/{spaceId}", + "Spaces": "/api/spaces{/id}{?skip,ids,take,partialName}", + "SpaceSearch": 
"/api/spaces/{id}/search{?keyword}", + "StepPackageDeploymentTargetTypes": "/api/steps/deploymenttargets", + "Subscriptions": "/api/Spaces-1/subscriptions{/id}{?skip,take,ids,partialName,spaces}", + "TagSets": "/api/Spaces-1/tagsets{/id}{?skip,take,ids,partialName}", + "TagSetSortOrder": "/api/Spaces-1/tagsets/sortorder", + "Tasks": "/api/tasks{/id}{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "TaskTypes": "/api/tasks/tasktypes", + "TeamMembership": "/api/teammembership{?userId,spaces,includeSystem}", + "TeamMembershipPreviewTeam": "/api/teammembership/previewteam", + "Teams": "/api/teams{/id}{?skip,take,ids,partialName,spaces,includeSystem}", + "TelemetryConfiguration": "/api/telemetryconfiguration", + "TelemetryDownload": "/api/telemetry/download", + "TelemetryLastTask": "/api/telemetry/lastTask", + "TelemetrySend": "/api/telemetry/send", + "Tenants": "/api/Spaces-1/tenants{/id}{?skip,projectId,name,tags,take,ids,clone,partialName,clonedFromTenantId}", + "TenantsMissingVariables": "/api/Spaces-1/tenants/variables-missing{?tenantId,projectId,environmentId,includeDetails}", + "TenantsStatus": "/api/Spaces-1/tenants/status", + "TenantTagTest": "/api/Spaces-1/tenants/tag-test{?tenantIds,tags}", + "TenantVariables": "/api/Spaces-1/tenantvariables/all{?projectId}", + "Timezones": "/api/serverstatus/timezones", + "UpgradeConfiguration": "/api/upgradeconfiguration", + "UserAuthentication": "/api/users/authentication{/userId}", + "UserIdentityMetadata": "/api/users/identity-metadata", + "UserOnboarding": "/api/Spaces-1/useronboarding", + "UserRoles": "/api/userroles{/id}{?skip,take,ids,partialName}", + "Users": "/api/users{/id}{?skip,take,ids,filter}", + "VariableNames": "/api/Spaces-1/variables/names{?project,runbook,projectEnvironmentsFilter,gitRef}", + 
"VariablePreview": "/api/Spaces-1/variables/preview{?project,runbook,environment,channel,tenant,action,machine,role,gitRef}", + "Variables": "/api/Spaces-1/variables{/id}{?ids}", + "VersionControlClearCache": "/api/configuration/versioncontrol/clear-cache", + "VersionRuleTest": "/api/Spaces-1/channels/rule-test{?version,versionRange,preReleaseTag,feetType}", + "Web": "/app", + "WorkerOperatingSystems": "/api/Spaces-1/workers/operatingsystem/names/all", + "WorkerPools": "/api/Spaces-1/workerpools{/id}{?skip,ids,take,partialName}", + "WorkerPoolsDynamicWorkerTypes": "/api/Spaces-1/workerpools/dynamicworkertypes", + "WorkerPoolsSortOrder": "/api/Spaces-1/workerpools/sortorder", + "WorkerPoolsSummary": "/api/Spaces-1/workerpools/summary{?ids,partialName,machinePartialName,isDisabled,healthStatuses,commStyles,hideEmptyWorkerPools,shellNames}", + "WorkerPoolsSupportedTypes": "/api/Spaces-1/workerpools/supportedtypes", + "Workers": "/api/Spaces-1/workers{/id}{?skip,take,name,ids,partialName,isDisabled,healthStatuses,commStyles,workerPoolIds,thumbprint,shellNames}", + "WorkerShells": "/api/Spaces-1/workers/operatingsystem/shells/all", + "WorkerToolsLatestImages": "/api/workertoolslatestimages" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 0fc155f967388..f5dfdb32d33d2 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -2,31 +2,43 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from typing import Any, Callable, Dict # noqa: F401 - import pytest -from datadog_checks.base import AgentCheck # noqa: F401 -from datadog_checks.base.stubs.aggregator import AggregatorStub # noqa: F401 +from datadog_checks.dev.http import MockResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck +@pytest.mark.usefixtures('mock_http_get') def test_check(dd_run_check, 
aggregator, instance): - # type: (Callable[[AgentCheck, bool], None], AggregatorStub, Dict[str, Any]) -> None check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - with pytest.raises(Exception, match=r'Max retries exceeded with url: /api'): - dd_run_check(check) + dd_run_check(check) - aggregator.assert_metric('octopus_deploy.api.can_connect', 0) + aggregator.assert_metric('octopus_deploy.api.can_connect', 1) aggregator.assert_all_metrics_covered() aggregator.assert_metrics_using_metadata(get_metadata_metrics()) -def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance): - # type: (Callable[[AgentCheck, bool], None], AggregatorStub, Dict[str, Any]) -> None +@pytest.mark.parametrize( + ('mock_http_get, message'), + [ + pytest.param( + {'http_error': {'/api': MockResponse(status_code=500)}}, + 'HTTPError: 500 Server Error: None for url: None', + id='500', + ), + pytest.param( + {'http_error': {'/api': MockResponse(status_code=404)}}, + 'HTTPError: 404 Client Error: None for url: None', + id='404', + ), + ], + indirect=['mock_http_get'], +) +@pytest.mark.usefixtures('mock_http_get') +def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance, message): check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - with pytest.raises(Exception, match=r'Max retries exceeded with url: /api'): + with pytest.raises(Exception, match=message): dd_run_check(check) aggregator.assert_metric('octopus_deploy.api.can_connect', 0) From 3c2c9fadabbc63d0313ab07760fe63a84c172b3f Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Wed, 4 Sep 2024 13:04:55 -0400 Subject: [PATCH 15/44] Add support for first level space autodiscovery --- octopus_deploy/assets/configuration/spec.yaml | 23 ++++++ .../datadog_checks/octopus_deploy/check.py | 74 ++++++++++++++++++- .../octopus_deploy/config_models/instance.py | 17 ++++- .../octopus_deploy/constants.py | 6 ++ .../octopus_deploy/data/conf.yaml.example | 5 ++ 
.../datadog_checks/octopus_deploy/spaces.py | 11 +++ octopus_deploy/metadata.csv | 1 + octopus_deploy/tests/constants.py | 5 ++ octopus_deploy/tests/test_unit.py | 29 ++++++++ 9 files changed, 167 insertions(+), 4 deletions(-) create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/constants.py create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/spaces.py create mode 100644 octopus_deploy/tests/constants.py diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index ba068efa5a36a..91c392ec90711 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -15,4 +15,27 @@ files: example: http://localhost:80/api type: string required: true + - name: spaces + description: | + filter your integration by spaces, project groups, and projects. + value: + type: object + properties: + - name: limit + description: | + Maximum number of spaces to be processed. + type: integer + - name: include + type: array + items: + anyOf: + - type: string + - type: object + - name: exclude + type: array + items: + type: string + - name: interval + type: integer + example: {} - template: instances/default diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 6a8504c3f597b..cf529b88c8b8d 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -4,8 +4,13 @@ from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout from datadog_checks.base import AgentCheck +from datadog_checks.base.utils.discovery import Discovery +from datadog_checks.base.utils.models.types import copy_raw from datadog_checks.octopus_deploy.config_models import ConfigMixin +from .constants import API_UP_METRIC, SPACE_COUNT_METRIC +from .spaces import Space + class OctopusDeployCheck(AgentCheck, ConfigMixin): @@ -14,15 +19,80 @@ class 
OctopusDeployCheck(AgentCheck, ConfigMixin): def __init__(self, name, init_config, instances): super(OctopusDeployCheck, self).__init__(name, init_config, instances) + def _initialize_caches(self): + self.spaces_discovery = None + if self.config.spaces: + normalized_spaces = normalize_discover_config_include(self.config.spaces, ["name"]) + self.log.info("Spaces discovery: %s", normalized_spaces) + if normalized_spaces: + self.spaces_discovery = Discovery( + lambda: self._get_new_spaces(), + limit=self.config.spaces.limit, + include=normalized_spaces, + exclude=self.config.spaces.exclude, + interval=self.config.spaces.interval, + key=lambda space: space.name, + ) + + def _get_new_spaces(self): + spaces_endpoint = f"{self.config.octopus_endpoint}/spaces" + response = self.http.get(spaces_endpoint) + response.raise_for_status() + spaces_json = response.json().get('Items', []) + spaces = [] + for space in spaces_json: + new_space = Space(space) + spaces.append(new_space) + return spaces + + def spaces(self): + if self.spaces_discovery: + spaces = [space_discovery[2] for space_discovery in self.spaces_discovery.get_items()] + else: + spaces = self._get_new_spaces() + + for space in spaces: + tags = [f"space_id:{space.id}", f"space_name:{space.name}", f"space_slug:{space.slug}"] + self.gauge(SPACE_COUNT_METRIC, 1, tags=tags) + + all_space_names = [space.name for space in spaces] + self.log.info("Collecting data from spaces: %s", ",".join(all_space_names)) + return spaces + def check(self, _): try: response = self.http.get(self.config.octopus_endpoint) response.raise_for_status() except (Timeout, HTTPError, InvalidURL, ConnectionError) as e: - self.gauge("api.can_connect", 0, tags=self.config.tags) + self.gauge(API_UP_METRIC, 0, tags=self.config.tags) self.log.warning( "Failed to connect to Octopus Deploy endpoint %s: %s", self.config.octopus_endpoint, str(e) ) raise - self.gauge("api.can_connect", 1, tags=self.config.tags) + self.gauge(API_UP_METRIC, 1, 
tags=self.config.tags) + self._initialize_caches() + self.spaces() + + +# Discovery class requires 'include' to be a dict, so this function is needed to normalize the config +def normalize_discover_config_include(config, item_keys): + normalized_config = {} + include_list = config.get('include') if isinstance(config, dict) else copy_raw(config.include) if config else [] + if include_list: + if not isinstance(include_list, list): + raise TypeError('Setting `include` must be an array') + for entry in include_list: + if isinstance(entry, str): + normalized_config[entry] = None + elif isinstance(entry, dict): + dict_key = None + for key in item_keys: + if key in entry.keys(): + normalized_config[entry[key]] = entry + break + if dict_key: + normalized_config[dict_key] = entry + else: + raise TypeError('`include` entries must be a map or a string') + return normalized_config diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py index f3f28b127c613..3db476ae94629 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py @@ -9,9 +9,10 @@ from __future__ import annotations -from typing import Optional +from types import MappingProxyType +from typing import Any, Optional, Union -from pydantic import BaseModel, ConfigDict, field_validator, model_validator +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator from datadog_checks.base.utils.functions import identity from datadog_checks.base.utils.models import validation @@ -28,6 +29,17 @@ class MetricPatterns(BaseModel): include: Optional[tuple[str, ...]] = None +class Spaces(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + frozen=True, + ) + exclude: Optional[tuple[str, ...]] = None + include: Optional[tuple[Union[str, MappingProxyType[str, Any]], ...]] = None + 
interval: Optional[int] = None + limit: Optional[int] = Field(None, description='Maximum number of spaces to be processed.\n') + + class InstanceConfig(BaseModel): model_config = ConfigDict( validate_default=True, @@ -40,6 +52,7 @@ class InstanceConfig(BaseModel): min_collection_interval: Optional[float] = None octopus_endpoint: str service: Optional[str] = None + spaces: Optional[Spaces] = None tags: Optional[tuple[str, ...]] = None @model_validator(mode='before') diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py new file mode 100644 index 0000000000000..27e726b17e445 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -0,0 +1,6 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +API_UP_METRIC = "api.can_connect" +SPACE_COUNT_METRIC = "space.count" diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index add371ae84672..eef1c8f827953 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -19,6 +19,11 @@ instances: # - octopus_endpoint: http://localhost:80/api + ## @param spaces - mapping - optional + ## filter your integration by spaces, project groups, and projects. + # + # spaces: {} + ## @param tags - list of strings - optional ## A list of tags to attach to every metric and service check emitted by this instance. ## diff --git a/octopus_deploy/datadog_checks/octopus_deploy/spaces.py b/octopus_deploy/datadog_checks/octopus_deploy/spaces.py new file mode 100644 index 0000000000000..bc7f7550c097e --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/spaces.py @@ -0,0 +1,11 @@ +# (C) Datadog, Inc. 
2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + + +class Space: + def __init__(self, space_json): + self.id = space_json.get("Id") + self.name = space_json.get("Name") + self.slug = space_json.get("Slug") + self.projects = None diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index ff748a56df064..72da2783a8bd5 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1,2 +1,3 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, +octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py new file mode 100644 index 0000000000000..6c1c82ee5ec46 --- /dev/null +++ b/octopus_deploy/tests/constants.py @@ -0,0 +1,5 @@ +# (C) Datadog, Inc. 
2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) + +ALL_METRICS = ["octopus_deploy.space.count"] diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index f5dfdb32d33d2..25a056f19f837 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -2,12 +2,17 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +import copy +import logging + import pytest from datadog_checks.dev.http import MockResponse from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck +from .constants import ALL_METRICS + @pytest.mark.usefixtures('mock_http_get') def test_check(dd_run_check, aggregator, instance): @@ -15,6 +20,8 @@ def test_check(dd_run_check, aggregator, instance): dd_run_check(check) aggregator.assert_metric('octopus_deploy.api.can_connect', 1) + for metric in ALL_METRICS: + aggregator.assert_metric(metric) aggregator.assert_all_metrics_covered() aggregator.assert_metrics_using_metadata(get_metadata_metrics()) @@ -42,3 +49,25 @@ def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggrega dd_run_check(check) aggregator.assert_metric('octopus_deploy.api.can_connect', 0) + + +@pytest.mark.parametrize( + 'spaces_config, metric_count', + [ + pytest.param(None, 1, id="default"), + pytest.param({'include': ['Default']}, 1, id="include"), + pytest.param({'include': ['Default'], 'limit': 1}, 1, id="within limit"), + pytest.param({'include': ['Default'], 'limit': 0}, 0, id="limit hit"), + pytest.param({'include': ['Default'], 'exclude': ['Default']}, 0, id="excluded"), + pytest.param({'include': ['Default'], 'exclude': ['Default']}, 0, id="excluded"), + ], +) +@pytest.mark.usefixtures('mock_http_get') +def test_spaces_discovery(dd_run_check, aggregator, instance, spaces_config, metric_count, caplog): + caplog.set_level(logging.DEBUG) + instance = 
copy.deepcopy(instance) + instance['spaces'] = spaces_config + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + dd_run_check(check) + tags = ["space_name:Default", "space_id:Spaces-1", "space_slug:default"] + aggregator.assert_metric("octopus_deploy.space.count", count=metric_count, tags=tags) From a7bf9b964234ebc68a290777ddb9e23a12814c7e Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 9 Sep 2024 11:11:24 -0400 Subject: [PATCH 16/44] Add support fot project groups discovery --- .../datadog_checks/octopus_deploy/check.py | 95 ++++++++++++++----- .../octopus_deploy/constants.py | 1 + .../datadog_checks/octopus_deploy/spaces.py | 7 ++ octopus_deploy/metadata.csv | 1 + octopus_deploy/tests/constants.py | 73 +++++++++++++- octopus_deploy/tests/test_unit.py | 60 +++++++++++- 6 files changed, 205 insertions(+), 32 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index cf529b88c8b8d..f269fbe3e5d5b 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -8,8 +8,8 @@ from datadog_checks.base.utils.models.types import copy_raw from datadog_checks.octopus_deploy.config_models import ConfigMixin -from .constants import API_UP_METRIC, SPACE_COUNT_METRIC -from .spaces import Space +from .constants import API_UP_METRIC, PROJECT_GROUP_COUNT_METRIC, SPACE_COUNT_METRIC +from .spaces import ProjectGroup, Space class OctopusDeployCheck(AgentCheck, ConfigMixin): @@ -18,12 +18,53 @@ class OctopusDeployCheck(AgentCheck, ConfigMixin): def __init__(self, name, init_config, instances): super(OctopusDeployCheck, self).__init__(name, init_config, instances) + self._project_groups_discovery = {} def _initialize_caches(self): + self._initialize_spaces() + for _, space_name, space, space_config in self.spaces(): + self._initialize_project_groups(space_name, space.id, space_config) + + def 
_initialize_project_groups(self, space_name, space_id, space_config): + if space_name not in self._project_groups_discovery: + normalized_project_groups = normalize_discover_config_include( + self.log, space_config.get("project_groups") if space_config else None + ) + self.log.debug("Project groups discovery: %s", normalized_project_groups) + if normalized_project_groups: + self._project_groups_discovery[space_name] = Discovery( + lambda: self._get_new_project_groups(space_id), + limit=space_config.get('project_groups').get('limit') if space_config else None, + include=normalized_project_groups, + exclude=space_config.get('project_groups').get('exclude') if space_config else None, + interval=space_config.get('project_groups').get('interval') if space_config else None, + key=lambda project_group: project_group.name, + ) + else: + self._project_groups_discovery[space_name] = None + if self._project_groups_discovery[space_name]: + discovered_project_groups = list(self._project_groups_discovery[space_name].get_items()) + else: + discovered_project_groups = [ + (None, project_group.name, project_group, None) + for project_group in self._get_new_project_groups(space_id) + ] + + for _, project_group_name, project_group, _ in discovered_project_groups: + tags = [ + f"project_group_id:{project_group.id}", + f"project_group_name:{project_group_name}", + f"space_name:{space_name}", + ] + self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=tags) + + self.log.debug("Discovered project groups: %s", discovered_project_groups) + + def _initialize_spaces(self): self.spaces_discovery = None if self.config.spaces: - normalized_spaces = normalize_discover_config_include(self.config.spaces, ["name"]) - self.log.info("Spaces discovery: %s", normalized_spaces) + normalized_spaces = normalize_discover_config_include(self.log, self.config.spaces) + self.log.info("Spaces discovery: %s", self.config.spaces) if normalized_spaces: self.spaces_discovery = Discovery( lambda: self._get_new_spaces(), 
@@ -34,6 +75,17 @@ def _initialize_caches(self): key=lambda space: space.name, ) + def _get_new_project_groups(self, space_id): + project_groups_endpoint = f"{self.config.octopus_endpoint}/{space_id}/projectgroups" + response = self.http.get(project_groups_endpoint) + response.raise_for_status() + project_groups_json = response.json().get('Items', []) + project_groups = [] + for project_group in project_groups_json: + new_project_group = ProjectGroup(project_group) + project_groups.append(new_project_group) + return project_groups + def _get_new_spaces(self): spaces_endpoint = f"{self.config.octopus_endpoint}/spaces" response = self.http.get(spaces_endpoint) @@ -47,15 +99,15 @@ def _get_new_spaces(self): def spaces(self): if self.spaces_discovery: - spaces = [space_discovery[2] for space_discovery in self.spaces_discovery.get_items()] + spaces = list(self.spaces_discovery.get_items()) else: - spaces = self._get_new_spaces() + spaces = [(None, space.name, space, None) for space in self._get_new_spaces()] - for space in spaces: + for _, _, space, _ in spaces: tags = [f"space_id:{space.id}", f"space_name:{space.name}", f"space_slug:{space.slug}"] self.gauge(SPACE_COUNT_METRIC, 1, tags=tags) - all_space_names = [space.name for space in spaces] + all_space_names = [space.name for _, _, space, _ in spaces] self.log.info("Collecting data from spaces: %s", ",".join(all_space_names)) return spaces @@ -72,27 +124,18 @@ def check(self, _): self.gauge(API_UP_METRIC, 1, tags=self.config.tags) self._initialize_caches() - self.spaces() # Discovery class requires 'include' to be a dict, so this function is needed to normalize the config -def normalize_discover_config_include(config, item_keys): +def normalize_discover_config_include(log, config): normalized_config = {} + log.debug("normalize_discover_config_include config: %s", config) include_list = config.get('include') if isinstance(config, dict) else copy_raw(config.include) if config else [] - if include_list: - if not 
isinstance(include_list, list): - raise TypeError('Setting `include` must be an array') - for entry in include_list: - if isinstance(entry, str): - normalized_config[entry] = None - elif isinstance(entry, dict): - dict_key = None - for key in item_keys: - if key in entry.keys(): - normalized_config[entry[key]] = entry - break - if dict_key: - normalized_config[dict_key] = entry - else: - raise TypeError('`include` entries must be a map or a string') + log.debug("normalize_discover_config_include include_list: %s", include_list) + for entry in include_list: + if isinstance(entry, str): + normalized_config[entry] = None + elif isinstance(entry, dict): + for key, value in entry.items(): + normalized_config[key] = value.copy() return normalized_config diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index 27e726b17e445..d2b073c8ca243 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -4,3 +4,4 @@ API_UP_METRIC = "api.can_connect" SPACE_COUNT_METRIC = "space.count" +PROJECT_GROUP_COUNT_METRIC = "project_group.count" diff --git a/octopus_deploy/datadog_checks/octopus_deploy/spaces.py b/octopus_deploy/datadog_checks/octopus_deploy/spaces.py index bc7f7550c097e..2fd06d87562fb 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/spaces.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/spaces.py @@ -8,4 +8,11 @@ def __init__(self, space_json): self.id = space_json.get("Id") self.name = space_json.get("Name") self.slug = space_json.get("Slug") + self.project_groups = None + + +class ProjectGroup: + def __init__(self, project_group_json): + self.id = project_group_json.get("Id") + self.name = project_group_json.get("Name") self.projects = None diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 72da2783a8bd5..80dd14c0cfa7b 100644 --- a/octopus_deploy/metadata.csv +++ 
b/octopus_deploy/metadata.csv @@ -1,3 +1,4 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, +octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 6c1c82ee5ec46..5615c68e972d8 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -2,4 +2,75 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -ALL_METRICS = ["octopus_deploy.space.count"] +ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.space.count"] + +PROJECT_GROUP_ALL_METRICS = [ + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], + 'count': 1, + }, +] + +PROJECT_GROUP_ONLY_TEST_GROUP_METRICS = [ + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], + 'count': 0, + }, + { + 'name': 
'octopus_deploy.project_group.count', + 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], + 'count': 1, + }, +] + +PROJECT_GROUP_NO_METRICS = [ + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], + 'count': 0, + }, +] +PROJECT_GROUP_NO_TEST_GROUP_METRICS = [ + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project_group.count', + 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], + 'count': 0, + }, +] diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 25a056f19f837..81a6071f9f470 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import copy -import logging import pytest @@ -11,7 +10,13 @@ from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck -from .constants import ALL_METRICS +from .constants import ( + ALL_METRICS, + PROJECT_GROUP_ALL_METRICS, + PROJECT_GROUP_NO_METRICS, + PROJECT_GROUP_NO_TEST_GROUP_METRICS, + PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, +) 
@pytest.mark.usefixtures('mock_http_get') @@ -59,15 +64,60 @@ def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggrega pytest.param({'include': ['Default'], 'limit': 1}, 1, id="within limit"), pytest.param({'include': ['Default'], 'limit': 0}, 0, id="limit hit"), pytest.param({'include': ['Default'], 'exclude': ['Default']}, 0, id="excluded"), - pytest.param({'include': ['Default'], 'exclude': ['Default']}, 0, id="excluded"), + pytest.param({'include': ['Default'], 'exclude': ['test']}, 1, id="excluded invalid"), ], ) @pytest.mark.usefixtures('mock_http_get') -def test_spaces_discovery(dd_run_check, aggregator, instance, spaces_config, metric_count, caplog): - caplog.set_level(logging.DEBUG) +def test_spaces_discovery(dd_run_check, aggregator, instance, spaces_config, metric_count): instance = copy.deepcopy(instance) instance['spaces'] = spaces_config check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) tags = ["space_name:Default", "space_id:Spaces-1", "space_slug:default"] aggregator.assert_metric("octopus_deploy.space.count", count=metric_count, tags=tags) + + +@pytest.mark.parametrize( + 'spaces_config, expected_metrics', + [ + pytest.param(None, PROJECT_GROUP_ALL_METRICS, id="default"), + pytest.param( + {'include': [{'Default': {'project_groups': {'include': ['test-group']}}}]}, + PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, + id="include", + ), + pytest.param( + {'include': [{'Default': {'project_groups': {'include': ['test-group'], 'limit': 1}}}]}, + PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, + id="within limit", + ), + pytest.param( + {'include': [{'Default': {'project_groups': {'include': ['test-group'], 'limit': 0}}}]}, + PROJECT_GROUP_NO_METRICS, + id="limit hit", + ), + pytest.param( + {'include': [{'Default': {'project_groups': {'include': ['test-group'], 'exclude': ['test-group']}}}]}, + PROJECT_GROUP_NO_METRICS, + id="excluded", + ), + pytest.param( + {'include': [{'Default': {'project_groups': {'include': 
['.*'], 'exclude': ['test-group']}}}]}, + PROJECT_GROUP_NO_TEST_GROUP_METRICS, + id="one excluded", + ), + pytest.param( + {'include': [{'Default': {'include': {'project_groups': ['test-group'], 'exclude': ['testing']}}}]}, + PROJECT_GROUP_ALL_METRICS, + id="excluded invalud", + ), + ], +) +@pytest.mark.usefixtures('mock_http_get') +def test_project_groups_discovery(dd_run_check, aggregator, instance, spaces_config, expected_metrics): + instance = copy.deepcopy(instance) + instance['spaces'] = spaces_config + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + dd_run_check(check) + for metric in expected_metrics: + aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) From 1ba66325d3ed900dd366a88ea9f36790fc16b857 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Fri, 13 Sep 2024 13:09:09 -0400 Subject: [PATCH 17/44] Add support for projects --- .../datadog_checks/octopus_deploy/check.py | 126 +++++- .../octopus_deploy/constants.py | 1 + .../datadog_checks/octopus_deploy/spaces.py | 8 + octopus_deploy/metadata.csv | 1 + octopus_deploy/tests/constants.py | 378 +++++++++++++++++- octopus_deploy/tests/test_unit.py | 129 +++++- 6 files changed, 616 insertions(+), 27 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index f269fbe3e5d5b..11b17a3d384fa 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -8,8 +8,8 @@ from datadog_checks.base.utils.models.types import copy_raw from datadog_checks.octopus_deploy.config_models import ConfigMixin -from .constants import API_UP_METRIC, PROJECT_GROUP_COUNT_METRIC, SPACE_COUNT_METRIC -from .spaces import ProjectGroup, Space +from .constants import API_UP_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC, SPACE_COUNT_METRIC +from .spaces import Project, ProjectGroup, Space class OctopusDeployCheck(AgentCheck, ConfigMixin): 
@@ -19,11 +19,47 @@ class OctopusDeployCheck(AgentCheck, ConfigMixin): def __init__(self, name, init_config, instances): super(OctopusDeployCheck, self).__init__(name, init_config, instances) self._project_groups_discovery = {} + self._projects_discovery = {} def _initialize_caches(self): self._initialize_spaces() for _, space_name, space, space_config in self.spaces(): self._initialize_project_groups(space_name, space.id, space_config) + for _, project_group_name, project_group, project_group_config in self.project_groups(space.id, space_name): + self._initialize_projects( + space_name, space.id, project_group_name, project_group.id, project_group_config + ) + self.projects(space_name, space.id, project_group.id, project_group_name) + + def _initialize_projects(self, space_name, space_id, project_group_name, project_group_id, project_group_config): + if not self._projects_discovery.get(space_name, {}).get(project_group_name): + normalized_projects = normalize_discover_config_include( + self.log, project_group_config.get("projects") if project_group_config else None + ) + self.log.debug( + "Projects discovery for space %s project_group %s: %s", + space_name, + project_group_name, + normalized_projects, + ) + if normalized_projects: + if not self._projects_discovery.get(space_name): + self._projects_discovery[space_name] = {} + self._projects_discovery[space_name][project_group_name] = Discovery( + lambda: self._get_new_projects(space_id, project_group_id), + limit=project_group_config.get('projects').get('limit') if project_group_config else None, + include=normalized_projects, + exclude=project_group_config.get('projects').get('exclude') if project_group_config else None, + interval=(project_group_config.get('projects').get('interval') if project_group_config else None), + key=lambda project: project.name, + ) + else: + if not self._projects_discovery.get(space_name): + self._projects_discovery[space_name] = {} + + 
self._projects_discovery[space_name][project_group_name] = None + + self.log.debug("Discovered projects: %s", self._projects_discovery) def _initialize_project_groups(self, space_name, space_id, space_config): if space_name not in self._project_groups_discovery: @@ -42,23 +78,8 @@ def _initialize_project_groups(self, space_name, space_id, space_config): ) else: self._project_groups_discovery[space_name] = None - if self._project_groups_discovery[space_name]: - discovered_project_groups = list(self._project_groups_discovery[space_name].get_items()) - else: - discovered_project_groups = [ - (None, project_group.name, project_group, None) - for project_group in self._get_new_project_groups(space_id) - ] - for _, project_group_name, project_group, _ in discovered_project_groups: - tags = [ - f"project_group_id:{project_group.id}", - f"project_group_name:{project_group_name}", - f"space_name:{space_name}", - ] - self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=tags) - - self.log.debug("Discovered project groups: %s", discovered_project_groups) + self.log.debug("Discovered project groups: %s", self._project_groups_discovery) def _initialize_spaces(self): self.spaces_discovery = None @@ -75,6 +96,50 @@ def _initialize_spaces(self): key=lambda space: space.name, ) + def projects(self, space_name, space_id, project_group_id, project_group_name): + if self._projects_discovery.get(space_name, {}).get(project_group_name): + projects = list(self._projects_discovery[space_name][project_group_name].get_items()) + else: + projects = [ + (None, project.name, project, None) for project in self._get_new_projects(space_id, project_group_id) + ] + + for _, _, project, _ in projects: + tags = [ + f"project_id:{project.id}", + f"project_name:{project.name}", + f"project_group_id:{project_group_id}", + f"project_group_name:{project_group_name}", + f"space_name:{space_name}", + ] + self.gauge(PROJECT_COUNT_METRIC, 1, tags=tags) + + all_project_names = [project.name for _, _, project, _ in 
projects] + self.log.info("Collecting data from projects: %s", ",".join(all_project_names)) + return projects + + def report_project_metrics(self, project_list, project_group_id, project_group_name, space_name): + for _, _, project, _ in project_list: + tags = [ + f"project_id:{project.id}", + f"project_name:{project.name}", + f"project_group_id:{project_group_id}", + f"project_group_name:{project_group_name}", + f"space_name:{space_name}", + ] + self.gauge(PROJECT_COUNT_METRIC, 1, tags=tags) + + def _get_new_projects(self, space_id, project_group_id): + projects_endpoint = f"{self.config.octopus_endpoint}/{space_id}/projectgroups/{project_group_id}/projects" + response = self.http.get(projects_endpoint) + response.raise_for_status() + projects_json = response.json().get('Items', []) + projects = [] + for project in projects_json: + new_project = Project(project) + projects.append(new_project) + return projects + def _get_new_project_groups(self, space_id): project_groups_endpoint = f"{self.config.octopus_endpoint}/{space_id}/projectgroups" response = self.http.get(project_groups_endpoint) @@ -111,6 +176,27 @@ def spaces(self): self.log.info("Collecting data from spaces: %s", ",".join(all_space_names)) return spaces + def project_groups(self, space_id, space_name): + if self._project_groups_discovery.get(space_name): + project_groups = list(self._project_groups_discovery[space_name].get_items()) + else: + project_groups = [ + (None, project_groups.name, project_groups, None) + for project_groups in self._get_new_project_groups(space_id) + ] + + for _, project_group_name, project_group, _ in project_groups: + tags = [ + f"project_group_id:{project_group.id}", + f"project_group_name:{project_group_name}", + f"space_name:{space_name}", + ] + self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=tags) + + all_project_group_names = [space.name for _, _, space, _ in project_groups] + self.log.info("Collecting data from project_groups: %s", ",".join(all_project_group_names)) + 
return project_groups + def check(self, _): try: response = self.http.get(self.config.octopus_endpoint) @@ -132,6 +218,10 @@ def normalize_discover_config_include(log, config): log.debug("normalize_discover_config_include config: %s", config) include_list = config.get('include') if isinstance(config, dict) else copy_raw(config.include) if config else [] log.debug("normalize_discover_config_include include_list: %s", include_list) + if not isinstance(include_list, list): + raise TypeError('Setting `include` must be an array') + if len(include_list) == 0: + return {} for entry in include_list: if isinstance(entry, str): normalized_config[entry] = None diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index d2b073c8ca243..faeb25ef189a6 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -5,3 +5,4 @@ API_UP_METRIC = "api.can_connect" SPACE_COUNT_METRIC = "space.count" PROJECT_GROUP_COUNT_METRIC = "project_group.count" +PROJECT_COUNT_METRIC = "project.count" diff --git a/octopus_deploy/datadog_checks/octopus_deploy/spaces.py b/octopus_deploy/datadog_checks/octopus_deploy/spaces.py index 2fd06d87562fb..f309b91f904bc 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/spaces.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/spaces.py @@ -16,3 +16,11 @@ def __init__(self, project_group_json): self.id = project_group_json.get("Id") self.name = project_group_json.get("Name") self.projects = None + + +class Project: + def __init__(self, project_json): + self.id = project_json.get("Id") + self.name = project_json.get("Name") + self.last_task_id = None + self.last_task_time = None diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 80dd14c0cfa7b..e69feb7afdd90 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1,4 +1,5 @@ 
metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, +octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 5615c68e972d8..75f2f6ea7fc6b 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -2,7 +2,7 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.space.count"] +ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.project.count", "octopus_deploy.space.count"] PROJECT_GROUP_ALL_METRICS = [ { @@ -74,3 +74,379 @@ 'count': 0, }, ] + +PROJECT_ALL_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', 
+ 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 1, + }, +] + +PROJECT_ONLY_TEST_GROUP_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 0, + }, +] + +PROJECT_ONLY_DEFAULT_GROUP_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 1, + }, + { + 'name': 
'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 1, + }, +] + +PROJECT_ONLY_TEST_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 1, + }, +] + +PROJECT_ONLY_HI_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 0, + }, + { + 'name': 
'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 0, + }, +] + +PROJECT_ONLY_HI_MY_PROJECT_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 0, + }, +] + +PROJECT_EXCLUDE_TEST_API_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 1, + }, + { + 
'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 0, + }, +] + +PROJECT_NO_METRICS = [ + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:test-group", + "project_group_id:ProjectGroups-2", + "space_name:Default", + "project_name:hi", + "project_id:Projects-4", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:my-project", + "project_id:Projects-2", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + ], + 'count': 0, + }, + { + 'name': 'octopus_deploy.project.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + ], + 'count': 0, + }, +] diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 81a6071f9f470..12cb51de66162 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -3,6 +3,7 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import copy +import logging import pytest @@ -12,10 +13,14 @@ from .constants import ( ALL_METRICS, + PROJECT_ALL_METRICS, PROJECT_GROUP_ALL_METRICS, PROJECT_GROUP_NO_METRICS, PROJECT_GROUP_NO_TEST_GROUP_METRICS, PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, + PROJECT_NO_METRICS, + PROJECT_ONLY_HI_METRICS, + PROJECT_ONLY_HI_MY_PROJECT_METRICS, ) @@ -54,27 +59,35 @@ def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggrega dd_run_check(check) 
aggregator.assert_metric('octopus_deploy.api.can_connect', 0) + aggregator.assert_all_metrics_covered() @pytest.mark.parametrize( - 'spaces_config, metric_count', + 'spaces_config, metric_count, project_group_metrics', [ - pytest.param(None, 1, id="default"), - pytest.param({'include': ['Default']}, 1, id="include"), - pytest.param({'include': ['Default'], 'limit': 1}, 1, id="within limit"), - pytest.param({'include': ['Default'], 'limit': 0}, 0, id="limit hit"), - pytest.param({'include': ['Default'], 'exclude': ['Default']}, 0, id="excluded"), - pytest.param({'include': ['Default'], 'exclude': ['test']}, 1, id="excluded invalid"), + pytest.param(None, 1, PROJECT_GROUP_ALL_METRICS, id="default"), + pytest.param({'include': ['Default']}, 1, PROJECT_GROUP_ALL_METRICS, id="include"), + pytest.param({'include': ['Default'], 'limit': 1}, 1, PROJECT_GROUP_ALL_METRICS, id="within limit"), + pytest.param({'include': ['Default'], 'limit': 0}, 0, PROJECT_GROUP_NO_METRICS, id="limit hit"), + pytest.param({'include': ['Default'], 'exclude': ['Default']}, 0, PROJECT_GROUP_NO_METRICS, id="excluded"), + pytest.param( + {'include': ['Default'], 'exclude': ['test']}, 1, PROJECT_GROUP_ALL_METRICS, id="excluded invalid" + ), ], ) @pytest.mark.usefixtures('mock_http_get') -def test_spaces_discovery(dd_run_check, aggregator, instance, spaces_config, metric_count): +def test_spaces_discovery(dd_run_check, aggregator, instance, spaces_config, metric_count, project_group_metrics): instance = copy.deepcopy(instance) instance['spaces'] = spaces_config check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) tags = ["space_name:Default", "space_id:Spaces-1", "space_slug:default"] aggregator.assert_metric("octopus_deploy.space.count", count=metric_count, tags=tags) + for metric in project_group_metrics: + aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + aggregator.assert_metric("octopus_deploy.api.can_connect") + 
aggregator.assert_metric("octopus_deploy.project.count", at_least=0) # TODO: assert specific + aggregator.assert_all_metrics_covered() @pytest.mark.parametrize( @@ -121,3 +134,103 @@ def test_project_groups_discovery(dd_run_check, aggregator, instance, spaces_con dd_run_check(check) for metric in expected_metrics: aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + + +@pytest.mark.parametrize( + 'spaces_config, expected_metrics', + [ + pytest.param(None, PROJECT_ALL_METRICS, id="default"), + pytest.param( + { + 'include': [ + {'Default': {'project_groups': {'include': [{'test-group': {'projects': {'include': ['hi']}}}]}}} + ] + }, + PROJECT_ONLY_HI_METRICS, + id="include", + ), + pytest.param( + { + 'include': [ + { + 'Default': { + 'project_groups': {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 1}}}]} + } + } + ] + }, + PROJECT_ONLY_HI_MY_PROJECT_METRICS, + id="1 limit", + ), + pytest.param( + { + 'include': [ + { + 'Default': { + 'project_groups': {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 0}}}]} + } + } + ] + }, + PROJECT_NO_METRICS, + id="limit hit", + ), + pytest.param( + { + 'include': [ + { + 'Default': { + 'project_groups': { + 'exclude': ['Default.*'], + 'include': [{'test-group': {'projects': {'include': ['.*']}}}], + } + } + } + ] + }, + PROJECT_ONLY_HI_METRICS, + id="excluded default", + ), + pytest.param( + { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['.*']}}}] + } + } + } + ] + }, + PROJECT_NO_METRICS, + id="all excluded", + ), + pytest.param( + { + 'include': [ + { + 'Default': { + 'include': { + 'project_groups': { + 'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['heyhey']}}}] + } + } + } + } + ] + }, + PROJECT_ALL_METRICS, + id="excluded invalud", + ), + ], +) +@pytest.mark.usefixtures('mock_http_get') +def test_projects_discovery(dd_run_check, aggregator, instance, spaces_config, 
expected_metrics, caplog): + caplog.set_level(logging.DEBUG) + instance = copy.deepcopy(instance) + instance['spaces'] = spaces_config + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + dd_run_check(check) + for metric in expected_metrics: + aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) From bfc6d01709e431579a9a4cc3352df6c036b0b120 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Fri, 13 Sep 2024 15:06:33 -0400 Subject: [PATCH 18/44] remove spaces discovery and add it as configuration option --- octopus_deploy/assets/configuration/spec.yaml | 14 +- .../datadog_checks/octopus_deploy/check.py | 149 +++++++----------- .../octopus_deploy/config_models/instance.py | 7 +- .../octopus_deploy/data/conf.yaml.example | 11 +- .../{spaces.py => project_groups.py} | 8 - octopus_deploy/tests/conftest.py | 2 +- octopus_deploy/tests/constants.py | 2 +- octopus_deploy/tests/test_unit.py | 92 +++-------- 8 files changed, 101 insertions(+), 184 deletions(-) rename octopus_deploy/datadog_checks/octopus_deploy/{spaces.py => project_groups.py} (71%) diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index 91c392ec90711..388ad65b26bef 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -15,15 +15,23 @@ files: example: http://localhost:80/api type: string required: true - - name: spaces + - name: space description: | - filter your integration by spaces, project groups, and projects. + Space to monitor + value: + example: Default + type: string + enabled: true + required: true + - name: project_groups + description: | + filter your integration by project groups and projects. value: type: object properties: - name: limit description: | - Maximum number of spaces to be processed. + Maximum number of project groups to be processed. 
type: integer - name: include type: array diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 11b17a3d384fa..8f7c3624ae40e 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -8,8 +8,8 @@ from datadog_checks.base.utils.models.types import copy_raw from datadog_checks.octopus_deploy.config_models import ConfigMixin -from .constants import API_UP_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC, SPACE_COUNT_METRIC -from .spaces import Project, ProjectGroup, Space +from .constants import API_UP_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC +from .project_groups import Project, ProjectGroup class OctopusDeployCheck(AgentCheck, ConfigMixin): @@ -20,33 +20,29 @@ def __init__(self, name, init_config, instances): super(OctopusDeployCheck, self).__init__(name, init_config, instances) self._project_groups_discovery = {} self._projects_discovery = {} + self.space_id = None + space_name = self.instance.get("space") + self.base_tags = self.instance.get("tags", []) + [f"space_name:{space_name}"] def _initialize_caches(self): - self._initialize_spaces() - for _, space_name, space, space_config in self.spaces(): - self._initialize_project_groups(space_name, space.id, space_config) - for _, project_group_name, project_group, project_group_config in self.project_groups(space.id, space_name): - self._initialize_projects( - space_name, space.id, project_group_name, project_group.id, project_group_config - ) - self.projects(space_name, space.id, project_group.id, project_group_name) + self._initialize_project_groups() + for _, project_group_name, project_group, project_group_config in self.project_groups(): + self._initialize_projects(project_group.id, project_group_name, project_group_config) + self.projects(project_group.id, project_group_name) - def _initialize_projects(self, space_name, space_id, 
project_group_name, project_group_id, project_group_config): - if not self._projects_discovery.get(space_name, {}).get(project_group_name): + def _initialize_projects(self, project_group_id, project_group_name, project_group_config): + if not self._projects_discovery.get(project_group_name): normalized_projects = normalize_discover_config_include( self.log, project_group_config.get("projects") if project_group_config else None ) self.log.debug( - "Projects discovery for space %s project_group %s: %s", - space_name, + "Projects discovery for project_group %s: %s", project_group_name, normalized_projects, ) if normalized_projects: - if not self._projects_discovery.get(space_name): - self._projects_discovery[space_name] = {} - self._projects_discovery[space_name][project_group_name] = Discovery( - lambda: self._get_new_projects(space_id, project_group_id), + self._projects_discovery[project_group_name] = Discovery( + lambda: self._get_new_projects(project_group_id), limit=project_group_config.get('projects').get('limit') if project_group_config else None, include=normalized_projects, exclude=project_group_config.get('projects').get('exclude') if project_group_config else None, @@ -54,55 +50,30 @@ def _initialize_projects(self, space_name, space_id, project_group_name, project key=lambda project: project.name, ) else: - if not self._projects_discovery.get(space_name): - self._projects_discovery[space_name] = {} - - self._projects_discovery[space_name][project_group_name] = None + self._projects_discovery[project_group_name] = None self.log.debug("Discovered projects: %s", self._projects_discovery) - def _initialize_project_groups(self, space_name, space_id, space_config): - if space_name not in self._project_groups_discovery: - normalized_project_groups = normalize_discover_config_include( - self.log, space_config.get("project_groups") if space_config else None - ) - self.log.debug("Project groups discovery: %s", normalized_project_groups) + def 
_initialize_project_groups(self): + self._project_groups_discovery = None + if self.config.project_groups: + normalized_project_groups = normalize_discover_config_include(self.log, self.config.project_groups) + self.log.info("Project groups discovery: %s", self.config.project_groups) if normalized_project_groups: - self._project_groups_discovery[space_name] = Discovery( - lambda: self._get_new_project_groups(space_id), - limit=space_config.get('project_groups').get('limit') if space_config else None, + self._project_groups_discovery = Discovery( + lambda: self._get_new_project_groups(), + limit=self.config.project_groups.limit, include=normalized_project_groups, - exclude=space_config.get('project_groups').get('exclude') if space_config else None, - interval=space_config.get('project_groups').get('interval') if space_config else None, + exclude=self.config.project_groups.exclude, + interval=self.config.project_groups.interval, key=lambda project_group: project_group.name, ) - else: - self._project_groups_discovery[space_name] = None - - self.log.debug("Discovered project groups: %s", self._project_groups_discovery) - - def _initialize_spaces(self): - self.spaces_discovery = None - if self.config.spaces: - normalized_spaces = normalize_discover_config_include(self.log, self.config.spaces) - self.log.info("Spaces discovery: %s", self.config.spaces) - if normalized_spaces: - self.spaces_discovery = Discovery( - lambda: self._get_new_spaces(), - limit=self.config.spaces.limit, - include=normalized_spaces, - exclude=self.config.spaces.exclude, - interval=self.config.spaces.interval, - key=lambda space: space.name, - ) - def projects(self, space_name, space_id, project_group_id, project_group_name): - if self._projects_discovery.get(space_name, {}).get(project_group_name): - projects = list(self._projects_discovery[space_name][project_group_name].get_items()) + def projects(self, project_group_id, project_group_name): + if 
self._projects_discovery.get(project_group_name): + projects = list(self._projects_discovery[project_group_name].get_items()) else: - projects = [ - (None, project.name, project, None) for project in self._get_new_projects(space_id, project_group_id) - ] + projects = [(None, project.name, project, None) for project in self._get_new_projects(project_group_id)] for _, _, project, _ in projects: tags = [ @@ -110,27 +81,25 @@ def projects(self, space_name, space_id, project_group_id, project_group_name): f"project_name:{project.name}", f"project_group_id:{project_group_id}", f"project_group_name:{project_group_name}", - f"space_name:{space_name}", ] - self.gauge(PROJECT_COUNT_METRIC, 1, tags=tags) + self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + tags) all_project_names = [project.name for _, _, project, _ in projects] self.log.info("Collecting data from projects: %s", ",".join(all_project_names)) return projects - def report_project_metrics(self, project_list, project_group_id, project_group_name, space_name): + def report_project_metrics(self, project_list, project_group_id, project_group_name): for _, _, project, _ in project_list: tags = [ f"project_id:{project.id}", f"project_name:{project.name}", f"project_group_id:{project_group_id}", f"project_group_name:{project_group_name}", - f"space_name:{space_name}", ] - self.gauge(PROJECT_COUNT_METRIC, 1, tags=tags) + self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + tags) - def _get_new_projects(self, space_id, project_group_id): - projects_endpoint = f"{self.config.octopus_endpoint}/{space_id}/projectgroups/{project_group_id}/projects" + def _get_new_projects(self, project_group_id): + projects_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups/{project_group_id}/projects" response = self.http.get(projects_endpoint) response.raise_for_status() projects_json = response.json().get('Items', []) @@ -140,8 +109,8 @@ def _get_new_projects(self, space_id, project_group_id): 
projects.append(new_project) return projects - def _get_new_project_groups(self, space_id): - project_groups_endpoint = f"{self.config.octopus_endpoint}/{space_id}/projectgroups" + def _get_new_project_groups(self): + project_groups_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups" response = self.http.get(project_groups_endpoint) response.raise_for_status() project_groups_json = response.json().get('Items', []) @@ -151,49 +120,37 @@ def _get_new_project_groups(self, space_id): project_groups.append(new_project_group) return project_groups - def _get_new_spaces(self): + def _get_space_id(self): spaces_endpoint = f"{self.config.octopus_endpoint}/spaces" response = self.http.get(spaces_endpoint) response.raise_for_status() spaces_json = response.json().get('Items', []) - spaces = [] for space in spaces_json: - new_space = Space(space) - spaces.append(new_space) - return spaces - - def spaces(self): - if self.spaces_discovery: - spaces = list(self.spaces_discovery.get_items()) - else: - spaces = [(None, space.name, space, None) for space in self._get_new_spaces()] + space_name = space.get("Name") + if space_name == self.config.space: + self.space_id = space.get("Id") + self.log.debug("Space id for %s found: %s ", self.config.space, self.space_id) - for _, _, space, _ in spaces: - tags = [f"space_id:{space.id}", f"space_name:{space.name}", f"space_slug:{space.slug}"] - self.gauge(SPACE_COUNT_METRIC, 1, tags=tags) - - all_space_names = [space.name for _, _, space, _ in spaces] - self.log.info("Collecting data from spaces: %s", ",".join(all_space_names)) - return spaces + if self.space_id is None: + self.error("Space ID not found for provided space name %s, does it exist?", self.config.space) + raise - def project_groups(self, space_id, space_name): - if self._project_groups_discovery.get(space_name): - project_groups = list(self._project_groups_discovery[space_name].get_items()) + def project_groups(self): + if self._project_groups_discovery: + 
project_groups = list(self._project_groups_discovery.get_items()) else: project_groups = [ - (None, project_groups.name, project_groups, None) - for project_groups in self._get_new_project_groups(space_id) + (None, project_groups.name, project_groups, None) for project_groups in self._get_new_project_groups() ] for _, project_group_name, project_group, _ in project_groups: tags = [ f"project_group_id:{project_group.id}", f"project_group_name:{project_group_name}", - f"space_name:{space_name}", ] - self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=tags) + self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=self.base_tags + tags) - all_project_group_names = [space.name for _, _, space, _ in project_groups] + all_project_group_names = [project_group.name for _, _, project_group, _ in project_groups] self.log.info("Collecting data from project_groups: %s", ",".join(all_project_group_names)) return project_groups @@ -202,13 +159,15 @@ def check(self, _): response = self.http.get(self.config.octopus_endpoint) response.raise_for_status() except (Timeout, HTTPError, InvalidURL, ConnectionError) as e: - self.gauge(API_UP_METRIC, 0, tags=self.config.tags) + self.gauge(API_UP_METRIC, 0, tags=self.base_tags) self.log.warning( "Failed to connect to Octopus Deploy endpoint %s: %s", self.config.octopus_endpoint, str(e) ) raise - self.gauge(API_UP_METRIC, 1, tags=self.config.tags) + self.gauge(API_UP_METRIC, 1, tags=self.base_tags) + if not self.space_id: + self._get_space_id() self._initialize_caches() diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py index 3db476ae94629..9394d0f897617 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py @@ -29,7 +29,7 @@ class MetricPatterns(BaseModel): include: Optional[tuple[str, ...]] = None -class Spaces(BaseModel): +class 
ProjectGroups(BaseModel): model_config = ConfigDict( arbitrary_types_allowed=True, frozen=True, @@ -37,7 +37,7 @@ class Spaces(BaseModel): exclude: Optional[tuple[str, ...]] = None include: Optional[tuple[Union[str, MappingProxyType[str, Any]], ...]] = None interval: Optional[int] = None - limit: Optional[int] = Field(None, description='Maximum number of spaces to be processed.\n') + limit: Optional[int] = Field(None, description='Maximum number of project groups to be processed.\n') class InstanceConfig(BaseModel): @@ -51,8 +51,9 @@ class InstanceConfig(BaseModel): metric_patterns: Optional[MetricPatterns] = None min_collection_interval: Optional[float] = None octopus_endpoint: str + project_groups: Optional[ProjectGroups] = None service: Optional[str] = None - spaces: Optional[Spaces] = None + space: str tags: Optional[tuple[str, ...]] = None @model_validator(mode='before') diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index eef1c8f827953..9b6fbf0d6b93a 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -19,10 +19,15 @@ instances: # - octopus_endpoint: http://localhost:80/api - ## @param spaces - mapping - optional - ## filter your integration by spaces, project groups, and projects. + ## @param space - string - required + ## Space to monitor # - # spaces: {} + space: Default + + ## @param project_groups - mapping - optional + ## filter your integration by project groups and projects. + # + # project_groups: {} ## @param tags - list of strings - optional ## A list of tags to attach to every metric and service check emitted by this instance. 
diff --git a/octopus_deploy/datadog_checks/octopus_deploy/spaces.py b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py similarity index 71% rename from octopus_deploy/datadog_checks/octopus_deploy/spaces.py rename to octopus_deploy/datadog_checks/octopus_deploy/project_groups.py index f309b91f904bc..8880f54a04b56 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/spaces.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py @@ -3,14 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) -class Space: - def __init__(self, space_json): - self.id = space_json.get("Id") - self.name = space_json.get("Name") - self.slug = space_json.get("Slug") - self.project_groups = None - - class ProjectGroup: def __init__(self, project_group_json): self.id = project_group_json.get("Id") diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index f4d1e8561f063..455be7f3ea29e 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -20,7 +20,7 @@ def dd_environment(): @pytest.fixture def instance(): - return {'octopus_endpoint': 'http://localhost:80/api'} + return {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'} def get_json_value_from_file(file_path): diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 75f2f6ea7fc6b..a5c2f85d5ef74 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -2,7 +2,7 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.project.count", "octopus_deploy.space.count"] +ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.project.count"] PROJECT_GROUP_ALL_METRICS = [ { diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 12cb51de66162..cf081e779154f 100644 --- a/octopus_deploy/tests/test_unit.py +++ 
b/octopus_deploy/tests/test_unit.py @@ -62,6 +62,7 @@ def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggrega aggregator.assert_all_metrics_covered() +""" @pytest.mark.parametrize( 'spaces_config, metric_count, project_group_metrics', [ @@ -88,48 +89,49 @@ def test_spaces_discovery(dd_run_check, aggregator, instance, spaces_config, met aggregator.assert_metric("octopus_deploy.api.can_connect") aggregator.assert_metric("octopus_deploy.project.count", at_least=0) # TODO: assert specific aggregator.assert_all_metrics_covered() +""" @pytest.mark.parametrize( - 'spaces_config, expected_metrics', + 'project_groups_config, expected_metrics', [ pytest.param(None, PROJECT_GROUP_ALL_METRICS, id="default"), pytest.param( - {'include': [{'Default': {'project_groups': {'include': ['test-group']}}}]}, + {'include': ['test-group']}, PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, id="include", ), pytest.param( - {'include': [{'Default': {'project_groups': {'include': ['test-group'], 'limit': 1}}}]}, + {'include': ['test-group'], 'limit': 1}, PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, id="within limit", ), pytest.param( - {'include': [{'Default': {'project_groups': {'include': ['test-group'], 'limit': 0}}}]}, + {'include': ['test-group'], 'limit': 0}, PROJECT_GROUP_NO_METRICS, id="limit hit", ), pytest.param( - {'include': [{'Default': {'project_groups': {'include': ['test-group'], 'exclude': ['test-group']}}}]}, + {'include': ['test-group'], 'exclude': ['test-group']}, PROJECT_GROUP_NO_METRICS, id="excluded", ), pytest.param( - {'include': [{'Default': {'project_groups': {'include': ['.*'], 'exclude': ['test-group']}}}]}, + {'include': ['.*'], 'exclude': ['test-group']}, PROJECT_GROUP_NO_TEST_GROUP_METRICS, id="one excluded", ), pytest.param( - {'include': [{'Default': {'include': {'project_groups': ['test-group'], 'exclude': ['testing']}}}]}, + {'include': ['.*'], 'exclude': ['testing']}, PROJECT_GROUP_ALL_METRICS, - id="excluded invalud", + id="excluded invalid", ), 
], ) @pytest.mark.usefixtures('mock_http_get') -def test_project_groups_discovery(dd_run_check, aggregator, instance, spaces_config, expected_metrics): +def test_project_groups_discovery(dd_run_check, aggregator, instance, project_groups_config, expected_metrics): instance = copy.deepcopy(instance) - instance['spaces'] = spaces_config + instance['project_groups'] = project_groups_config check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) for metric in expected_metrics: @@ -137,99 +139,49 @@ def test_project_groups_discovery(dd_run_check, aggregator, instance, spaces_con @pytest.mark.parametrize( - 'spaces_config, expected_metrics', + 'project_groups_config, expected_metrics', [ pytest.param(None, PROJECT_ALL_METRICS, id="default"), pytest.param( - { - 'include': [ - {'Default': {'project_groups': {'include': [{'test-group': {'projects': {'include': ['hi']}}}]}}} - ] - }, + {'include': [{'test-group': {'projects': {'include': ['hi']}}}]}, PROJECT_ONLY_HI_METRICS, id="include", ), pytest.param( - { - 'include': [ - { - 'Default': { - 'project_groups': {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 1}}}]} - } - } - ] - }, + {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 1}}}]}, PROJECT_ONLY_HI_MY_PROJECT_METRICS, id="1 limit", ), pytest.param( - { - 'include': [ - { - 'Default': { - 'project_groups': {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 0}}}]} - } - } - ] - }, + {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 0}}}]}, PROJECT_NO_METRICS, id="limit hit", ), pytest.param( { - 'include': [ - { - 'Default': { - 'project_groups': { - 'exclude': ['Default.*'], - 'include': [{'test-group': {'projects': {'include': ['.*']}}}], - } - } - } - ] + 'exclude': ['Default.*'], + 'include': [{'test-group': {'projects': {'include': ['.*']}}}], }, PROJECT_ONLY_HI_METRICS, id="excluded default", ), pytest.param( - { - 'include': [ - { - 'Default': { - 'project_groups': { - 'include': [{'.*': 
{'projects': {'include': ['.*'], 'exclude': ['.*']}}}] - } - } - } - ] - }, + {'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['.*']}}}]}, PROJECT_NO_METRICS, id="all excluded", ), pytest.param( - { - 'include': [ - { - 'Default': { - 'include': { - 'project_groups': { - 'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['heyhey']}}}] - } - } - } - } - ] - }, + {'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['heyhey']}}}]}, PROJECT_ALL_METRICS, id="excluded invalud", ), ], ) @pytest.mark.usefixtures('mock_http_get') -def test_projects_discovery(dd_run_check, aggregator, instance, spaces_config, expected_metrics, caplog): +def test_projects_discovery(dd_run_check, aggregator, instance, project_groups_config, expected_metrics, caplog): caplog.set_level(logging.DEBUG) instance = copy.deepcopy(instance) - instance['spaces'] = spaces_config + instance['project_groups'] = project_groups_config check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) for metric in expected_metrics: From 7cd3ef36aaa3a83af1e5c1f2f159dba91bac793c Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Fri, 13 Sep 2024 16:30:28 -0400 Subject: [PATCH 19/44] clean up code and tests --- .../datadog_checks/octopus_deploy/check.py | 14 +---- octopus_deploy/tests/test_unit.py | 58 +++++++++++-------- 2 files changed, 35 insertions(+), 37 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 8f7c3624ae40e..52ea1e411d995 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -4,6 +4,7 @@ from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout from datadog_checks.base import AgentCheck +from datadog_checks.base.errors import CheckException from datadog_checks.base.utils.discovery import Discovery from datadog_checks.base.utils.models.types import copy_raw 
from datadog_checks.octopus_deploy.config_models import ConfigMixin @@ -88,16 +89,6 @@ def projects(self, project_group_id, project_group_name): self.log.info("Collecting data from projects: %s", ",".join(all_project_names)) return projects - def report_project_metrics(self, project_list, project_group_id, project_group_name): - for _, _, project, _ in project_list: - tags = [ - f"project_id:{project.id}", - f"project_name:{project.name}", - f"project_group_id:{project_group_id}", - f"project_group_name:{project_group_name}", - ] - self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + tags) - def _get_new_projects(self, project_group_id): projects_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups/{project_group_id}/projects" response = self.http.get(projects_endpoint) @@ -132,8 +123,7 @@ def _get_space_id(self): self.log.debug("Space id for %s found: %s ", self.config.space, self.space_id) if self.space_id is None: - self.error("Space ID not found for provided space name %s, does it exist?", self.config.space) - raise + raise CheckException(f"Space ID not found for provided space name {self.config.space}, does it exist?") def project_groups(self): if self._project_groups_discovery: diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index cf081e779154f..06c6536e7c87e 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -5,6 +5,7 @@ import copy import logging +import mock import pytest from datadog_checks.dev.http import MockResponse @@ -62,40 +63,38 @@ def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggrega aggregator.assert_all_metrics_covered() -""" -@pytest.mark.parametrize( - 'spaces_config, metric_count, project_group_metrics', - [ - pytest.param(None, 1, PROJECT_GROUP_ALL_METRICS, id="default"), - pytest.param({'include': ['Default']}, 1, PROJECT_GROUP_ALL_METRICS, id="include"), - pytest.param({'include': ['Default'], 'limit': 1}, 1, 
PROJECT_GROUP_ALL_METRICS, id="within limit"), - pytest.param({'include': ['Default'], 'limit': 0}, 0, PROJECT_GROUP_NO_METRICS, id="limit hit"), - pytest.param({'include': ['Default'], 'exclude': ['Default']}, 0, PROJECT_GROUP_NO_METRICS, id="excluded"), - pytest.param( - {'include': ['Default'], 'exclude': ['test']}, 1, PROJECT_GROUP_ALL_METRICS, id="excluded invalid" - ), - ], -) @pytest.mark.usefixtures('mock_http_get') -def test_spaces_discovery(dd_run_check, aggregator, instance, spaces_config, metric_count, project_group_metrics): - instance = copy.deepcopy(instance) - instance['spaces'] = spaces_config +def test_space_invalid(dd_run_check, aggregator, instance): + invalid_space_instance = copy.deepcopy(instance) + invalid_space_instance['space'] = 'test' + check = OctopusDeployCheck('octopus_deploy', {}, [invalid_space_instance]) + with pytest.raises(Exception, match=r'Space ID not found for provided space name test, does it exist'): + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.api.can_connect', 1) + aggregator.assert_all_metrics_covered() + + +@pytest.mark.usefixtures('mock_http_get') +def test_space_cached(dd_run_check, aggregator, instance): check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + check._get_space_id = mock.MagicMock() + check.space_id = "Spaces-1" dd_run_check(check) - tags = ["space_name:Default", "space_id:Spaces-1", "space_slug:default"] - aggregator.assert_metric("octopus_deploy.space.count", count=metric_count, tags=tags) - for metric in project_group_metrics: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) - aggregator.assert_metric("octopus_deploy.api.can_connect") - aggregator.assert_metric("octopus_deploy.project.count", at_least=0) # TODO: assert specific - aggregator.assert_all_metrics_covered() -""" + + assert check._get_space_id.call_count == 0 + aggregator.assert_metric('octopus_deploy.api.can_connect', 1) @pytest.mark.parametrize( 'project_groups_config, 
expected_metrics', [ pytest.param(None, PROJECT_GROUP_ALL_METRICS, id="default"), + pytest.param( + {'include': []}, + PROJECT_GROUP_ALL_METRICS, + id="empty include", + ), pytest.param( {'include': ['test-group']}, PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, @@ -138,6 +137,15 @@ def test_project_groups_discovery(dd_run_check, aggregator, instance, project_gr aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) +@pytest.mark.usefixtures('mock_http_get') +def test_project_groups_discovery_error(dd_run_check, instance): + instance = copy.deepcopy(instance) + instance['project_groups'] = {'include': None} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + with pytest.raises(Exception, match=r'Setting `include` must be an array'): + dd_run_check(check) + + @pytest.mark.parametrize( 'project_groups_config, expected_metrics', [ From f716ef6a02fdfb136ab59794fbee7d00d14925f8 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 17 Sep 2024 15:22:21 -0400 Subject: [PATCH 20/44] move initializations to check init --- .../datadog_checks/octopus_deploy/check.py | 42 +++++++++---------- octopus_deploy/tests/test_unit.py | 4 +- 2 files changed, 21 insertions(+), 25 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 52ea1e411d995..401dd403c706d 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -7,8 +7,8 @@ from datadog_checks.base.errors import CheckException from datadog_checks.base.utils.discovery import Discovery from datadog_checks.base.utils.models.types import copy_raw -from datadog_checks.octopus_deploy.config_models import ConfigMixin +from .config_models import ConfigMixin from .constants import API_UP_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC from .project_groups import Project, ProjectGroup @@ -24,6 +24,8 @@ def __init__(self, name, init_config, 
instances): self.space_id = None space_name = self.instance.get("space") self.base_tags = self.instance.get("tags", []) + [f"space_name:{space_name}"] + self.check_initializations.append(self._get_space_id) + self.check_initializations.append(self._initialize_caches) def _initialize_caches(self): self._initialize_project_groups() @@ -113,14 +115,21 @@ def _get_new_project_groups(self): def _get_space_id(self): spaces_endpoint = f"{self.config.octopus_endpoint}/spaces" - response = self.http.get(spaces_endpoint) - response.raise_for_status() - spaces_json = response.json().get('Items', []) - for space in spaces_json: - space_name = space.get("Name") - if space_name == self.config.space: - self.space_id = space.get("Id") - self.log.debug("Space id for %s found: %s ", self.config.space, self.space_id) + try: + response = self.http.get(spaces_endpoint) + response.raise_for_status() + spaces_json = response.json().get('Items', []) + for space in spaces_json: + space_name = space.get("Name") + if space_name == self.config.space: + self.space_id = space.get("Id") + self.log.debug("Space id for %s found: %s ", self.config.space, self.space_id) + except (Timeout, HTTPError, InvalidURL, ConnectionError): + self.gauge(API_UP_METRIC, 0, tags=self.base_tags) + + raise CheckException(f"Could not connect to octopus API {self.config.octopus_endpoint}octopus_endpoint") + + self.gauge(API_UP_METRIC, 1, tags=self.base_tags) if self.space_id is None: raise CheckException(f"Space ID not found for provided space name {self.config.space}, does it exist?") @@ -145,20 +154,7 @@ def project_groups(self): return project_groups def check(self, _): - try: - response = self.http.get(self.config.octopus_endpoint) - response.raise_for_status() - except (Timeout, HTTPError, InvalidURL, ConnectionError) as e: - self.gauge(API_UP_METRIC, 0, tags=self.base_tags) - self.log.warning( - "Failed to connect to Octopus Deploy endpoint %s: %s", self.config.octopus_endpoint, str(e) - ) - raise - - 
self.gauge(API_UP_METRIC, 1, tags=self.base_tags) - if not self.space_id: - self._get_space_id() - self._initialize_caches() + pass # Discovery class requires 'include' to be a dict, so this function is needed to normalize the config diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 06c6536e7c87e..2855f20fa9d0a 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -41,12 +41,12 @@ def test_check(dd_run_check, aggregator, instance): ('mock_http_get, message'), [ pytest.param( - {'http_error': {'/api': MockResponse(status_code=500)}}, + {'http_error': {'/api/spaces': MockResponse(status_code=500)}}, 'HTTPError: 500 Server Error: None for url: None', id='500', ), pytest.param( - {'http_error': {'/api': MockResponse(status_code=404)}}, + {'http_error': {'/api/spaces': MockResponse(status_code=404)}}, 'HTTPError: 404 Client Error: None for url: None', id='404', ), From 4f007c385e1d25e39deaaa9d5a0ae6cc0f9fa706 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Fri, 27 Sep 2024 16:28:00 -0400 Subject: [PATCH 21/44] add support for task count metrics --- .../datadog_checks/octopus_deploy/check.py | 39 ++++- .../octopus_deploy/project_groups.py | 3 +- octopus_deploy/metadata.csv | 1 + octopus_deploy/tests/conftest.py | 5 + octopus_deploy/tests/constants.py | 62 ++++++- .../response.json | 78 +++++++++ .../response.json | 35 ++++ .../response.json | 162 ++++++++++++++++++ .../response.json | 35 ++++ octopus_deploy/tests/test_unit.py | 33 +++- 10 files changed, 443 insertions(+), 10 deletions(-) create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json create mode 100644 
octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 401dd403c706d..06e789cbf79ff 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -1,6 +1,8 @@ # (C) Datadog, Inc. 2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from datetime import datetime, timedelta + from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout from datadog_checks.base import AgentCheck @@ -31,7 +33,42 @@ def _initialize_caches(self): self._initialize_project_groups() for _, project_group_name, project_group, project_group_config in self.project_groups(): self._initialize_projects(project_group.id, project_group_name, project_group_config) - self.projects(project_group.id, project_group_name) + for _, _, project, _ in self.projects(project_group.id, project_group_name): + self._get_new_tasks_for_project(project, project_group) + + def _get_new_tasks_for_project(self, project, project_group): + self.log.debug("Getting new tasks for project %s", project.name) + params = {'project': project.id, 'fromCompletedDate': project.last_task_time} + url = f"{self.config.octopus_endpoint}/{self.space_id}/tasks" + response = self.http.get(url, params=params) + response.raise_for_status() + tasks_json = response.json().get('Items', []) + new_completed_time = project.last_task_time + self.log.debug("Found %s new tasks for project %s", len(tasks_json), project.name) + + for task in tasks_json: + task_id = task.get("Id") + task_name = task.get("Name") + state = task.get("State") + completed_time = 
task.get("CompletedTime") + + completed_time_converted = datetime.fromisoformat(completed_time) + if completed_time_converted > new_completed_time: + new_completed_time = completed_time_converted + + project_tags = [ + f"project_id:{project.id}", + f"project_name:{project.name}", + f"project_group_id:{project_group.id}", + f"project_group_name:{project_group.name}", + ] + + tags = [f'task_name:{task_name}', f'task_id:{task_id}', f'task_state:{state}'] + + self.gauge("task.count", 1, tags=self.base_tags + project_tags + tags) + + new_completed_time = new_completed_time + timedelta(milliseconds=1) + project.last_completed_time = new_completed_time def _initialize_projects(self, project_group_id, project_group_name, project_group_config): if not self._projects_discovery.get(project_group_name): diff --git a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py index 8880f54a04b56..4c1dc3ff7be38 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py @@ -1,6 +1,7 @@ # (C) Datadog, Inc. 
2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from datadog_checks.base.utils.time import get_current_datetime class ProjectGroup: @@ -15,4 +16,4 @@ def __init__(self, project_json): self.id = project_json.get("Id") self.name = project_json.get("Name") self.last_task_id = None - self.last_task_time = None + self.last_task_time = get_current_datetime() diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index e69feb7afdd90..52dd522a7d78c 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -3,3 +3,4 @@ octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, +octopus_deploy.task.count,gauge,,,,Number of tasks monitored.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index 455be7f3ea29e..882631a0397e2 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -1,6 +1,7 @@ # (C) Datadog, Inc. 
2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +import datetime import json import os from pathlib import Path @@ -10,8 +11,12 @@ import pytest import requests +from datadog_checks.base.utils.time import ensure_aware_datetime from datadog_checks.dev.fs import get_here +BASE_TIME = ensure_aware_datetime(datetime.datetime.strptime("2024-09-23 14:45:58.888492", '%Y-%m-%d %H:%M:%S.%f')) +MOCKED_TIMESTAMPS = [BASE_TIME] * 20 + @pytest.fixture(scope='session') def dd_environment(): diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index a5c2f85d5ef74..1c45803605e7a 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -2,7 +2,7 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.project.count"] +ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.project.count", "octopus_deploy.task.count"] PROJECT_GROUP_ALL_METRICS = [ { @@ -450,3 +450,63 @@ 'count': 0, }, ] + + +TASK_COUNT_METRICS = [ + { + 'name': 'octopus_deploy.task.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1845", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.task.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.task.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", 
+ "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.task.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + "task_id:ServerTasks-1844", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + }, +] diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json new file mode 100644 index 0000000000000..73ad397dbd175 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json @@ -0,0 +1,78 @@ +{ + "ItemType": "Task", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 12, + "Queued": 0, + "Success": 1779, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [ + { + "Id": "ServerTasks-1844", + "SpaceId": "Spaces-1", + "Name": "Deploy", + "Description": "Deploy test-api release 0.1.5 to Development", + "Arguments": { + "DeploymentId": "Deployments-16" + }, + "State": "Success", + "Completed": "Monday, 23 September 2024 2:57:22 PM +00:00", + "QueueTime": "2024-09-23T14:57:18.798+00:00", + "QueueTimeExpiry": null, + "StartTime": "2024-09-23T14:57:19.411+00:00", + "LastUpdatedTime": "2024-09-23T14:57:22.603+00:00", + "CompletedTime": "2024-09-23T14:57:22.603+00:00", + "ServerNode": "OctopusServerNodes-50c3dfbarc82", + "Duration": "3 seconds", + "ErrorMessage": "", + 
"HasBeenPickedUpByProcessor": true, + "IsCompleted": true, + "FinishedSuccessfully": true, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": false, + "UnmetPreconditions": null, + "ProjectId": "Projects-1", + "Links": { + "Self": "/api/tasks/ServerTasks-1844", + "Web": "/app#/Spaces-1/tasks/ServerTasks-1844", + "Raw": "/api/tasks/ServerTasks-1844/raw", + "Rerun": "/api/tasks/rerun/ServerTasks-1844", + "Cancel": "/api/tasks/ServerTasks-1844/cancel", + "State": "/api/tasks/ServerTasks-1844/state", + "BlockedBy": "/api/tasks/ServerTasks-1844/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-1844/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-1844/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-1844/status/messages", + "Prioritize": "/api/tasks/ServerTasks-1844/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-1844", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-1844" + } + } + ], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json new file mode 100644 index 0000000000000..79bccd61a6f32 --- /dev/null +++ 
b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 12, + "Queued": 0, + "Success": 1783, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json new file mode 100644 index 0000000000000..fcefac4dceb19 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json @@ -0,0 +1,162 @@ +{ + "ItemType": "Task", + "TotalResults": 3, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 13, + "Queued": 0, + "Success": 1783, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, 
+ "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [ + { + "Id": "ServerTasks-1847", + "SpaceId": "Spaces-1", + "Name": "Deploy", + "Description": "Deploy test release 0.0.2 to Development", + "Arguments": { + "DeploymentId": "Deployments-19" + }, + "State": "Failed", + "Completed": "Monday, 23 September 2024 3:10:03 PM +00:00", + "QueueTime": "2024-09-23T15:09:56.364+00:00", + "QueueTimeExpiry": null, + "StartTime": "2024-09-23T15:09:56.995+00:00", + "LastUpdatedTime": "2024-09-23T15:10:03.262+00:00", + "CompletedTime": "2024-09-23T15:10:03.262+00:00", + "ServerNode": "OctopusServerNodes-50c3dfbarc82", + "Duration": "6 seconds", + "ErrorMessage": "The deployment failed because one or more steps failed. Please see the deployment log for details.", + "HasBeenPickedUpByProcessor": true, + "IsCompleted": true, + "FinishedSuccessfully": false, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": true, + "UnmetPreconditions": null, + "ProjectId": "Projects-3", + "Links": { + "Self": "/api/tasks/ServerTasks-1847", + "Web": "/app#/Spaces-1/tasks/ServerTasks-1847", + "Raw": "/api/tasks/ServerTasks-1847/raw", + "Rerun": "/api/tasks/rerun/ServerTasks-1847", + "Cancel": "/api/tasks/ServerTasks-1847/cancel", + "State": "/api/tasks/ServerTasks-1847/state", + "BlockedBy": "/api/tasks/ServerTasks-1847/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-1847/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-1847/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-1847/status/messages", + "Prioritize": "/api/tasks/ServerTasks-1847/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-1847", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-1847" + } + }, + { + "Id": "ServerTasks-1846", + "SpaceId": "Spaces-1", + "Name": "Deploy", + "Description": "Deploy test release 0.0.1 to Staging", + "Arguments": { + 
"DeploymentId": "Deployments-18" + }, + "State": "Success", + "Completed": "Monday, 23 September 2024 3:00:28 PM +00:00", + "QueueTime": "2024-09-23T15:00:25.468+00:00", + "QueueTimeExpiry": null, + "StartTime": "2024-09-23T15:00:26.138+00:00", + "LastUpdatedTime": "2024-09-23T15:00:28.272+00:00", + "CompletedTime": "2024-09-23T15:00:28.272+00:00", + "ServerNode": "OctopusServerNodes-50c3dfbarc82", + "Duration": "2 seconds", + "ErrorMessage": "", + "HasBeenPickedUpByProcessor": true, + "IsCompleted": true, + "FinishedSuccessfully": true, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": false, + "UnmetPreconditions": null, + "ProjectId": "Projects-3", + "Links": { + "Self": "/api/tasks/ServerTasks-1846", + "Web": "/app#/Spaces-1/tasks/ServerTasks-1846", + "Raw": "/api/tasks/ServerTasks-1846/raw", + "Rerun": "/api/tasks/rerun/ServerTasks-1846", + "Cancel": "/api/tasks/ServerTasks-1846/cancel", + "State": "/api/tasks/ServerTasks-1846/state", + "BlockedBy": "/api/tasks/ServerTasks-1846/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-1846/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-1846/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-1846/status/messages", + "Prioritize": "/api/tasks/ServerTasks-1846/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-1846", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-1846" + } + }, + { + "Id": "ServerTasks-1845", + "SpaceId": "Spaces-1", + "Name": "Deploy", + "Description": "Deploy test release 0.0.1 to Development", + "Arguments": { + "DeploymentId": "Deployments-17" + }, + "State": "Success", + "Completed": "Monday, 23 September 2024 3:00:21 PM +00:00", + "QueueTime": "2024-09-23T15:00:19.040+00:00", + "QueueTimeExpiry": null, + "StartTime": "2024-09-23T15:00:19.679+00:00", + "LastUpdatedTime": "2024-09-23T15:00:21.752+00:00", + "CompletedTime": "2024-09-23T15:00:21.752+00:00", + "ServerNode": 
"OctopusServerNodes-50c3dfbarc82", + "Duration": "2 seconds", + "ErrorMessage": "", + "HasBeenPickedUpByProcessor": true, + "IsCompleted": true, + "FinishedSuccessfully": true, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": false, + "UnmetPreconditions": null, + "ProjectId": "Projects-3", + "Links": { + "Self": "/api/tasks/ServerTasks-1845", + "Web": "/app#/Spaces-1/tasks/ServerTasks-1845", + "Raw": "/api/tasks/ServerTasks-1845/raw", + "Rerun": "/api/tasks/rerun/ServerTasks-1845", + "Cancel": "/api/tasks/ServerTasks-1845/cancel", + "State": "/api/tasks/ServerTasks-1845/state", + "BlockedBy": "/api/tasks/ServerTasks-1845/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-1845/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-1845/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-1845/status/messages", + "Prioritize": "/api/tasks/ServerTasks-1845/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-1845", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-1845" + } + } + ], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json new file mode 100644 index 0000000000000..aa05b4987d9be --- /dev/null +++ 
b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 3, + "Queued": 0, + "Success": 1763, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 2855f20fa9d0a..c54cf3fd4e828 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -3,7 +3,6 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import copy -import logging import mock import pytest @@ -12,6 +11,7 @@ from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck +from .conftest import MOCKED_TIMESTAMPS from .constants import ( ALL_METRICS, PROJECT_ALL_METRICS, @@ -22,11 +22,13 @@ PROJECT_NO_METRICS, PROJECT_ONLY_HI_METRICS, PROJECT_ONLY_HI_MY_PROJECT_METRICS, + TASK_COUNT_METRICS, ) @pytest.mark.usefixtures('mock_http_get') -def test_check(dd_run_check, aggregator, instance): 
+@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_check(get_current_datetime, dd_run_check, aggregator, instance): check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) @@ -76,7 +78,8 @@ def test_space_invalid(dd_run_check, aggregator, instance): @pytest.mark.usefixtures('mock_http_get') -def test_space_cached(dd_run_check, aggregator, instance): +@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_space_cached(get_current_datetime, dd_run_check, aggregator, instance): check = OctopusDeployCheck('octopus_deploy', {}, [instance]) check._get_space_id = mock.MagicMock() check.space_id = "Spaces-1" @@ -128,7 +131,10 @@ def test_space_cached(dd_run_check, aggregator, instance): ], ) @pytest.mark.usefixtures('mock_http_get') -def test_project_groups_discovery(dd_run_check, aggregator, instance, project_groups_config, expected_metrics): +@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_project_groups_discovery( + get_current_datetime, dd_run_check, aggregator, instance, project_groups_config, expected_metrics +): instance = copy.deepcopy(instance) instance['project_groups'] = project_groups_config check = OctopusDeployCheck('octopus_deploy', {}, [instance]) @@ -138,7 +144,8 @@ def test_project_groups_discovery(dd_run_check, aggregator, instance, project_gr @pytest.mark.usefixtures('mock_http_get') -def test_project_groups_discovery_error(dd_run_check, instance): +@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_project_groups_discovery_error(get_current_datetime, dd_run_check, instance): instance = copy.deepcopy(instance) instance['project_groups'] = {'include': None} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) @@ -186,11 +193,23 @@ def 
test_project_groups_discovery_error(dd_run_check, instance): ], ) @pytest.mark.usefixtures('mock_http_get') -def test_projects_discovery(dd_run_check, aggregator, instance, project_groups_config, expected_metrics, caplog): - caplog.set_level(logging.DEBUG) +@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_projects_discovery( + get_current_datetime, dd_run_check, aggregator, instance, project_groups_config, expected_metrics +): instance = copy.deepcopy(instance) instance['project_groups'] = project_groups_config check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) for metric in expected_metrics: aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_task_metrics(get_current_datetime, dd_run_check, aggregator, instance): + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + dd_run_check(check) + + for metric in TASK_COUNT_METRICS: + aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) From 03393b3617bc0436928d18737fe4f688f02658d6 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 30 Sep 2024 10:31:31 -0400 Subject: [PATCH 22/44] refactor to reference project_group --- .../datadog_checks/octopus_deploy/check.py | 44 +++++++++---------- .../octopus_deploy/project_groups.py | 3 +- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 06e789cbf79ff..6702dfcc265ec 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -31,12 +31,12 @@ def __init__(self, name, init_config, instances): def _initialize_caches(self): 
self._initialize_project_groups() - for _, project_group_name, project_group, project_group_config in self.project_groups(): - self._initialize_projects(project_group.id, project_group_name, project_group_config) - for _, _, project, _ in self.projects(project_group.id, project_group_name): - self._get_new_tasks_for_project(project, project_group) + for _, _, project_group, project_group_config in self.project_groups(): + self._initialize_projects(project_group, project_group_config) + for _, _, project, _ in self.projects(project_group): + self._get_new_tasks_for_project(project) - def _get_new_tasks_for_project(self, project, project_group): + def _get_new_tasks_for_project(self, project): self.log.debug("Getting new tasks for project %s", project.name) params = {'project': project.id, 'fromCompletedDate': project.last_task_time} url = f"{self.config.octopus_endpoint}/{self.space_id}/tasks" @@ -59,8 +59,8 @@ def _get_new_tasks_for_project(self, project, project_group): project_tags = [ f"project_id:{project.id}", f"project_name:{project.name}", - f"project_group_id:{project_group.id}", - f"project_group_name:{project_group.name}", + f"project_group_id:{project.project_group.id}", + f"project_group_name:{project.project_group.name}", ] tags = [f'task_name:{task_name}', f'task_id:{task_id}', f'task_state:{state}'] @@ -70,19 +70,19 @@ def _get_new_tasks_for_project(self, project, project_group): new_completed_time = new_completed_time + timedelta(milliseconds=1) project.last_completed_time = new_completed_time - def _initialize_projects(self, project_group_id, project_group_name, project_group_config): - if not self._projects_discovery.get(project_group_name): + def _initialize_projects(self, project_group, project_group_config): + if not self._projects_discovery.get(project_group.name): normalized_projects = normalize_discover_config_include( self.log, project_group_config.get("projects") if project_group_config else None ) self.log.debug( "Projects discovery for 
project_group %s: %s", - project_group_name, + project_group.name, normalized_projects, ) if normalized_projects: - self._projects_discovery[project_group_name] = Discovery( - lambda: self._get_new_projects(project_group_id), + self._projects_discovery[project_group.name] = Discovery( + lambda: self._get_new_projects(project_group), limit=project_group_config.get('projects').get('limit') if project_group_config else None, include=normalized_projects, exclude=project_group_config.get('projects').get('exclude') if project_group_config else None, @@ -90,7 +90,7 @@ def _initialize_projects(self, project_group_id, project_group_name, project_gro key=lambda project: project.name, ) else: - self._projects_discovery[project_group_name] = None + self._projects_discovery[project_group.name] = None self.log.debug("Discovered projects: %s", self._projects_discovery) @@ -109,18 +109,18 @@ def _initialize_project_groups(self): key=lambda project_group: project_group.name, ) - def projects(self, project_group_id, project_group_name): - if self._projects_discovery.get(project_group_name): - projects = list(self._projects_discovery[project_group_name].get_items()) + def projects(self, project_group): + if self._projects_discovery.get(project_group.name): + projects = list(self._projects_discovery[project_group.name].get_items()) else: - projects = [(None, project.name, project, None) for project in self._get_new_projects(project_group_id)] + projects = [(None, project.name, project, None) for project in self._get_new_projects(project_group)] for _, _, project, _ in projects: tags = [ f"project_id:{project.id}", f"project_name:{project.name}", - f"project_group_id:{project_group_id}", - f"project_group_name:{project_group_name}", + f"project_group_id:{project.project_group.id}", + f"project_group_name:{project.project_group.name}", ] self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + tags) @@ -128,14 +128,14 @@ def projects(self, project_group_id, project_group_name): 
self.log.info("Collecting data from projects: %s", ",".join(all_project_names)) return projects - def _get_new_projects(self, project_group_id): - projects_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups/{project_group_id}/projects" + def _get_new_projects(self, project_group): + projects_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups/{project_group.id}/projects" response = self.http.get(projects_endpoint) response.raise_for_status() projects_json = response.json().get('Items', []) projects = [] for project in projects_json: - new_project = Project(project) + new_project = Project(project, project_group) projects.append(new_project) return projects diff --git a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py index 4c1dc3ff7be38..de6c31a17b9db 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py @@ -12,8 +12,9 @@ def __init__(self, project_group_json): class Project: - def __init__(self, project_json): + def __init__(self, project_json, project_group): self.id = project_json.get("Id") self.name = project_json.get("Name") + self.project_group = project_group self.last_task_id = None self.last_task_time = get_current_datetime() From d22f5fa21709ae21bc842f475881ca6650a87678 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 30 Sep 2024 10:58:10 -0400 Subject: [PATCH 23/44] submit project metrics in check method --- .../datadog_checks/octopus_deploy/check.py | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 6702dfcc265ec..52adc03564365 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -33,8 +33,6 @@ def 
_initialize_caches(self): self._initialize_project_groups() for _, _, project_group, project_group_config in self.project_groups(): self._initialize_projects(project_group, project_group_config) - for _, _, project, _ in self.projects(project_group): - self._get_new_tasks_for_project(project) def _get_new_tasks_for_project(self, project): self.log.debug("Getting new tasks for project %s", project.name) @@ -115,18 +113,27 @@ def projects(self, project_group): else: projects = [(None, project.name, project, None) for project in self._get_new_projects(project_group)] + return projects + + def collect_project_metrics(self, project_group): + + project_group_tags = [ + f"project_group_id:{project_group.id}", + f"project_group_name:{project_group.name}", + ] + self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=self.base_tags + project_group_tags) + + projects = self.projects(project_group) + for _, _, project, _ in projects: - tags = [ + project_tags = [ f"project_id:{project.id}", f"project_name:{project.name}", - f"project_group_id:{project.project_group.id}", - f"project_group_name:{project.project_group.name}", ] - self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + tags) + self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + project_group_tags + project_tags) all_project_names = [project.name for _, _, project, _ in projects] self.log.info("Collecting data from projects: %s", ",".join(all_project_names)) - return projects def _get_new_projects(self, project_group): projects_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups/{project_group.id}/projects" @@ -178,20 +185,13 @@ def project_groups(self): project_groups = [ (None, project_groups.name, project_groups, None) for project_groups in self._get_new_project_groups() ] - - for _, project_group_name, project_group, _ in project_groups: - tags = [ - f"project_group_id:{project_group.id}", - f"project_group_name:{project_group_name}", - ] - self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, 
tags=self.base_tags + tags) - - all_project_group_names = [project_group.name for _, _, project_group, _ in project_groups] - self.log.info("Collecting data from project_groups: %s", ",".join(all_project_group_names)) return project_groups def check(self, _): - pass + for _, _, project_group, _ in self.project_groups(): + self.collect_project_metrics(project_group) + for _, _, project, _ in self.projects(project_group): + self._get_new_tasks_for_project(project) # Discovery class requires 'include' to be a dict, so this function is needed to normalize the config From 37e67528da905bb0ff7ca6682c5d241b2f2060ac Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 30 Sep 2024 15:03:19 -0400 Subject: [PATCH 24/44] Add handle_error wrapper and add more logging --- .../datadog_checks/octopus_deploy/check.py | 16 +++++-- .../datadog_checks/octopus_deploy/error.py | 23 ++++++++++ octopus_deploy/tests/conftest.py | 20 ++++---- octopus_deploy/tests/constants.py | 46 +++++++++++++++++++ octopus_deploy/tests/test_unit.py | 30 ++++++++++++ 5 files changed, 122 insertions(+), 13 deletions(-) create mode 100644 octopus_deploy/datadog_checks/octopus_deploy/error.py diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 52adc03564365..f1c6255dbf8ab 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -12,6 +12,7 @@ from .config_models import ConfigMixin from .constants import API_UP_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC +from .error import handle_error from .project_groups import Project, ProjectGroup @@ -34,6 +35,7 @@ def _initialize_caches(self): for _, _, project_group, project_group_config in self.project_groups(): self._initialize_projects(project_group, project_group_config) + @handle_error def _get_new_tasks_for_project(self, project): self.log.debug("Getting new tasks for project %s", project.name) params = 
{'project': project.id, 'fromCompletedDate': project.last_task_time} @@ -116,7 +118,6 @@ def projects(self, project_group): return projects def collect_project_metrics(self, project_group): - project_group_tags = [ f"project_group_id:{project_group.id}", f"project_group_name:{project_group.name}", @@ -124,6 +125,10 @@ def collect_project_metrics(self, project_group): self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=self.base_tags + project_group_tags) projects = self.projects(project_group) + all_project_names = [project.name for _, _, project, _ in projects] + self.log.info( + "Collecting data from project group: %s, for projects: %s", project_group.name, ",".join(all_project_names) + ) for _, _, project, _ in projects: project_tags = [ @@ -132,9 +137,6 @@ def collect_project_metrics(self, project_group): ] self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + project_group_tags + project_tags) - all_project_names = [project.name for _, _, project, _ in projects] - self.log.info("Collecting data from projects: %s", ",".join(all_project_names)) - def _get_new_projects(self, project_group): projects_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups/{project_group.id}/projects" response = self.http.get(projects_endpoint) @@ -155,6 +157,9 @@ def _get_new_project_groups(self): for project_group in project_groups_json: new_project_group = ProjectGroup(project_group) project_groups.append(new_project_group) + + all_project_group_names = [project_group.name for project_group in project_groups] + self.log.debug("Found new project groups: %s", all_project_group_names) return project_groups def _get_space_id(self): @@ -207,7 +212,8 @@ def normalize_discover_config_include(log, config): for entry in include_list: if isinstance(entry, str): normalized_config[entry] = None - elif isinstance(entry, dict): + # entry is dict + else: for key, value in entry.items(): normalized_config[key] = value.copy() return normalized_config diff --git 
a/octopus_deploy/datadog_checks/octopus_deploy/error.py b/octopus_deploy/datadog_checks/octopus_deploy/error.py new file mode 100644 index 0000000000000..0fc6840589693 --- /dev/null +++ b/octopus_deploy/datadog_checks/octopus_deploy/error.py @@ -0,0 +1,23 @@ +# (C) Datadog, Inc. 2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +from functools import wraps + +import requests + + +def handle_error(f): + @wraps(f) + def wrapper(check, *args, **kwargs): + try: + result = f(check, *args, **kwargs) + return result + except requests.exceptions.RequestException as e: + check.log.info( + "Encountered a RequestException in '%s' [%s]: %s", + f.__name__, + type(e), + e, + ) + + return wrapper diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index 882631a0397e2..183a685720c2c 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -68,10 +68,6 @@ def create_responses_tree(): def method(method, url, file='response', headers=None, params=None): filename = file request_path = url - request_path = request_path.replace('?', '/') - if params: - param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items()) - request_path = f'{url}/{param_string}' response = responses_map.get(method, {}).get(request_path, {}).get(filename) return response @@ -104,12 +100,20 @@ def mock_http_get(request, monkeypatch, mock_http_call): def get(url, *args, **kwargs): method = 'GET' url = get_url_path(url) - if http_error and url in http_error: - return http_error[url] + request_path = url.replace('?', '/') + params = kwargs.get('params') + if params: + param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items()) + request_path = f'{url}/{param_string}' + + print(request_path) + if http_error and request_path in http_error: + return http_error[request_path] + mock_status_code = mock.MagicMock(return_value=200) headers = kwargs.get('headers') - params = 
kwargs.get('params') - mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params)) + + mock_json = mock.MagicMock(return_value=mock_http_call(method, request_path, headers=headers)) return mock.MagicMock(json=mock_json, status_code=mock_status_code) mock_get = mock.MagicMock(side_effect=get) diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 1c45803605e7a..f70c4f3b2caf4 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -510,3 +510,49 @@ 'count': 1, }, ] + + +TASK_COUNT_METRICS_NO_PROJECT_1 = [ + { + 'name': 'octopus_deploy.task.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1845", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.task.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + }, + { + 'name': 'octopus_deploy.task.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + }, +] diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index c54cf3fd4e828..04d9cfa12d814 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -3,6 +3,7 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import copy +import logging import mock import pytest @@ -23,6 +24,7 @@ PROJECT_ONLY_HI_METRICS, PROJECT_ONLY_HI_MY_PROJECT_METRICS, TASK_COUNT_METRICS, + 
TASK_COUNT_METRICS_NO_PROJECT_1, ) @@ -213,3 +215,31 @@ def test_task_metrics(get_current_datetime, dd_run_check, aggregator, instance): for metric in TASK_COUNT_METRICS: aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + + +@pytest.mark.parametrize( + ('mock_http_get, message'), + [ + pytest.param( + { + 'http_error': { + '/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 ' + '14:45:58.888492+00:00': MockResponse(status_code=404) + } + }, + 'Encountered a RequestException in \'_get_new_tasks_for_project\'', + id='404', + ), + ], + indirect=['mock_http_get'], +) +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_exception_when_getting_tasks(get_current_datetime, dd_run_check, aggregator, instance, message, caplog): + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + caplog.set_level(logging.INFO) + dd_run_check(check) + assert message in caplog.text + + for metric in PROJECT_GROUP_ALL_METRICS + PROJECT_ALL_METRICS + TASK_COUNT_METRICS_NO_PROJECT_1: + aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) From 90990f94cf63cfbd47e9d5027ece96a9ec29d6a3 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 30 Sep 2024 15:21:48 -0400 Subject: [PATCH 25/44] bump python version --- octopus_deploy/hatch.toml | 2 +- octopus_deploy/pyproject.toml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/octopus_deploy/hatch.toml b/octopus_deploy/hatch.toml index 001e43ce25414..c85c5f07a7df2 100644 --- a/octopus_deploy/hatch.toml +++ b/octopus_deploy/hatch.toml @@ -1,4 +1,4 @@ [env.collectors.datadog-checks] [[envs.default.matrix]] -python = ["3.11"] +python = ["3.12"] diff --git a/octopus_deploy/pyproject.toml b/octopus_deploy/pyproject.toml index 5a56841f9adfb..d0844486315b1 100644 --- a/octopus_deploy/pyproject.toml +++ b/octopus_deploy/pyproject.toml 
@@ -9,7 +9,7 @@ name = "datadog-octopus-deploy" description = "The Octopus Deploy check" readme = "README.md" license = "BSD-3-Clause" -requires-python = ">=3.11" +requires-python = ">=3.12" keywords = [ "datadog", "datadog agent", @@ -25,11 +25,11 @@ classifiers = [ "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Private :: Do Not Upload", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Monitoring", ] dependencies = [ - "datadog-checks-base>=32.6.0", + "datadog-checks-base>=37.0.0", ] dynamic = [ "version", From c6590184d20c55aaf8cd4ed4eb082a5659c34d93 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 30 Sep 2024 15:29:17 -0400 Subject: [PATCH 26/44] Add e2e test --- octopus_deploy/tests/test_e2e.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 octopus_deploy/tests/test_e2e.py diff --git a/octopus_deploy/tests/test_e2e.py b/octopus_deploy/tests/test_e2e.py new file mode 100644 index 0000000000000..64e77855cef82 --- /dev/null +++ b/octopus_deploy/tests/test_e2e.py @@ -0,0 +1,10 @@ +# (C) Datadog, Inc. 
2024-present +# All rights reserved +# Licensed under a 3-clause BSD style license (see LICENSE) +import pytest + +@pytest.mark.e2e +def test_e2e(dd_agent_check, instance): + aggregator = dd_agent_check(instance, rate=True) + + aggregator.assert_metric('octopus_deploy.api.can_connect', 0) \ No newline at end of file From 21e8ff80f5f187ef88c4ea9607e217cfcf52b5ad Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 30 Sep 2024 15:30:54 -0400 Subject: [PATCH 27/44] lint --- octopus_deploy/tests/test_e2e.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/octopus_deploy/tests/test_e2e.py b/octopus_deploy/tests/test_e2e.py index 64e77855cef82..bb7c8a57a8940 100644 --- a/octopus_deploy/tests/test_e2e.py +++ b/octopus_deploy/tests/test_e2e.py @@ -3,8 +3,9 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import pytest + @pytest.mark.e2e def test_e2e(dd_agent_check, instance): aggregator = dd_agent_check(instance, rate=True) - aggregator.assert_metric('octopus_deploy.api.can_connect', 0) \ No newline at end of file + aggregator.assert_metric('octopus_deploy.api.can_connect', 0) From 83ab4ae6e3ad313192004195218456b1924191fd Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 1 Oct 2024 11:41:19 -0400 Subject: [PATCH 28/44] add http template to config spec --- octopus_deploy/assets/configuration/spec.yaml | 3 + .../octopus_deploy/config_models/defaults.py | 64 ++++ .../octopus_deploy/config_models/instance.py | 53 +++ .../octopus_deploy/config_models/shared.py | 17 +- .../octopus_deploy/data/conf.yaml.example | 343 ++++++++++++++++++ 5 files changed, 479 insertions(+), 1 deletion(-) diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index 388ad65b26bef..74213acbbee0f 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -5,6 +5,7 @@ files: - template: init_config options: - template: init_config/default + - template: 
init_config/http - template: instances options: - name: octopus_endpoint @@ -47,3 +48,5 @@ files: type: integer example: {} - template: instances/default + - template: instances/http + diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py index 4d46152df5d40..c09eec5573003 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/defaults.py @@ -8,6 +8,22 @@ # ddev -x validate models -s +def shared_skip_proxy(): + return False + + +def shared_timeout(): + return 10 + + +def instance_allow_redirects(): + return True + + +def instance_auth_type(): + return 'basic' + + def instance_disable_generic_tags(): return False @@ -16,5 +32,53 @@ def instance_empty_default_hostname(): return False +def instance_kerberos_auth(): + return 'disabled' + + +def instance_kerberos_delegate(): + return False + + +def instance_kerberos_force_initiate(): + return False + + +def instance_log_requests(): + return False + + def instance_min_collection_interval(): return 15 + + +def instance_persist_connections(): + return False + + +def instance_request_size(): + return 16 + + +def instance_skip_proxy(): + return False + + +def instance_timeout(): + return 10 + + +def instance_tls_ignore_warning(): + return False + + +def instance_tls_use_host_header(): + return False + + +def instance_tls_verify(): + return True + + +def instance_use_legacy_auth_encoding(): + return True diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py index 9394d0f897617..daf4d9c57ee21 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py @@ -20,6 +20,15 @@ from . 
import defaults, validators +class AuthToken(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + frozen=True, + ) + reader: Optional[MappingProxyType[str, Any]] = None + writer: Optional[MappingProxyType[str, Any]] = None + + class MetricPatterns(BaseModel): model_config = ConfigDict( arbitrary_types_allowed=True, @@ -40,21 +49,65 @@ class ProjectGroups(BaseModel): limit: Optional[int] = Field(None, description='Maximum number of project groups to be processed.\n') +class Proxy(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + frozen=True, + ) + http: Optional[str] = None + https: Optional[str] = None + no_proxy: Optional[tuple[str, ...]] = None + + class InstanceConfig(BaseModel): model_config = ConfigDict( validate_default=True, arbitrary_types_allowed=True, frozen=True, ) + allow_redirects: Optional[bool] = None + auth_token: Optional[AuthToken] = None + auth_type: Optional[str] = None + aws_host: Optional[str] = None + aws_region: Optional[str] = None + aws_service: Optional[str] = None + connect_timeout: Optional[float] = None disable_generic_tags: Optional[bool] = None empty_default_hostname: Optional[bool] = None + extra_headers: Optional[MappingProxyType[str, Any]] = None + headers: Optional[MappingProxyType[str, Any]] = None + kerberos_auth: Optional[str] = None + kerberos_cache: Optional[str] = None + kerberos_delegate: Optional[bool] = None + kerberos_force_initiate: Optional[bool] = None + kerberos_hostname: Optional[str] = None + kerberos_keytab: Optional[str] = None + kerberos_principal: Optional[str] = None + log_requests: Optional[bool] = None metric_patterns: Optional[MetricPatterns] = None min_collection_interval: Optional[float] = None + ntlm_domain: Optional[str] = None octopus_endpoint: str + password: Optional[str] = None + persist_connections: Optional[bool] = None project_groups: Optional[ProjectGroups] = None + proxy: Optional[Proxy] = None + read_timeout: Optional[float] = None + 
request_size: Optional[float] = None service: Optional[str] = None + skip_proxy: Optional[bool] = None space: str tags: Optional[tuple[str, ...]] = None + timeout: Optional[float] = None + tls_ca_cert: Optional[str] = None + tls_cert: Optional[str] = None + tls_ignore_warning: Optional[bool] = None + tls_private_key: Optional[str] = None + tls_protocols_allowed: Optional[tuple[str, ...]] = None + tls_use_host_header: Optional[bool] = None + tls_verify: Optional[bool] = None + use_legacy_auth_encoding: Optional[bool] = None + username: Optional[str] = None @model_validator(mode='before') def _initial_validation(cls, values): diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py index e39d447dfc4b9..0e8a9ecab10a2 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/shared.py @@ -16,7 +16,17 @@ from datadog_checks.base.utils.functions import identity from datadog_checks.base.utils.models import validation -from . import validators +from . 
import defaults, validators + + +class Proxy(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + frozen=True, + ) + http: Optional[str] = None + https: Optional[str] = None + no_proxy: Optional[tuple[str, ...]] = None class SharedConfig(BaseModel): @@ -25,7 +35,10 @@ class SharedConfig(BaseModel): arbitrary_types_allowed=True, frozen=True, ) + proxy: Optional[Proxy] = None service: Optional[str] = None + skip_proxy: Optional[bool] = None + timeout: Optional[float] = None @model_validator(mode='before') def _initial_validation(cls, values): @@ -37,6 +50,8 @@ def _validate(cls, value, info): field_name = field.alias or info.field_name if field_name in info.context['configured_fields']: value = getattr(validators, f'shared_{info.field_name}', identity)(value, field=field) + else: + value = getattr(defaults, f'shared_{info.field_name}', lambda: value)() return validation.utils.make_immutable(value) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index 9b6fbf0d6b93a..d4249d9651a03 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -9,6 +9,38 @@ init_config: # # service: + ## @param proxy - mapping - optional + ## Set HTTP or HTTPS proxies for all instances. Use the `no_proxy` list + ## to specify hosts that must bypass proxies. + ## + ## The SOCKS protocol is also supported like so: + ## + ## socks5://user:pass@host:port + ## + ## Using the scheme `socks5` causes the DNS resolution to happen on the + ## client, rather than on the proxy server. This is in line with `curl`, + ## which uses the scheme to decide whether to do the DNS resolution on + ## the client or proxy. If you want to resolve the domains on the proxy + ## server, use `socks5h` as the scheme. 
+ # + # proxy: + # http: http://: + # https: https://: + # no_proxy: + # - + # - + + ## @param skip_proxy - boolean - optional - default: false + ## If set to `true`, this makes the check bypass any proxy + ## settings enabled and attempt to reach services directly. + # + # skip_proxy: false + + ## @param timeout - number - optional - default: 10 + ## The timeout for connecting to services. + # + # timeout: 10 + ## Every instance is scheduled independently of the others. # instances: @@ -68,3 +100,314 @@ instances: # - # exclude: # - + + ## @param proxy - mapping - optional + ## This overrides the `proxy` setting in `init_config`. + ## + ## Set HTTP or HTTPS proxies for this instance. Use the `no_proxy` list + ## to specify hosts that must bypass proxies. + ## + ## The SOCKS protocol is also supported, for example: + ## + ## socks5://user:pass@host:port + ## + ## Using the scheme `socks5` causes the DNS resolution to happen on the + ## client, rather than on the proxy server. This is in line with `curl`, + ## which uses the scheme to decide whether to do the DNS resolution on + ## the client or proxy. If you want to resolve the domains on the proxy + ## server, use `socks5h` as the scheme. + # + # proxy: + # http: http://: + # https: https://: + # no_proxy: + # - + # - + + ## @param skip_proxy - boolean - optional - default: false + ## This overrides the `skip_proxy` setting in `init_config`. + ## + ## If set to `true`, this makes the check bypass any proxy + ## settings enabled and attempt to reach services directly. + # + # skip_proxy: false + + ## @param auth_type - string - optional - default: basic + ## The type of authentication to use. 
The available types (and related options) are: + ## + ## - basic + ## |__ username + ## |__ password + ## |__ use_legacy_auth_encoding + ## - digest + ## |__ username + ## |__ password + ## - ntlm + ## |__ ntlm_domain + ## |__ password + ## - kerberos + ## |__ kerberos_auth + ## |__ kerberos_cache + ## |__ kerberos_delegate + ## |__ kerberos_force_initiate + ## |__ kerberos_hostname + ## |__ kerberos_keytab + ## |__ kerberos_principal + ## - aws + ## |__ aws_region + ## |__ aws_host + ## |__ aws_service + ## + ## The `aws` auth type relies on boto3 to automatically gather AWS credentials, for example: from `.aws/credentials`. + ## Details: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#configuring-credentials + # + # auth_type: basic + + ## @param use_legacy_auth_encoding - boolean - optional - default: true + ## When `auth_type` is set to `basic`, this determines whether to encode as `latin1` rather than `utf-8`. + # + # use_legacy_auth_encoding: true + + ## @param username - string - optional + ## The username to use if services are behind basic or digest auth. + # + # username: + + ## @param password - string - optional + ## The password to use if services are behind basic or NTLM auth. + # + # password: + + ## @param ntlm_domain - string - optional + ## If your services use NTLM authentication, specify + ## the domain used in the check. For NTLM Auth, append + ## the username to domain, not as the `username` parameter. + # + # ntlm_domain: \ + + ## @param kerberos_auth - string - optional - default: disabled + ## If your services use Kerberos authentication, you can specify the Kerberos + ## strategy to use between: + ## + ## - required + ## - optional + ## - disabled + ## + ## See https://github.com/requests/requests-kerberos#mutual-authentication + # + # kerberos_auth: disabled + + ## @param kerberos_cache - string - optional + ## Sets the KRB5CCNAME environment variable. 
+ ## It should point to a credential cache with a valid TGT. + # + # kerberos_cache: + + ## @param kerberos_delegate - boolean - optional - default: false + ## Set to `true` to enable Kerberos delegation of credentials to a server that requests delegation. + ## + ## See https://github.com/requests/requests-kerberos#delegation + # + # kerberos_delegate: false + + ## @param kerberos_force_initiate - boolean - optional - default: false + ## Set to `true` to preemptively initiate the Kerberos GSS exchange and + ## present a Kerberos ticket on the initial request (and all subsequent). + ## + ## See https://github.com/requests/requests-kerberos#preemptive-authentication + # + # kerberos_force_initiate: false + + ## @param kerberos_hostname - string - optional + ## Override the hostname used for the Kerberos GSS exchange if its DNS name doesn't + ## match its Kerberos hostname, for example: behind a content switch or load balancer. + ## + ## See https://github.com/requests/requests-kerberos#hostname-override + # + # kerberos_hostname: + + ## @param kerberos_principal - string - optional + ## Set an explicit principal, to force Kerberos to look for a + ## matching credential cache for the named user. + ## + ## See https://github.com/requests/requests-kerberos#explicit-principal + # + # kerberos_principal: + + ## @param kerberos_keytab - string - optional + ## Set the path to your Kerberos key tab file. + # + # kerberos_keytab: + + ## @param auth_token - mapping - optional + ## This allows for the use of authentication information from dynamic sources. + ## Both a reader and writer must be configured. + ## + ## The available readers are: + ## + ## - type: file + ## path (required): The absolute path for the file to read from. + ## pattern: A regular expression pattern with a single capture group used to find the + ## token rather than using the entire file, for example: Your secret is (.+) + ## - type: oauth + ## url (required): The token endpoint. 
+ ## client_id (required): The client identifier. + ## client_secret (required): The client secret. + ## basic_auth: Whether the provider expects credentials to be transmitted in + ## an HTTP Basic Auth header. The default is: false + ## options: Mapping of additional options to pass to the provider, such as the audience + ## or the scope. For example: + ## options: + ## audience: https://example.com + ## scope: read:example + ## + ## The available writers are: + ## + ## - type: header + ## name (required): The name of the field, for example: Authorization + ## value: The template value, for example `Bearer `. The default is: + ## placeholder: The substring in `value` to replace with the token, defaults to: + # + # auth_token: + # reader: + # type: + # : + # : + # writer: + # type: + # : + # : + + ## @param aws_region - string - optional + ## If your services require AWS Signature Version 4 signing, set the region. + ## + ## See https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + # + # aws_region: + + ## @param aws_host - string - optional + ## If your services require AWS Signature Version 4 signing, set the host. + ## This only needs the hostname and does not require the protocol (HTTP, HTTPS, and more). + ## For example, if connecting to https://us-east-1.amazonaws.com/, set `aws_host` to `us-east-1.amazonaws.com`. + ## + ## Note: This setting is not necessary for official integrations. + ## + ## See https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + # + # aws_host: + + ## @param aws_service - string - optional + ## If your services require AWS Signature Version 4 signing, set the service code. For a list + ## of available service codes, see https://docs.aws.amazon.com/general/latest/gr/rande.html + ## + ## Note: This setting is not necessary for official integrations. 
+ ## + ## See https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html + # + # aws_service: + + ## @param tls_verify - boolean - optional - default: true + ## Instructs the check to validate the TLS certificate of services. + # + # tls_verify: true + + ## @param tls_use_host_header - boolean - optional - default: false + ## If a `Host` header is set, this enables its use for SNI (matching against the TLS certificate CN or SAN). + # + # tls_use_host_header: false + + ## @param tls_ignore_warning - boolean - optional - default: false + ## If `tls_verify` is disabled, security warnings are logged by the check. + ## Disable those by setting `tls_ignore_warning` to true. + # + # tls_ignore_warning: false + + ## @param tls_cert - string - optional + ## The path to a single file in PEM format containing a certificate as well as any + ## number of CA certificates needed to establish the certificate's authenticity for + ## use when connecting to services. It may also contain an unencrypted private key to use. + # + # tls_cert: + + ## @param tls_private_key - string - optional + ## The unencrypted private key to use for `tls_cert` when connecting to services. This is + ## required if `tls_cert` is set and it does not already contain a private key. + # + # tls_private_key: + + ## @param tls_ca_cert - string - optional + ## The path to a file of concatenated CA certificates in PEM format or a directory + ## containing several CA certificates in PEM format. If a directory, the directory + ## must have been processed using the `openssl rehash` command. See: + ## https://www.openssl.org/docs/man3.2/man1/c_rehash.html + # + # tls_ca_cert: + + ## @param tls_protocols_allowed - list of strings - optional + ## The expected versions of TLS/SSL when fetching intermediate certificates. + ## Only `SSLv3`, `TLSv1.2`, `TLSv1.3` are allowed by default. 
The possible values are: + ## SSLv3 + ## TLSv1 + ## TLSv1.1 + ## TLSv1.2 + ## TLSv1.3 + # + # tls_protocols_allowed: + # - SSLv3 + # - TLSv1.2 + # - TLSv1.3 + + ## @param headers - mapping - optional + ## The headers parameter allows you to send specific headers with every request. + ## You can use it for explicitly specifying the host header or adding headers for + ## authorization purposes. + ## + ## This overrides any default headers. + # + # headers: + # Host: + # X-Auth-Token: + + ## @param extra_headers - mapping - optional + ## Additional headers to send with every request. + # + # extra_headers: + # Host: + # X-Auth-Token: + + ## @param timeout - number - optional - default: 10 + ## The timeout for accessing services. + ## + ## This overrides the `timeout` setting in `init_config`. + # + # timeout: 10 + + ## @param connect_timeout - number - optional + ## The connect timeout for accessing services. Defaults to `timeout`. + # + # connect_timeout: + + ## @param read_timeout - number - optional + ## The read timeout for accessing services. Defaults to `timeout`. + # + # read_timeout: + + ## @param request_size - number - optional - default: 16 + ## The number of kibibytes (KiB) to read from streaming HTTP responses at a time. + # + # request_size: 16 + + ## @param log_requests - boolean - optional - default: false + ## Whether or not to debug log the HTTP(S) requests made, including the method and URL. + # + # log_requests: false + + ## @param persist_connections - boolean - optional - default: false + ## Whether or not to persist cookies and use connection pooling for improved performance. + # + # persist_connections: false + + ## @param allow_redirects - boolean - optional - default: true + ## Whether or not to allow URL redirection. 
+ # + # allow_redirects: true From 416fb69c48f83025ee7ac4d5dd562246a2b700c2 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 1 Oct 2024 14:49:28 -0400 Subject: [PATCH 29/44] refactor test files and add caddy e2e --- octopus_deploy/tests/conftest.py | 18 ++- octopus_deploy/tests/constants.py | 13 ++ octopus_deploy/tests/docker/Caddyfile | 122 ++++++++++++++++++ .../tests/docker/docker-compose.yaml | 10 ++ .../response.json | 0 .../response.json | 0 .../response.json | 0 .../response.json | 0 octopus_deploy/tests/test_e2e.py | 9 +- octopus_deploy/tests/test_unit.py | 2 +- 10 files changed, 164 insertions(+), 10 deletions(-) create mode 100644 octopus_deploy/tests/docker/Caddyfile create mode 100644 octopus_deploy/tests/docker/docker-compose.yaml rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/{fromCompletedDate=2024-09-23 14:45:58.888492+00:00 => fromCompletedDate=2024-09-2314:45:58.888492+00:00}/response.json (100%) rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/{fromCompletedDate=2024-09-23 14:45:58.888492+00:00 => fromCompletedDate=2024-09-2314:45:58.888492+00:00}/response.json (100%) rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/{fromCompletedDate=2024-09-23 14:45:58.888492+00:00 => fromCompletedDate=2024-09-2314:45:58.888492+00:00}/response.json (100%) rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/{fromCompletedDate=2024-09-23 14:45:58.888492+00:00 => fromCompletedDate=2024-09-2314:45:58.888492+00:00}/response.json (100%) diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index 183a685720c2c..a0db85f095866 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -1,7 +1,6 @@ # (C) Datadog, Inc. 
2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -import datetime import json import os from pathlib import Path @@ -11,16 +10,23 @@ import pytest import requests -from datadog_checks.base.utils.time import ensure_aware_datetime +from datadog_checks.dev import docker_run +from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints from datadog_checks.dev.fs import get_here -BASE_TIME = ensure_aware_datetime(datetime.datetime.strptime("2024-09-23 14:45:58.888492", '%Y-%m-%d %H:%M:%S.%f')) -MOCKED_TIMESTAMPS = [BASE_TIME] * 20 +from .constants import COMPOSE_FILE, INSTANCE @pytest.fixture(scope='session') def dd_environment(): - yield + compose_file = COMPOSE_FILE + endpoint = INSTANCE["octopus_endpoint"] + conditions = [ + CheckDockerLogs(identifier='octopus-api', patterns=['server running']), + CheckEndpoints(f'{endpoint}/spaces'), + ] + with docker_run(compose_file, conditions=conditions): + yield INSTANCE @pytest.fixture @@ -106,7 +112,7 @@ def get(url, *args, **kwargs): param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items()) request_path = f'{url}/{param_string}' - print(request_path) + request_path = request_path.replace(" ") if http_error and request_path in http_error: return http_error[request_path] diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index f70c4f3b2caf4..361a39d524c60 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -1,6 +1,19 @@ # (C) Datadog, Inc. 
2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +import datetime +import os + +from datadog_checks.base.utils.time import ensure_aware_datetime +from datadog_checks.dev.fs import get_here + +COMPOSE_FILE = os.path.join(get_here(), 'docker', 'docker-compose.yaml') +INSTANCE = {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'} + + +BASE_TIME = ensure_aware_datetime(datetime.datetime.strptime("2024-09-23 14:45:58.888492", '%Y-%m-%d %H:%M:%S.%f')) +MOCKED_TIMESTAMPS = [BASE_TIME] * 20 + ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.project.count", "octopus_deploy.task.count"] diff --git a/octopus_deploy/tests/docker/Caddyfile b/octopus_deploy/tests/docker/Caddyfile new file mode 100644 index 0000000000000..eca9a9706cf02 --- /dev/null +++ b/octopus_deploy/tests/docker/Caddyfile @@ -0,0 +1,122 @@ +{ + debug + admin :2019 +} +:8080 { + root * /usr/share/caddy/ + @get_tasks_project_1 { + method GET + path /api/Spaces-1/tasks* + expression {uri}.contains('?') + expression {uri}.contains('Projects-1') + } + route @get_tasks_project_1 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + file_server + } + @get_tasks_project_2 { + method GET + path /api/Spaces-1/tasks* + expression {uri}.contains('?') + expression {uri}.contains('Projects-2') + } + route @get_tasks_project_2 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + file_server + } + @get_tasks_project_3 { + method GET + path /api/Spaces-1/tasks* + expression {uri}.contains('?') + expression {uri}.contains('Projects-3') + } + route @get_tasks_project_3 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + file_server + } + @get_tasks_project_4 { + method GET + path /api/Spaces-1/tasks* + expression {uri}.contains('?') + 
expression {uri}.contains('Projects-4') + } + route @get_tasks_project_4 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + file_server + } + @get_spaces { + method GET + path /api/spaces + } + route @get_spaces { + rewrite * /GET/api/spaces/response.json + file_server + } + @get_spaces_1 { + method GET + path /api/Spaces-1 + } + route @get_spaces_1 { + rewrite * /GET/api/Spaces-1/response.json + file_server + } + @get_projectgroups { + method GET + path /api/Spaces-1/projectgroups + } + route @get_projectgroups { + rewrite * /GET/api/Spaces-1/projectgroups/response.json + file_server + } + + @get_projectgroups_1 { + method GET + path /api/Spaces-1/projectgroups/ProjectGroups-1 + } + route @get_projectgroups_1 { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/response.json + file_server + } + @get_projectgroups_2 { + method GET + path /api/Spaces-1/projectgroups/ProjectGroups-2 + } + route @get_projectgroups_2 { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/response.json + file_server + } + @get_projectgroups_3 { + method GET + path /api/Spaces-1/projectgroups/ProjectGroups-3 + } + route @get_projectgroups_3 { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/response.json + file_server + } + @get_projectgroups_1_projects { + method GET + path /api/Spaces-1/projectgroups/ProjectGroups-1/projects + } + route @get_projectgroups_1_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json + file_server + } + @get_projectgroups_2_projects { + method GET + path /api/Spaces-1/projectgroups/ProjectGroups-2/projects + } + route @get_projectgroups_2_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json + file_server + } + @get_projectgroups_3_projects { + method GET + path /api/Spaces-1/projectgroups/ProjectGroups-3/projects + } + route @get_projectgroups_3_projects { + rewrite * 
/GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json + file_server + } + + file_server browse +} diff --git a/octopus_deploy/tests/docker/docker-compose.yaml b/octopus_deploy/tests/docker/docker-compose.yaml new file mode 100644 index 0000000000000..b4ccb78364e14 --- /dev/null +++ b/octopus_deploy/tests/docker/docker-compose.yaml @@ -0,0 +1,10 @@ +services: + + octopus-api: + image: caddy:2.7 + container_name: octopus-api + ports: + - "80:8080" + volumes: + - ./Caddyfile:/etc/caddy/Caddyfile + - ../fixtures/:/usr/share/caddy \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json 
b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json diff --git a/octopus_deploy/tests/test_e2e.py b/octopus_deploy/tests/test_e2e.py index bb7c8a57a8940..06aa09b082576 100644 --- a/octopus_deploy/tests/test_e2e.py +++ b/octopus_deploy/tests/test_e2e.py @@ -2,10 +2,13 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import pytest - +from .constants import ALL_METRICS @pytest.mark.e2e def test_e2e(dd_agent_check, instance): - aggregator = dd_agent_check(instance, rate=True) + aggregator = dd_agent_check(instance) + + aggregator.assert_metric('octopus_deploy.api.can_connect', 1, tags=['space_name:Default']) + for metric in ALL_METRICS: + aggregator.assert_metric(metric) - aggregator.assert_metric('octopus_deploy.api.can_connect', 0) diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 04d9cfa12d814..9de00bbaf4128 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -12,9 
+12,9 @@ from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck -from .conftest import MOCKED_TIMESTAMPS from .constants import ( ALL_METRICS, + MOCKED_TIMESTAMPS, PROJECT_ALL_METRICS, PROJECT_GROUP_ALL_METRICS, PROJECT_GROUP_NO_METRICS, From 14e851add573394068589156db409a64f4b655ad Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 1 Oct 2024 14:53:42 -0400 Subject: [PATCH 30/44] lint --- octopus_deploy/tests/docker/docker-compose.yaml | 2 +- octopus_deploy/tests/test_e2e.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/octopus_deploy/tests/docker/docker-compose.yaml b/octopus_deploy/tests/docker/docker-compose.yaml index b4ccb78364e14..6d244b521e5e7 100644 --- a/octopus_deploy/tests/docker/docker-compose.yaml +++ b/octopus_deploy/tests/docker/docker-compose.yaml @@ -1,4 +1,4 @@ -services: +services: octopus-api: image: caddy:2.7 diff --git a/octopus_deploy/tests/test_e2e.py b/octopus_deploy/tests/test_e2e.py index 06aa09b082576..54d144a6c40a1 100644 --- a/octopus_deploy/tests/test_e2e.py +++ b/octopus_deploy/tests/test_e2e.py @@ -2,8 +2,10 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import pytest + from .constants import ALL_METRICS + @pytest.mark.e2e def test_e2e(dd_agent_check, instance): aggregator = dd_agent_check(instance) @@ -11,4 +13,3 @@ def test_e2e(dd_agent_check, instance): aggregator.assert_metric('octopus_deploy.api.can_connect', 1, tags=['space_name:Default']) for metric in ALL_METRICS: aggregator.assert_metric(metric) - From 67c6303fb00a48d5f1997c047fbec7653c63f714 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Tue, 1 Oct 2024 14:57:47 -0400 Subject: [PATCH 31/44] fix tests --- octopus_deploy/tests/conftest.py | 2 +- octopus_deploy/tests/test_unit.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index a0db85f095866..feba4c97a26e8 
100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -112,7 +112,7 @@ def get(url, *args, **kwargs): param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items()) request_path = f'{url}/{param_string}' - request_path = request_path.replace(" ") + request_path = request_path.replace(" ", "") if http_error and request_path in http_error: return http_error[request_path] diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 9de00bbaf4128..300c627c96cd4 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -223,7 +223,7 @@ def test_task_metrics(get_current_datetime, dd_run_check, aggregator, instance): pytest.param( { 'http_error': { - '/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 ' + '/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23' '14:45:58.888492+00:00': MockResponse(status_code=404) } }, From 1e58c47274a04a9f82acaac08964cc5324d378c4 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Wed, 2 Oct 2024 15:32:07 -0400 Subject: [PATCH 32/44] Add deployment duration metric --- .../datadog_checks/octopus_deploy/check.py | 16 ++- .../octopus_deploy/constants.py | 3 + .../octopus_deploy/project_groups.py | 1 - octopus_deploy/metadata.csv | 3 +- octopus_deploy/tests/constants.py | 122 ++++++++++++++++-- octopus_deploy/tests/test_unit.py | 13 +- 6 files changed, 138 insertions(+), 20 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index f1c6255dbf8ab..53306ae6716bf 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -11,7 +11,13 @@ from datadog_checks.base.utils.models.types import copy_raw from .config_models import ConfigMixin -from .constants import API_UP_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC +from .constants import ( + API_UP_METRIC, 
+ DEPLOY_COUNT_METRIC, + DEPLOY_DURATION_METRIC, + PROJECT_COUNT_METRIC, + PROJECT_GROUP_COUNT_METRIC, +) from .error import handle_error from .project_groups import Project, ProjectGroup @@ -51,8 +57,13 @@ def _get_new_tasks_for_project(self, project): task_name = task.get("Name") state = task.get("State") completed_time = task.get("CompletedTime") + start_time = task.get("StartTime") completed_time_converted = datetime.fromisoformat(completed_time) + start_time_converted = datetime.fromisoformat(start_time) + duration = completed_time_converted - start_time_converted + duration_microseconds = duration.total_seconds() + if completed_time_converted > new_completed_time: new_completed_time = completed_time_converted @@ -65,7 +76,8 @@ def _get_new_tasks_for_project(self, project): tags = [f'task_name:{task_name}', f'task_id:{task_id}', f'task_state:{state}'] - self.gauge("task.count", 1, tags=self.base_tags + project_tags + tags) + self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + project_tags + tags) + self.gauge(DEPLOY_DURATION_METRIC, duration_microseconds, tags=self.base_tags + project_tags + tags) new_completed_time = new_completed_time + timedelta(milliseconds=1) project.last_completed_time = new_completed_time diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index faeb25ef189a6..e6a55a229d168 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -6,3 +6,6 @@ SPACE_COUNT_METRIC = "space.count" PROJECT_GROUP_COUNT_METRIC = "project_group.count" PROJECT_COUNT_METRIC = "project.count" +DEPLOY_PREFIX = "deployment" +DEPLOY_COUNT_METRIC = f"{DEPLOY_PREFIX}.count" +DEPLOY_DURATION_METRIC = f"{DEPLOY_PREFIX}.duration" diff --git a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py index de6c31a17b9db..1c1a561a0df1e 100644 
--- a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py @@ -16,5 +16,4 @@ def __init__(self, project_json, project_group): self.id = project_json.get("Id") self.name = project_json.get("Name") self.project_group = project_group - self.last_task_id = None self.last_task_time = get_current_datetime() diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 52dd522a7d78c..51b6802401c82 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -3,4 +3,5 @@ octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, -octopus_deploy.task.count,gauge,,,,Number of tasks monitored.,-1,octopus_deploy,octopus_deploy space count,, +octopus_deploy.deployment.count,gauge,,,,Number of tasks monitored.,-1,octopus_deploy,octopus_deploy space count,, +octopus_deploy.deployment.duration,gauge,,second,,Duration of task.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 361a39d524c60..2004b8ab394e7 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -15,7 +15,12 @@ MOCKED_TIMESTAMPS = [BASE_TIME] * 20 -ALL_METRICS = ["octopus_deploy.project_group.count", "octopus_deploy.project.count", "octopus_deploy.task.count"] +ALL_METRICS = [ + "octopus_deploy.project_group.count", + "octopus_deploy.project.count", + "octopus_deploy.deployment.count", + "octopus_deploy.deployment.duration", +] PROJECT_GROUP_ALL_METRICS = [ { @@ -465,9 +470,9 @@ ] -TASK_COUNT_METRICS = [ 
+DEPLOYMENT_METRICS = [ { - 'name': 'octopus_deploy.task.count', + 'name': 'octopus_deploy.deployment.duration', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -479,9 +484,25 @@ "task_state:Success", ], 'count': 1, + 'value': 2.073, + }, + { + 'name': 'octopus_deploy.deployment.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 1, }, { - 'name': 'octopus_deploy.task.count', + 'name': 'octopus_deploy.deployment.duration', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -493,9 +514,25 @@ "task_state:Success", ], 'count': 1, + 'value': 2.134, + }, + { + 'name': 'octopus_deploy.deployment.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + 'value': 1, }, { - 'name': 'octopus_deploy.task.count', + 'name': 'octopus_deploy.deployment.duration', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -507,9 +544,25 @@ "task_state:Failed", ], 'count': 1, + 'value': 6.267, + }, + { + 'name': 'octopus_deploy.deployment.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + "task_id:ServerTasks-1844", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 1, }, { - 'name': 'octopus_deploy.task.count', + 'name': 'octopus_deploy.deployment.duration', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -521,13 +574,14 @@ 
"task_state:Success", ], 'count': 1, + 'value': 3.192, }, ] -TASK_COUNT_METRICS_NO_PROJECT_1 = [ +DEPLOYMENT_METRICS_NO_PROJECT_1 = [ { - 'name': 'octopus_deploy.task.count', + 'name': 'octopus_deploy.deployment.count', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -539,9 +593,25 @@ "task_state:Success", ], 'count': 1, + 'value': 1, }, { - 'name': 'octopus_deploy.task.count', + 'name': 'octopus_deploy.deployment.duration', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1845", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 2.073, + }, + { + 'name': 'octopus_deploy.deployment.count', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -553,9 +623,40 @@ "task_state:Success", ], 'count': 1, + 'value': 1, + }, + { + 'name': 'octopus_deploy.deployment.duration', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 2.134, + }, + { + 'name': 'octopus_deploy.deployment.count', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + 'value': 1, }, { - 'name': 'octopus_deploy.task.count', + 'name': 'octopus_deploy.deployment.duration', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -567,5 +668,6 @@ "task_state:Failed", ], 'count': 1, + 'value': 6.267, }, ] diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 
300c627c96cd4..5eb025eb61fd7 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -14,6 +14,8 @@ from .constants import ( ALL_METRICS, + DEPLOYMENT_METRICS, + DEPLOYMENT_METRICS_NO_PROJECT_1, MOCKED_TIMESTAMPS, PROJECT_ALL_METRICS, PROJECT_GROUP_ALL_METRICS, @@ -23,8 +25,6 @@ PROJECT_NO_METRICS, PROJECT_ONLY_HI_METRICS, PROJECT_ONLY_HI_MY_PROJECT_METRICS, - TASK_COUNT_METRICS, - TASK_COUNT_METRICS_NO_PROJECT_1, ) @@ -209,12 +209,13 @@ def test_projects_discovery( @pytest.mark.usefixtures('mock_http_get') @mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_task_metrics(get_current_datetime, dd_run_check, aggregator, instance): +def test_deployment_metrics(get_current_datetime, dd_run_check, aggregator, instance, caplog): + caplog.set_level(logging.DEBUG) check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) - for metric in TASK_COUNT_METRICS: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + for metric in DEPLOYMENT_METRICS: + aggregator.assert_metric(metric["name"], count=metric["count"], value=metric["value"], tags=metric["tags"]) @pytest.mark.parametrize( @@ -241,5 +242,5 @@ def test_exception_when_getting_tasks(get_current_datetime, dd_run_check, aggreg dd_run_check(check) assert message in caplog.text - for metric in PROJECT_GROUP_ALL_METRICS + PROJECT_ALL_METRICS + TASK_COUNT_METRICS_NO_PROJECT_1: + for metric in PROJECT_GROUP_ALL_METRICS + PROJECT_ALL_METRICS + DEPLOYMENT_METRICS_NO_PROJECT_1: aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) From 3b74fddedc920386e6f484bea130355db3e106b9 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Wed, 2 Oct 2024 15:33:04 -0400 Subject: [PATCH 33/44] rename to seconds --- octopus_deploy/datadog_checks/octopus_deploy/check.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 53306ae6716bf..c09b9dcf96922 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -62,7 +62,7 @@ def _get_new_tasks_for_project(self, project): completed_time_converted = datetime.fromisoformat(completed_time) start_time_converted = datetime.fromisoformat(start_time) duration = completed_time_converted - start_time_converted - duration_microseconds = duration.total_seconds() + duration_seconds = duration.total_seconds() if completed_time_converted > new_completed_time: new_completed_time = completed_time_converted @@ -77,7 +77,7 @@ def _get_new_tasks_for_project(self, project): tags = [f'task_name:{task_name}', f'task_id:{task_id}', f'task_state:{state}'] self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + project_tags + tags) - self.gauge(DEPLOY_DURATION_METRIC, duration_microseconds, tags=self.base_tags + project_tags + tags) + self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + project_tags + tags) new_completed_time = new_completed_time + timedelta(milliseconds=1) project.last_completed_time = new_completed_time From 27b2c5e9b13f862cf91d12c6824588f5b6cc923b Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Wed, 2 Oct 2024 16:39:16 -0400 Subject: [PATCH 34/44] Add queue time metric --- .../datadog_checks/octopus_deploy/check.py | 8 ++ .../octopus_deploy/constants.py | 1 + octopus_deploy/metadata.csv | 5 +- octopus_deploy/tests/constants.py | 99 +++++++++++++++++-- 4 files changed, 103 insertions(+), 10 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index c09b9dcf96922..3e4188f9792f7 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -15,6 +15,7 @@ API_UP_METRIC, 
DEPLOY_COUNT_METRIC, DEPLOY_DURATION_METRIC, + DEPLOY_QUEUE_TIME_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC, ) @@ -58,12 +59,18 @@ def _get_new_tasks_for_project(self, project): state = task.get("State") completed_time = task.get("CompletedTime") start_time = task.get("StartTime") + queue_time = task.get("QueueTime") completed_time_converted = datetime.fromisoformat(completed_time) start_time_converted = datetime.fromisoformat(start_time) + queue_time_converted = datetime.fromisoformat(queue_time) + duration = completed_time_converted - start_time_converted duration_seconds = duration.total_seconds() + queue_time = start_time_converted - queue_time_converted + queue_time_seconds = queue_time.total_seconds() + if completed_time_converted > new_completed_time: new_completed_time = completed_time_converted @@ -78,6 +85,7 @@ def _get_new_tasks_for_project(self, project): self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + project_tags + tags) + self.gauge(DEPLOY_QUEUE_TIME_METRIC, queue_time_seconds, tags=self.base_tags + project_tags + tags) new_completed_time = new_completed_time + timedelta(milliseconds=1) project.last_completed_time = new_completed_time diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index e6a55a229d168..55862d8217ec2 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -9,3 +9,4 @@ DEPLOY_PREFIX = "deployment" DEPLOY_COUNT_METRIC = f"{DEPLOY_PREFIX}.count" DEPLOY_DURATION_METRIC = f"{DEPLOY_PREFIX}.duration" +DEPLOY_QUEUE_TIME_METRIC = f"{DEPLOY_PREFIX}.queue_time" diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 51b6802401c82..015486d0af29f 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1,7 +1,8 @@ 
metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, +octopus_deploy.deployment.count,gauge,,,,Number of deployments monitored.,-1,octopus_deploy,octopus_deploy space count,, +octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy space count,, +octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy space count,, octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, -octopus_deploy.deployment.count,gauge,,,,Number of tasks monitored.,-1,octopus_deploy,octopus_deploy space count,, -octopus_deploy.deployment.duration,gauge,,second,,Duration of task.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 2004b8ab394e7..c9c07f3039eb4 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -20,6 +20,7 @@ "octopus_deploy.project.count", "octopus_deploy.deployment.count", "octopus_deploy.deployment.duration", + "octopus_deploy.deployment.queue_time", ] PROJECT_GROUP_ALL_METRICS = [ @@ -486,6 +487,21 @@ 'count': 1, 'value': 2.073, }, + { + 'name': 'octopus_deploy.deployment.queue_time', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1845", + "task_name:Deploy", + 
"task_state:Success", + ], + 'count': 1, + 'value': 0.639, + }, { 'name': 'octopus_deploy.deployment.count', 'tags': [ @@ -516,6 +532,21 @@ 'count': 1, 'value': 2.134, }, + { + 'name': 'octopus_deploy.deployment.queue_time', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 0.67, + }, { 'name': 'octopus_deploy.deployment.count', 'tags': [ @@ -546,6 +577,21 @@ 'count': 1, 'value': 6.267, }, + { + 'name': 'octopus_deploy.deployment.queue_time', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + 'value': 0.631, + }, { 'name': 'octopus_deploy.deployment.count', 'tags': [ @@ -576,12 +622,27 @@ 'count': 1, 'value': 3.192, }, + { + 'name': 'octopus_deploy.deployment.queue_time', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + "task_id:ServerTasks-1844", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 0.613, + }, ] DEPLOYMENT_METRICS_NO_PROJECT_1 = [ { - 'name': 'octopus_deploy.deployment.count', + 'name': 'octopus_deploy.deployment.duration', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -593,10 +654,9 @@ "task_state:Success", ], 'count': 1, - 'value': 1, }, { - 'name': 'octopus_deploy.deployment.duration', + 'name': 'octopus_deploy.deployment.queue_time', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", @@ -608,7 +668,6 @@ "task_state:Success", ], 'count': 1, - 'value': 2.073, }, { 'name': 
'octopus_deploy.deployment.count', @@ -623,7 +682,6 @@ "task_state:Success", ], 'count': 1, - 'value': 1, }, { 'name': 'octopus_deploy.deployment.duration', @@ -638,7 +696,20 @@ "task_state:Success", ], 'count': 1, - 'value': 2.134, + }, + { + 'name': 'octopus_deploy.deployment.queue_time', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, }, { 'name': 'octopus_deploy.deployment.count', @@ -653,7 +724,6 @@ "task_state:Failed", ], 'count': 1, - 'value': 1, }, { 'name': 'octopus_deploy.deployment.duration', @@ -668,6 +738,19 @@ "task_state:Failed", ], 'count': 1, - 'value': 6.267, + }, + { + 'name': 'octopus_deploy.deployment.queue_time', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, }, ] From 020982480f450507ba916ff64884e3cb0adfbfb7 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 7 Oct 2024 12:42:47 -0400 Subject: [PATCH 35/44] Add deploy success metric --- .../datadog_checks/octopus_deploy/check.py | 5 ++ .../octopus_deploy/constants.py | 3 + octopus_deploy/metadata.csv | 7 +- octopus_deploy/tests/constants.py | 84 +++++++++++++++++++ 4 files changed, 96 insertions(+), 3 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 3e4188f9792f7..d813cf77d8a36 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -16,6 +16,8 @@ DEPLOY_COUNT_METRIC, DEPLOY_DURATION_METRIC, DEPLOY_QUEUE_TIME_METRIC, + DEPLOY_SUCCESS_METRIC, + DEPLOY_SUCCESS_STATE, PROJECT_COUNT_METRIC, 
PROJECT_GROUP_COUNT_METRIC, ) @@ -74,6 +76,8 @@ def _get_new_tasks_for_project(self, project): if completed_time_converted > new_completed_time: new_completed_time = completed_time_converted + succeeded = state == DEPLOY_SUCCESS_STATE + project_tags = [ f"project_id:{project.id}", f"project_name:{project.name}", @@ -86,6 +90,7 @@ def _get_new_tasks_for_project(self, project): self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_QUEUE_TIME_METRIC, queue_time_seconds, tags=self.base_tags + project_tags + tags) + self.gauge(DEPLOY_SUCCESS_METRIC, succeeded, tags=self.base_tags + project_tags + tags) new_completed_time = new_completed_time + timedelta(milliseconds=1) project.last_completed_time = new_completed_time diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index 55862d8217ec2..462614afc2917 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -10,3 +10,6 @@ DEPLOY_COUNT_METRIC = f"{DEPLOY_PREFIX}.count" DEPLOY_DURATION_METRIC = f"{DEPLOY_PREFIX}.duration" DEPLOY_QUEUE_TIME_METRIC = f"{DEPLOY_PREFIX}.queue_time" +DEPLOY_SUCCESS_METRIC = f"{DEPLOY_PREFIX}.succeeded" + +DEPLOY_SUCCESS_STATE = "Success" diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 015486d0af29f..ad6e96d9c4d84 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1,8 +1,9 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, -octopus_deploy.deployment.count,gauge,,,,Number of deployments 
monitored.,-1,octopus_deploy,octopus_deploy space count,, -octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy space count,, -octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy space count,, +octopus_deploy.deployment.count,gauge,,,,Number of deployments monitored.,-1,octopus_deploy,octopus_deploy deploy count,, +octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, +octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, +octopus_deploy.deployment.succeeded,gauge,,second,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index c9c07f3039eb4..2f03903caf823 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -21,6 +21,7 @@ "octopus_deploy.deployment.count", "octopus_deploy.deployment.duration", "octopus_deploy.deployment.queue_time", + "octopus_deploy.deployment.succeeded", ] PROJECT_GROUP_ALL_METRICS = [ @@ -517,6 +518,21 @@ 'count': 1, 'value': 1, }, + { + 'name': 'octopus_deploy.deployment.succeeded', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 1, + }, { 'name': 
'octopus_deploy.deployment.duration', 'tags': [ @@ -562,6 +578,21 @@ 'count': 1, 'value': 1, }, + { + 'name': 'octopus_deploy.deployment.succeeded', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + 'value': 0, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -607,6 +638,21 @@ 'count': 1, 'value': 1, }, + { + 'name': 'octopus_deploy.deployment.succeeded', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + "task_id:ServerTasks-1844", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 1, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -654,6 +700,7 @@ "task_state:Success", ], 'count': 1, + 'value': 2.073, }, { 'name': 'octopus_deploy.deployment.queue_time', @@ -668,6 +715,7 @@ "task_state:Success", ], 'count': 1, + 'value': 0.639, }, { 'name': 'octopus_deploy.deployment.count', @@ -682,6 +730,22 @@ "task_state:Success", ], 'count': 1, + 'value': 1, + }, + { + 'name': 'octopus_deploy.deployment.succeeded', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 1, }, { 'name': 'octopus_deploy.deployment.duration', @@ -696,6 +760,7 @@ "task_state:Success", ], 'count': 1, + 'value': 2.134, }, { 'name': 'octopus_deploy.deployment.queue_time', @@ -710,6 +775,7 @@ "task_state:Success", ], 'count': 1, + 'value': 0.67, }, { 'name': 'octopus_deploy.deployment.count', @@ -724,6 +790,22 @@ "task_state:Failed", ], 'count': 1, + 'value': 1, + }, + { + 'name': 
'octopus_deploy.deployment.succeeded', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + 'value': 0, }, { 'name': 'octopus_deploy.deployment.duration', @@ -738,6 +820,7 @@ "task_state:Failed", ], 'count': 1, + 'value': 6.267, }, { 'name': 'octopus_deploy.deployment.queue_time', @@ -752,5 +835,6 @@ "task_state:Failed", ], 'count': 1, + 'value': 0.631, }, ] From e1abac8234afb25c864151998f3d3ef837c55741 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 7 Oct 2024 13:17:14 -0400 Subject: [PATCH 36/44] Add can rerun metric --- .../datadog_checks/octopus_deploy/check.py | 5 +- .../octopus_deploy/constants.py | 1 + octopus_deploy/metadata.csv | 1 + octopus_deploy/tests/constants.py | 84 ++++++++++++++++--- 4 files changed, 80 insertions(+), 11 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index d813cf77d8a36..68c67e7384a40 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -16,6 +16,7 @@ DEPLOY_COUNT_METRIC, DEPLOY_DURATION_METRIC, DEPLOY_QUEUE_TIME_METRIC, + DEPLOY_RERUN_METRIC, DEPLOY_SUCCESS_METRIC, DEPLOY_SUCCESS_STATE, PROJECT_COUNT_METRIC, @@ -62,6 +63,7 @@ def _get_new_tasks_for_project(self, project): completed_time = task.get("CompletedTime") start_time = task.get("StartTime") queue_time = task.get("QueueTime") + can_rerun = int(task.get("CanRerun", False)) completed_time_converted = datetime.fromisoformat(completed_time) start_time_converted = datetime.fromisoformat(start_time) @@ -76,7 +78,7 @@ def _get_new_tasks_for_project(self, project): if completed_time_converted > new_completed_time: new_completed_time = completed_time_converted - succeeded = state == DEPLOY_SUCCESS_STATE + 
succeeded = int(state == DEPLOY_SUCCESS_STATE) project_tags = [ f"project_id:{project.id}", @@ -91,6 +93,7 @@ def _get_new_tasks_for_project(self, project): self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_QUEUE_TIME_METRIC, queue_time_seconds, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_SUCCESS_METRIC, succeeded, tags=self.base_tags + project_tags + tags) + self.gauge(DEPLOY_RERUN_METRIC, can_rerun, tags=self.base_tags + project_tags + tags) new_completed_time = new_completed_time + timedelta(milliseconds=1) project.last_completed_time = new_completed_time diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index 462614afc2917..2597910d4d926 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -11,5 +11,6 @@ DEPLOY_DURATION_METRIC = f"{DEPLOY_PREFIX}.duration" DEPLOY_QUEUE_TIME_METRIC = f"{DEPLOY_PREFIX}.queue_time" DEPLOY_SUCCESS_METRIC = f"{DEPLOY_PREFIX}.succeeded" +DEPLOY_RERUN_METRIC = f"{DEPLOY_PREFIX}.can_rerun" DEPLOY_SUCCESS_STATE = "Success" diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index ad6e96d9c4d84..bb58f0cddf1f5 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -4,6 +4,7 @@ octopus_deploy.deployment.count,gauge,,,,Number of deployments monitored.,-1,oct octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, octopus_deploy.deployment.succeeded,gauge,,second,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, +octopus_deploy.deployment.can_rerun,gauge,,second,,Whether or not the deployment can be 
rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,, octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 2f03903caf823..7ac4bbd7260fe 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -22,6 +22,7 @@ "octopus_deploy.deployment.duration", "octopus_deploy.deployment.queue_time", "octopus_deploy.deployment.succeeded", + "octopus_deploy.deployment.can_rerun", ] PROJECT_GROUP_ALL_METRICS = [ @@ -533,6 +534,21 @@ 'count': 1, 'value': 1, }, + { + 'name': 'octopus_deploy.deployment.can_rerun', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 0, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -593,6 +609,21 @@ 'count': 1, 'value': 0, }, + { + 'name': 'octopus_deploy.deployment.can_rerun', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + 'value': 0, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -653,6 +684,21 @@ 'count': 1, 'value': 1, }, + { + 'name': 'octopus_deploy.deployment.can_rerun', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + "project_id:Projects-1", + 
"task_id:ServerTasks-1844", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 0, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -700,7 +746,6 @@ "task_state:Success", ], 'count': 1, - 'value': 2.073, }, { 'name': 'octopus_deploy.deployment.queue_time', @@ -715,7 +760,6 @@ "task_state:Success", ], 'count': 1, - 'value': 0.639, }, { 'name': 'octopus_deploy.deployment.count', @@ -730,7 +774,6 @@ "task_state:Success", ], 'count': 1, - 'value': 1, }, { 'name': 'octopus_deploy.deployment.succeeded', @@ -745,7 +788,20 @@ "task_state:Success", ], 'count': 1, - 'value': 1, + }, + { + 'name': 'octopus_deploy.deployment.can_rerun', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, }, { 'name': 'octopus_deploy.deployment.duration', @@ -760,7 +816,6 @@ "task_state:Success", ], 'count': 1, - 'value': 2.134, }, { 'name': 'octopus_deploy.deployment.queue_time', @@ -775,7 +830,6 @@ "task_state:Success", ], 'count': 1, - 'value': 0.67, }, { 'name': 'octopus_deploy.deployment.count', @@ -790,7 +844,6 @@ "task_state:Failed", ], 'count': 1, - 'value': 1, }, { 'name': 'octopus_deploy.deployment.succeeded', @@ -805,7 +858,20 @@ "task_state:Failed", ], 'count': 1, - 'value': 0, + }, + { + 'name': 'octopus_deploy.deployment.can_rerun', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, }, { 'name': 'octopus_deploy.deployment.duration', @@ -820,7 +886,6 @@ "task_state:Failed", ], 'count': 1, - 'value': 6.267, }, { 'name': 'octopus_deploy.deployment.queue_time', @@ -835,6 +900,5 @@ "task_state:Failed", ], 'count': 1, - 'value': 
0.631, }, ] From 49e61350ecd60d5caaf0dc8a6a5a3c3cbbd377bd Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 7 Oct 2024 13:38:11 -0400 Subject: [PATCH 37/44] add metric for has warnings or errors --- .../datadog_checks/octopus_deploy/check.py | 3 + .../octopus_deploy/constants.py | 1 + octopus_deploy/metadata.csv | 5 +- octopus_deploy/tests/constants.py | 74 +++++++++++++++++++ 4 files changed, 81 insertions(+), 2 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 68c67e7384a40..a310ace2897b6 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -19,6 +19,7 @@ DEPLOY_RERUN_METRIC, DEPLOY_SUCCESS_METRIC, DEPLOY_SUCCESS_STATE, + DEPLOY_WARNINGS_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC, ) @@ -64,6 +65,7 @@ def _get_new_tasks_for_project(self, project): start_time = task.get("StartTime") queue_time = task.get("QueueTime") can_rerun = int(task.get("CanRerun", False)) + has_warnings = int(task.get("HasWarningsOrErrors", False)) completed_time_converted = datetime.fromisoformat(completed_time) start_time_converted = datetime.fromisoformat(start_time) @@ -94,6 +96,7 @@ def _get_new_tasks_for_project(self, project): self.gauge(DEPLOY_QUEUE_TIME_METRIC, queue_time_seconds, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_SUCCESS_METRIC, succeeded, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_RERUN_METRIC, can_rerun, tags=self.base_tags + project_tags + tags) + self.gauge(DEPLOY_WARNINGS_METRIC, has_warnings, tags=self.base_tags + project_tags + tags) new_completed_time = new_completed_time + timedelta(milliseconds=1) project.last_completed_time = new_completed_time diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index 2597910d4d926..96c6513df4bc9 100644 --- 
a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -12,5 +12,6 @@ DEPLOY_QUEUE_TIME_METRIC = f"{DEPLOY_PREFIX}.queue_time" DEPLOY_SUCCESS_METRIC = f"{DEPLOY_PREFIX}.succeeded" DEPLOY_RERUN_METRIC = f"{DEPLOY_PREFIX}.can_rerun" +DEPLOY_WARNINGS_METRIC = f"{DEPLOY_PREFIX}.has_warnings_or_errors" DEPLOY_SUCCESS_STATE = "Success" diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index bb58f0cddf1f5..7564d6bf1145f 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -2,9 +2,10 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, octopus_deploy.deployment.count,gauge,,,,Number of deployments monitored.,-1,octopus_deploy,octopus_deploy deploy count,, octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, +octopus_deploy.deployment.has_warnings_or_errors,gauge,,,,Whether or not the deployment has warnings or errors.,-1,octopus_deploy,octopus_deploy warnings,, octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, -octopus_deploy.deployment.succeeded,gauge,,second,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, -octopus_deploy.deployment.can_rerun,gauge,,second,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,, +octopus_deploy.deployment.succeeded,gauge,,,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, +octopus_deploy.deployment.can_rerun,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,, octopus_deploy.project.count,gauge,,,,Number of projects
discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 7ac4bbd7260fe..3b918be35c393 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -20,6 +20,7 @@ "octopus_deploy.project.count", "octopus_deploy.deployment.count", "octopus_deploy.deployment.duration", + "octopus_deploy.deployment.has_warnings_or_errors", "octopus_deploy.deployment.queue_time", "octopus_deploy.deployment.succeeded", "octopus_deploy.deployment.can_rerun", @@ -549,6 +550,21 @@ 'count': 1, 'value': 0, }, + { + 'name': 'octopus_deploy.deployment.has_warnings_or_errors', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 0, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -624,6 +640,21 @@ 'count': 1, 'value': 0, }, + { + 'name': 'octopus_deploy.deployment.has_warnings_or_errors', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + 'value': 1, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -699,6 +730,21 @@ 'count': 1, 'value': 0, }, + { + 'name': 'octopus_deploy.deployment.has_warnings_or_errors', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test-api", + 
"project_id:Projects-1", + "task_id:ServerTasks-1844", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + 'value': 0, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -803,6 +849,20 @@ ], 'count': 1, }, + { + 'name': 'octopus_deploy.deployment.has_warnings_or_errors', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1846", + "task_name:Deploy", + "task_state:Success", + ], + 'count': 1, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ @@ -873,6 +933,20 @@ ], 'count': 1, }, + { + 'name': 'octopus_deploy.deployment.has_warnings_or_errors', + 'tags': [ + "project_group_name:Default Project Group", + "project_group_id:ProjectGroups-1", + "space_name:Default", + "project_name:test", + "project_id:Projects-3", + "task_id:ServerTasks-1847", + "task_name:Deploy", + "task_state:Failed", + ], + 'count': 1, + }, { 'name': 'octopus_deploy.deployment.duration', 'tags': [ From d84cad9492e64e18befcc11818d747009913de96 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 7 Oct 2024 13:41:15 -0400 Subject: [PATCH 38/44] sort readme --- octopus_deploy/metadata.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 7564d6bf1145f..1402e6cf31ce0 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1,11 +1,11 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, +octopus_deploy.deployment.can_rerun,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,, octopus_deploy.deployment.count,gauge,,,,Number of deployments 
monitored.,-1,octopus_deploy,octopus_deploy deploy count,, octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, octopus_deploy.deployment.has_warnings_or_errors,gauge,,,,Whether or not the deployment has warnings or errors.,-1,octopus_deploy,octopus_deploy warnings,, octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, octopus_deploy.deployment.succeeded,gauge,,,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, -octopus_deploy.deployment.can_rerun,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,, octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, From a88f90aef8c2d177e654d6421229602c8e3e0653 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Thu, 17 Oct 2024 14:20:34 -0400 Subject: [PATCH 39/44] Sarah/add octopus server metrics (#18868) * Add support for server node metrics * add server node mocks * remove task id tag * update caddyfile --- .../datadog_checks/octopus_deploy/check.py | 28 +++- .../octopus_deploy/constants.py | 6 + octopus_deploy/metadata.csv | 3 + octopus_deploy/tests/constants.py | 120 +++++------------- octopus_deploy/tests/docker/Caddyfile | 8 ++ .../GET/api/octopusservernodes/response.json | 26 ++++ octopus_deploy/tests/test_unit.py | 12 ++ 7 files changed, 117 insertions(+), 86 deletions(-) create mode 100644 octopus_deploy/tests/fixtures/GET/api/octopusservernodes/response.json diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index 
a310ace2897b6..af22be6436759 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -22,6 +22,9 @@ DEPLOY_WARNINGS_METRIC, PROJECT_COUNT_METRIC, PROJECT_GROUP_COUNT_METRIC, + SERVER_COUNT_METRIC, + SERVER_MAINTENANCE_MODE_METRIC, + SERVER_MAX_TASKS_METRIC, ) from .error import handle_error from .project_groups import Project, ProjectGroup @@ -67,6 +70,8 @@ def _get_new_tasks_for_project(self, project): can_rerun = int(task.get("CanRerun", False)) has_warnings = int(task.get("HasWarningsOrErrors", False)) + self.log.debug("Found task id=%s, name=%s", task_id, task_name) + completed_time_converted = datetime.fromisoformat(completed_time) start_time_converted = datetime.fromisoformat(start_time) queue_time_converted = datetime.fromisoformat(queue_time) @@ -89,7 +94,7 @@ def _get_new_tasks_for_project(self, project): f"project_group_name:{project.project_group.name}", ] - tags = [f'task_name:{task_name}', f'task_id:{task_id}', f'task_state:{state}'] + tags = [f'task_name:{task_name}', f'task_state:{state}'] self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + project_tags + tags) self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + project_tags + tags) @@ -223,12 +228,33 @@ def project_groups(self): ] return project_groups + @handle_error + def collect_server_nodes_metrics(self): + self.log.debug("Collecting server node metrics.") + url = f"{self.config.octopus_endpoint}/octopusservernodes" + response = self.http.get(url) + response.raise_for_status() + server_nodes = response.json().get('Items', []) + + for server_node in server_nodes: + node_id = server_node.get("Id") + node_name = server_node.get("Name") + maintenance_mode = int(server_node.get("IsInMaintenanceMode", False)) + max_tasks = int(server_node.get("MaxConcurrentTasks", 0)) + server_tags = [f"server_node_id:{node_id}", f"server_node_name:{node_name}"] + + self.gauge(SERVER_COUNT_METRIC, 1, 
tags=self.base_tags + server_tags) + self.gauge(SERVER_MAINTENANCE_MODE_METRIC, maintenance_mode, tags=self.base_tags + server_tags) + self.gauge(SERVER_MAX_TASKS_METRIC, max_tasks, tags=self.base_tags + server_tags) + def check(self, _): for _, _, project_group, _ in self.project_groups(): self.collect_project_metrics(project_group) for _, _, project, _ in self.projects(project_group): self._get_new_tasks_for_project(project) + self.collect_server_nodes_metrics() + # Discovery class requires 'include' to be a dict, so this function is needed to normalize the config def normalize_discover_config_include(log, config): diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py index 96c6513df4bc9..6bc726d3be640 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/constants.py @@ -14,4 +14,10 @@ DEPLOY_RERUN_METRIC = f"{DEPLOY_PREFIX}.can_rerun" DEPLOY_WARNINGS_METRIC = f"{DEPLOY_PREFIX}.has_warnings_or_errors" +SERVER_PREFIX = "server_node" +SERVER_COUNT_METRIC = f"{SERVER_PREFIX}.count" +SERVER_MAINTENANCE_MODE_METRIC = f"{SERVER_PREFIX}.in_maintenance_mode" +SERVER_MAX_TASKS_METRIC = f"{SERVER_PREFIX}.max_concurrent_tasks" + + DEPLOY_SUCCESS_STATE = "Success" diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 1402e6cf31ce0..7d38572c8996d 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -8,4 +8,7 @@ octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue octopus_deploy.deployment.succeeded,gauge,,,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, 
+octopus_deploy.server_node.count,gauge,,,,Number of Octopus server nodes discovered.,-1,octopus_deploy,octopus_deploy server count,, +octopus_deploy.server_node.in_maintenance_mode,gauge,,,,Whether the Octopus server node is in maintenance mode or not.,-1,octopus_deploy,octopus_deploy server maintenance,, +octopus_deploy.server_node.max_concurrent_tasks,gauge,,,,The max concurrent tasks for the given Octopus server node.,-1,octopus_deploy,octopus_deploy server max tasks,, octopus_deploy.space.count,gauge,,,,Number of spaces discovered.,-1,octopus_deploy,octopus_deploy space count,, diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 3b918be35c393..2218863229c72 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -24,6 +24,9 @@ "octopus_deploy.deployment.queue_time", "octopus_deploy.deployment.succeeded", "octopus_deploy.deployment.can_rerun", + "octopus_deploy.server_node.count", + "octopus_deploy.server_node.in_maintenance_mode", + "octopus_deploy.server_node.max_concurrent_tasks", ] PROJECT_GROUP_ALL_METRICS = [ @@ -483,7 +486,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1845", "task_name:Deploy", "task_state:Success", ], @@ -498,7 +500,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1845", "task_name:Deploy", "task_state:Success", ], @@ -513,11 +514,10 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", "task_state:Success", ], - 'count': 1, + 'count': 2, 'value': 1, }, { @@ -528,11 +528,10 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", "task_state:Success", ], - 'count': 1, + 'count': 2, 'value': 1, }, { @@ -543,11 +542,10 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", 
"task_state:Success", ], - 'count': 1, + 'count': 2, 'value': 0, }, { @@ -558,11 +556,10 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", "task_state:Success", ], - 'count': 1, + 'count': 2, 'value': 0, }, { @@ -573,7 +570,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", "task_state:Success", ], @@ -588,7 +584,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", "task_state:Success", ], @@ -603,7 +598,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], @@ -618,7 +612,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], @@ -633,7 +626,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], @@ -648,7 +640,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], @@ -663,7 +654,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], @@ -678,7 +668,6 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], @@ -693,7 +682,6 @@ "space_name:Default", "project_name:test-api", "project_id:Projects-1", - "task_id:ServerTasks-1844", "task_name:Deploy", "task_state:Success", ], @@ -708,7 +696,6 @@ "space_name:Default", "project_name:test-api", "project_id:Projects-1", - "task_id:ServerTasks-1844", "task_name:Deploy", "task_state:Success", ], @@ -723,7 +710,6 @@ "space_name:Default", "project_name:test-api", 
"project_id:Projects-1", - "task_id:ServerTasks-1844", "task_name:Deploy", "task_state:Success", ], @@ -738,7 +724,6 @@ "space_name:Default", "project_name:test-api", "project_id:Projects-1", - "task_id:ServerTasks-1844", "task_name:Deploy", "task_state:Success", ], @@ -753,7 +738,6 @@ "space_name:Default", "project_name:test-api", "project_id:Projects-1", - "task_id:ServerTasks-1844", "task_name:Deploy", "task_state:Success", ], @@ -768,7 +752,6 @@ "space_name:Default", "project_name:test-api", "project_id:Projects-1", - "task_id:ServerTasks-1844", "task_name:Deploy", "task_state:Success", ], @@ -787,11 +770,10 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1845", "task_name:Deploy", "task_state:Success", ], - 'count': 1, + 'count': 2, }, { 'name': 'octopus_deploy.deployment.queue_time', @@ -801,11 +783,10 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1845", "task_name:Deploy", "task_state:Success", ], - 'count': 1, + 'count': 2, }, { 'name': 'octopus_deploy.deployment.count', @@ -815,11 +796,10 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", "task_state:Success", ], - 'count': 1, + 'count': 2, }, { 'name': 'octopus_deploy.deployment.succeeded', @@ -829,150 +809,120 @@ "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", "task_state:Success", ], - 'count': 1, + 'count': 2, }, { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', + 'name': 'octopus_deploy.deployment.count', 'tags': [ 
"project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", - "task_state:Success", + "task_state:Failed", ], 'count': 1, }, { - 'name': 'octopus_deploy.deployment.duration', + 'name': 'octopus_deploy.deployment.succeeded', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", - "task_state:Success", + "task_state:Failed", ], 'count': 1, }, { - 'name': 'octopus_deploy.deployment.queue_time', + 'name': 'octopus_deploy.deployment.can_rerun', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1846", "task_name:Deploy", - "task_state:Success", + "task_state:Failed", ], 'count': 1, }, { - 'name': 'octopus_deploy.deployment.count', + 'name': 'octopus_deploy.deployment.has_warnings_or_errors', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], 'count': 1, }, { - 'name': 'octopus_deploy.deployment.succeeded', + 'name': 'octopus_deploy.deployment.duration', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", "task_name:Deploy", "task_state:Failed", ], 'count': 1, }, { - 'name': 'octopus_deploy.deployment.can_rerun', + 'name': 'octopus_deploy.deployment.queue_time', 'tags': [ "project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default", "project_name:test", "project_id:Projects-3", - "task_id:ServerTasks-1847", 
"task_name:Deploy", "task_state:Failed", ], 'count': 1, }, +] + +SERVER_NODES_METRICS = [ { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', + 'name': 'octopus_deploy.server_node.count', 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_id:ServerTasks-1847", - "task_name:Deploy", - "task_state:Failed", + "server_node_name:octopus-i8932-79236734bc234-09h234n", + "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", ], 'count': 1, + 'value': 1, }, { - 'name': 'octopus_deploy.deployment.duration', + 'name': 'octopus_deploy.server_node.in_maintenance_mode', 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_id:ServerTasks-1847", - "task_name:Deploy", - "task_state:Failed", + "server_node_name:octopus-i8932-79236734bc234-09h234n", + "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", ], 'count': 1, + 'value': 0, }, { - 'name': 'octopus_deploy.deployment.queue_time', + 'name': 'octopus_deploy.server_node.max_concurrent_tasks', 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_id:ServerTasks-1847", - "task_name:Deploy", - "task_state:Failed", + "server_node_name:octopus-i8932-79236734bc234-09h234n", + "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", ], 'count': 1, + 'value': 5, }, ] diff --git a/octopus_deploy/tests/docker/Caddyfile b/octopus_deploy/tests/docker/Caddyfile index eca9a9706cf02..2316a2a225c0c 100644 --- a/octopus_deploy/tests/docker/Caddyfile +++ b/octopus_deploy/tests/docker/Caddyfile @@ -117,6 +117,14 @@ rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json file_server } + 
@get_octopusservernodes { + method GET + path /api/octopusservernodes + } + route @get_octopusservernodes { + rewrite * /GET/api/octopusservernodes/response.json + file_server + } file_server browse } diff --git a/octopus_deploy/tests/fixtures/GET/api/octopusservernodes/response.json b/octopus_deploy/tests/fixtures/GET/api/octopusservernodes/response.json new file mode 100644 index 0000000000000..29a728dd7c377 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/octopusservernodes/response.json @@ -0,0 +1,26 @@ +{ + "ItemType": "OctopusServerNode", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "Items": [ + { + "Id": "OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", + "Name": "octopus-i8932-79236734bc234-09h234n", + "MaxConcurrentTasks": 5, + "IsInMaintenanceMode": false, + "Links": { + "Self": "/api/octopusservernodes/OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", + "Details": "/api/octopusservernodes/OctopusServerNodes-octopus-i8932-79236734bc234-09h234n/details" + } + } + ], + "Links": { + "Self": "/api/octopusservernodes?skip=0&take=30", + "Template": "/api/octopusservernodes{?skip,take,ids,partialName}", + "Page.All": "/api/octopusservernodes?skip=0&take=2147483647", + "Page.Current": "/api/octopusservernodes?skip=0&take=30", + "Page.Last": "/api/octopusservernodes?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index 5eb025eb61fd7..c73fea621b392 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -25,6 +25,7 @@ PROJECT_NO_METRICS, PROJECT_ONLY_HI_METRICS, PROJECT_ONLY_HI_MY_PROJECT_METRICS, + SERVER_NODES_METRICS, ) @@ -244,3 +245,14 @@ def test_exception_when_getting_tasks(get_current_datetime, dd_run_check, aggreg for metric in PROJECT_GROUP_ALL_METRICS + PROJECT_ALL_METRICS + DEPLOYMENT_METRICS_NO_PROJECT_1: aggregator.assert_metric(metric["name"], 
count=metric["count"], tags=metric["tags"]) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) +def test_octopus_server_node_metrics(get_current_datetime, dd_run_check, aggregator, instance, caplog): + caplog.set_level(logging.DEBUG) + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + dd_run_check(check) + + for metric in SERVER_NODES_METRICS: + aggregator.assert_metric(metric["name"], count=metric["count"], value=metric["value"], tags=metric["tags"]) From 17cd086960e66befb6d095900eb27d35cc9f475d Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Thu, 17 Oct 2024 15:00:55 -0400 Subject: [PATCH 40/44] remove dead code --- .../datadog_checks/octopus_deploy/check.py | 37 +++++++++---------- octopus_deploy/tests/test_unit.py | 3 +- 2 files changed, 19 insertions(+), 21 deletions(-) diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index af22be6436759..eec3b057579b5 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -107,26 +107,25 @@ def _get_new_tasks_for_project(self, project): project.last_completed_time = new_completed_time def _initialize_projects(self, project_group, project_group_config): - if not self._projects_discovery.get(project_group.name): - normalized_projects = normalize_discover_config_include( - self.log, project_group_config.get("projects") if project_group_config else None - ) - self.log.debug( - "Projects discovery for project_group %s: %s", - project_group.name, - normalized_projects, + normalized_projects = normalize_discover_config_include( + self.log, project_group_config.get("projects") if project_group_config else None + ) + self.log.debug( + "Projects discovery for project_group %s: %s", + project_group.name, + normalized_projects, + ) + if normalized_projects: + 
self._projects_discovery[project_group.name] = Discovery( + lambda: self._get_new_projects(project_group), + limit=project_group_config.get('projects').get('limit') if project_group_config else None, + include=normalized_projects, + exclude=project_group_config.get('projects').get('exclude') if project_group_config else None, + interval=(project_group_config.get('projects').get('interval') if project_group_config else None), + key=lambda project: project.name, ) - if normalized_projects: - self._projects_discovery[project_group.name] = Discovery( - lambda: self._get_new_projects(project_group), - limit=project_group_config.get('projects').get('limit') if project_group_config else None, - include=normalized_projects, - exclude=project_group_config.get('projects').get('exclude') if project_group_config else None, - interval=(project_group_config.get('projects').get('interval') if project_group_config else None), - key=lambda project: project.name, - ) - else: - self._projects_discovery[project_group.name] = None + else: + self._projects_discovery[project_group.name] = None self.log.debug("Discovered projects: %s", self._projects_discovery) diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index c73fea621b392..a6c49864d6d79 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -249,8 +249,7 @@ def test_exception_when_getting_tasks(get_current_datetime, dd_run_check, aggreg @pytest.mark.usefixtures('mock_http_get') @mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_octopus_server_node_metrics(get_current_datetime, dd_run_check, aggregator, instance, caplog): - caplog.set_level(logging.DEBUG) +def test_octopus_server_node_metrics(get_current_datetime, dd_run_check, aggregator, instance): check = OctopusDeployCheck('octopus_deploy', {}, [instance]) dd_run_check(check) From 6a885018c172f06632837ae07bd294a24ee58f58 Mon Sep 17 00:00:00 
2001 From: Sarah Witt Date: Thu, 17 Oct 2024 15:15:25 -0400 Subject: [PATCH 41/44] update codeowners --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 45f7a00c1f303..6f28b51c60de5 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -144,6 +144,9 @@ manifest.json @DataDog/documentation @DataDog/agent-integrations /openstack_controller/ @DataDog/platform-integrations @DataDog/agent-integrations /openstack_controller/manifest.json @DataDog/platform-integrations @DataDog/agent-integrations @DataDog/documentation /openstack_controller/*.md @DataDog/platform-integrations @DataDog/agent-integrations @DataDog/documentation +/octopus_deploy/ @DataDog/platform-integrations @DataDog/agent-integrations +/octopus_deploy/manifest.json @DataDog/platform-integrations @DataDog/agent-integrations @DataDog/documentation +/octopus_deploy/*.md @DataDog/platform-integrations @DataDog/agent-integrations @DataDog/documentation /teleport/ @DataDog/platform-integrations @DataDog/agent-integrations /teleport/manifest.json @DataDog/platform-integrations @DataDog/agent-integrations @DataDog/documentation /teleport/*.md @DataDog/platform-integrations @DataDog/agent-integrations @DataDog/documentation From 75396e8f24c28c0f26fea57131730cdc45c5fe69 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 21 Oct 2024 10:54:54 -0400 Subject: [PATCH 42/44] add e2e lab and update config --- octopus_deploy/assets/configuration/spec.yaml | 14 +++ .../octopus_deploy/data/conf.yaml.example | 97 +++++++++---------- octopus_deploy/hatch.toml | 15 +++ octopus_deploy/tests/conftest.py | 21 ++-- octopus_deploy/tests/constants.py | 11 +++ 5 files changed, 99 insertions(+), 59 deletions(-) diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index 74213acbbee0f..27c819dbd228d 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml 
@@ -9,6 +9,7 @@ files: - template: instances options: - name: octopus_endpoint + display_priority: 8 description: | Octopus API endpoint. See https://octopus.com/docs/octopus-rest-api/getting-started#authentication for more details. @@ -17,6 +18,7 @@ files: type: string required: true - name: space + display_priority: 7 description: | Space to monitor value: @@ -25,6 +27,7 @@ files: enabled: true required: true - name: project_groups + display_priority: 5 description: | filter your integration by project groups and projects. value: @@ -49,4 +52,15 @@ files: example: {} - template: instances/default - template: instances/http + overrides: + headers.display_priority: 6 + headers.enabled: true + headers.description: | + Headers to use for every request. An Authorization header including the Octopus Deploy API key token is required + for authentication for the REST API. + You can alternatively use the `auth_token` option. + + headers.value.example: + X-Octopus-ApiKey: "" + auth_token.display_priority: 4 diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index d4249d9651a03..b75d832e4078e 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -56,11 +56,58 @@ instances: # space: Default + ## @param headers - mapping - optional + ## Headers to use for every request. An Authorization header including the Octopus Deploy API key token is required + ## for authentication for the REST API. + ## You can alternatively use the `auth_token` option. + # + headers: + X-Octopus-ApiKey: + ## @param project_groups - mapping - optional ## filter your integration by project groups and projects. # # project_groups: {} + ## @param auth_token - mapping - optional + ## This allows for the use of authentication information from dynamic sources. 
+ ## Both a reader and writer must be configured. + ## + ## The available readers are: + ## + ## - type: file + ## path (required): The absolute path for the file to read from. + ## pattern: A regular expression pattern with a single capture group used to find the + ## token rather than using the entire file, for example: Your secret is (.+) + ## - type: oauth + ## url (required): The token endpoint. + ## client_id (required): The client identifier. + ## client_secret (required): The client secret. + ## basic_auth: Whether the provider expects credentials to be transmitted in + ## an HTTP Basic Auth header. The default is: false + ## options: Mapping of additional options to pass to the provider, such as the audience + ## or the scope. For example: + ## options: + ## audience: https://example.com + ## scope: read:example + ## + ## The available writers are: + ## + ## - type: header + ## name (required): The name of the field, for example: Authorization + ## value: The template value, for example `Bearer `. The default is: + ## placeholder: The substring in `value` to replace with the token, defaults to: + # + # auth_token: + # reader: + # type: + # : + # : + # writer: + # type: + # : + # : + ## @param tags - list of strings - optional ## A list of tags to attach to every metric and service check emitted by this instance. ## @@ -239,45 +286,6 @@ instances: # # kerberos_keytab: - ## @param auth_token - mapping - optional - ## This allows for the use of authentication information from dynamic sources. - ## Both a reader and writer must be configured. - ## - ## The available readers are: - ## - ## - type: file - ## path (required): The absolute path for the file to read from. - ## pattern: A regular expression pattern with a single capture group used to find the - ## token rather than using the entire file, for example: Your secret is (.+) - ## - type: oauth - ## url (required): The token endpoint. - ## client_id (required): The client identifier. 
- ## client_secret (required): The client secret. - ## basic_auth: Whether the provider expects credentials to be transmitted in - ## an HTTP Basic Auth header. The default is: false - ## options: Mapping of additional options to pass to the provider, such as the audience - ## or the scope. For example: - ## options: - ## audience: https://example.com - ## scope: read:example - ## - ## The available writers are: - ## - ## - type: header - ## name (required): The name of the field, for example: Authorization - ## value: The template value, for example `Bearer `. The default is: - ## placeholder: The substring in `value` to replace with the token, defaults to: - # - # auth_token: - # reader: - # type: - # : - # : - # writer: - # type: - # : - # : - ## @param aws_region - string - optional ## If your services require AWS Signature Version 4 signing, set the region. ## @@ -357,17 +365,6 @@ instances: # - TLSv1.2 # - TLSv1.3 - ## @param headers - mapping - optional - ## The headers parameter allows you to send specific headers with every request. - ## You can use it for explicitly specifying the host header or adding headers for - ## authorization purposes. - ## - ## This overrides any default headers. - # - # headers: - # Host: - # X-Auth-Token: - ## @param extra_headers - mapping - optional ## Additional headers to send with every request. 
# diff --git a/octopus_deploy/hatch.toml b/octopus_deploy/hatch.toml index c85c5f07a7df2..f2e48c56d3349 100644 --- a/octopus_deploy/hatch.toml +++ b/octopus_deploy/hatch.toml @@ -2,3 +2,18 @@ [[envs.default.matrix]] python = ["3.12"] +setup = ["caddy"] + +[[envs.default.matrix]] +python = ["3.12"] +setup = ["lab"] + +[envs.default.overrides] +name."caddy".e2e-env = { value = true } +matrix.setup.e2e-env = { value = true, if = ["lab"], env = ["OCTOPUS_LAB_ENDPOINT", "OCTOPUS_API_KEY"] } +matrix.setup.env-vars = [ + { key = "USE_OCTOPUS_LAB", value = "True", if = ["lab"] }, +] + +[envs.default] +e2e-env = false \ No newline at end of file diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index feba4c97a26e8..3aaf2a4d80725 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -14,19 +14,22 @@ from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints from datadog_checks.dev.fs import get_here -from .constants import COMPOSE_FILE, INSTANCE +from .constants import COMPOSE_FILE, INSTANCE, LAB_INSTANCE, USE_OCTOPUS_LAB @pytest.fixture(scope='session') def dd_environment(): - compose_file = COMPOSE_FILE - endpoint = INSTANCE["octopus_endpoint"] - conditions = [ - CheckDockerLogs(identifier='octopus-api', patterns=['server running']), - CheckEndpoints(f'{endpoint}/spaces'), - ] - with docker_run(compose_file, conditions=conditions): - yield INSTANCE + if USE_OCTOPUS_LAB: + yield LAB_INSTANCE + else: + compose_file = COMPOSE_FILE + endpoint = INSTANCE["octopus_endpoint"] + conditions = [ + CheckDockerLogs(identifier='octopus-api', patterns=['server running']), + CheckEndpoints(f'{endpoint}/spaces'), + ] + with docker_run(compose_file, conditions=conditions): + yield INSTANCE @pytest.fixture diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 2218863229c72..10bc8288b0523 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ 
-7,9 +7,20 @@ from datadog_checks.base.utils.time import ensure_aware_datetime from datadog_checks.dev.fs import get_here +USE_OCTOPUS_LAB = os.environ.get("USE_OCTOPUS_LAB") +OCTOPUS_LAB_ENDPOINT = os.environ.get('OCTOPUS_LAB_ENDPOINT') +OCTOPUS_API_KEY = os.environ.get('OCTOPUS_API_KEY') +OCTOPUS_SPACE = os.environ.get('OCTOPUS_SPACE', 'Default') + COMPOSE_FILE = os.path.join(get_here(), 'docker', 'docker-compose.yaml') INSTANCE = {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'} +LAB_INSTANCE = { + 'octopus_endpoint': OCTOPUS_LAB_ENDPOINT, + 'space': OCTOPUS_SPACE, + 'headers': {'X-Octopus-ApiKey': OCTOPUS_API_KEY}, +} + BASE_TIME = ensure_aware_datetime(datetime.datetime.strptime("2024-09-23 14:45:58.888492", '%Y-%m-%d %H:%M:%S.%f')) MOCKED_TIMESTAMPS = [BASE_TIME] * 20 From 7a3d2bd85b9258ca1a59848dbbf5fbff92b0caf0 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Mon, 21 Oct 2024 11:12:01 -0400 Subject: [PATCH 43/44] Address comments --- octopus_deploy/README.md | 11 ++--------- octopus_deploy/assets/configuration/spec.yaml | 2 +- .../octopus_deploy/data/conf.yaml.example | 2 +- 3 files changed, 4 insertions(+), 11 deletions(-) diff --git a/octopus_deploy/README.md b/octopus_deploy/README.md index 36c68fb632b90..bc417a843fe94 100644 --- a/octopus_deploy/README.md +++ b/octopus_deploy/README.md @@ -2,12 +2,7 @@ ## Overview -This check monitors [Octopus Deploy][1] through the Datadog Agent. - -Include a high level overview of what this integration does: -- What does your product do (in 1-2 sentences)? -- What value will customers get from this integration, and why is it valuable to them? -- What specific data will your integration monitor, and what's the value of that data? +This check monitors your [Octopus Deploy][1] deployments through the Datadog Agent. Track information such as average deployment time per Environment, and deployment failure rate for a Project. ## Setup @@ -20,7 +15,7 @@ No additional installation is needed on your server. 
### Configuration -1. Edit the `octopus_deploy.d/conf.yaml` file, in the `conf.d/` folder at the root of your Agent's configuration directory to start collecting your octopus_deploy performance data. See the [sample octopus_deploy.d/conf.yaml][4] for all available configuration options. +1. Edit the `octopus_deploy.d/conf.yaml` file in the `conf.d/` folder at the root of your Agent's configuration directory to start collecting your `octopus_deploy` performance data. See the [sample `octopus_deploy.d/conf.yaml`][4] for all available configuration options. 2. [Restart the Agent][5]. @@ -42,8 +37,6 @@ The Octopus Deploy integration does not include any events. The Octopus Deploy integration does not include any service checks. -See [service_checks.json][8] for a list of service checks provided by this integration. - ## Troubleshooting Need help? Contact [Datadog support][9]. diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index 27c819dbd228d..51fff1090d038 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -29,7 +29,7 @@ files: - name: project_groups display_priority: 5 description: | - filter your integration by project groups and projects. + Filter your integration by project groups and projects. value: type: object properties: diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index b75d832e4078e..25d32dd938c3c 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -65,7 +65,7 @@ instances: X-Octopus-ApiKey: ## @param project_groups - mapping - optional - ## filter your integration by project groups and projects. + ## Filter your integration by project groups and projects. 
# # project_groups: {} From 179597c2462e18ca57e07af2779a2927cf0497e0 Mon Sep 17 00:00:00 2001 From: Sarah Witt Date: Fri, 22 Nov 2024 09:54:03 -0500 Subject: [PATCH 44/44] Update projects logic and refactor (#19087) * Fix new deployment logic and support in progress and queued deployments * sync metadata * update caddyfile * fix * add to completed date * Refactor * fixed e2e tests * fixed instance fixture * fixed e2e test * added some e2e asserts * fixed lint error * all metrics asserted * Add test asserting metadata, add test and handle failure of tasks * store base tags * Don't report completed and executing metrics if no data * Add support for octopus server node metrics * remove unused constants * validate * lint * update e2e * update metadata * Use project name and project group name tags * Use space name instead of space id * Update log lines * update test instance * Update metric description --------- Co-authored-by: Jose Manuel Almaza --- octopus_deploy/assets/configuration/spec.yaml | 53 +- .../datadog_checks/octopus_deploy/check.py | 474 ++++----- .../octopus_deploy/config_models/instance.py | 25 +- .../octopus_deploy/constants.py | 23 - .../octopus_deploy/data/conf.yaml.example | 15 +- .../datadog_checks/octopus_deploy/error.py | 23 - .../octopus_deploy/project_groups.py | 19 - octopus_deploy/metadata.csv | 8 +- octopus_deploy/tests/conftest.py | 47 +- octopus_deploy/tests/constants.py | 910 +---------------- octopus_deploy/tests/docker/Caddyfile | 123 ++- .../response.json | 35 + .../response.json | 0 .../states=Queued,Executing/response.json | 35 + .../response.json | 0 .../response.json | 35 + .../states=Queued,Executing/response.json | 79 ++ .../response.json | 35 + .../response.json | 18 +- .../states=Queued,Executing/response.json | 79 ++ .../response.json | 0 .../response.json | 35 + .../states=Queued,Executing/response.json | 35 + octopus_deploy/tests/test_e2e.py | 4 +- octopus_deploy/tests/test_unit.py | 914 +++++++++++++++--- 25 files changed, 
1573 insertions(+), 1451 deletions(-) delete mode 100644 octopus_deploy/datadog_checks/octopus_deploy/constants.py delete mode 100644 octopus_deploy/datadog_checks/octopus_deploy/error.py delete mode 100644 octopus_deploy/datadog_checks/octopus_deploy/project_groups.py create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/{fromCompletedDate=2024-09-2314:45:58.888492+00:00 => fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00}/response.json (100%) create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/{fromCompletedDate=2024-09-2314:45:58.888492+00:00 => fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00}/response.json (100%) create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/{fromCompletedDate=2024-09-2314:45:58.888492+00:00 => fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00}/response.json (92%) create mode 100644 
octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json rename octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/{fromCompletedDate=2024-09-2314:45:58.888492+00:00 => fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00}/response.json (100%) create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json create mode 100644 octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json diff --git a/octopus_deploy/assets/configuration/spec.yaml b/octopus_deploy/assets/configuration/spec.yaml index 51fff1090d038..ddc9211d6ba79 100644 --- a/octopus_deploy/assets/configuration/spec.yaml +++ b/octopus_deploy/assets/configuration/spec.yaml @@ -17,15 +17,30 @@ files: example: http://localhost:80/api type: string required: true - - name: space - display_priority: 7 + - name: spaces + display_priority: 5 description: | - Space to monitor + Filter your integration by spaces. value: - example: Default - type: string - enabled: true - required: true + type: object + properties: + - name: limit + description: | + Maximum number of spaces to be processed. + type: integer + - name: include + type: array + items: + anyOf: + - type: string + - type: object + - name: exclude + type: array + items: + type: string + - name: interval + type: integer + example: {} - name: project_groups display_priority: 5 description: | @@ -50,6 +65,30 @@ files: - name: interval type: integer example: {} + - name: projects + display_priority: 5 + description: | + Filter your integration by projects. + value: + type: object + properties: + - name: limit + description: | + Maximum number of projects to be processed. 
+ type: integer + - name: include + type: array + items: + anyOf: + - type: string + - type: object + - name: exclude + type: array + items: + type: string + - name: interval + type: integer + example: {} - template: instances/default - template: instances/http overrides: diff --git a/octopus_deploy/datadog_checks/octopus_deploy/check.py b/octopus_deploy/datadog_checks/octopus_deploy/check.py index eec3b057579b5..1c141df6da16d 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/check.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/check.py @@ -1,239 +1,285 @@ # (C) Datadog, Inc. 2024-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -from datetime import datetime, timedelta + +import datetime +from collections.abc import Iterable from requests.exceptions import ConnectionError, HTTPError, InvalidURL, Timeout from datadog_checks.base import AgentCheck from datadog_checks.base.errors import CheckException -from datadog_checks.base.utils.discovery import Discovery -from datadog_checks.base.utils.models.types import copy_raw +from datadog_checks.base.utils.discovery.discovery import Discovery +from datadog_checks.base.utils.time import get_current_datetime +from datadog_checks.octopus_deploy.config_models.instance import ProjectGroups, Projects from .config_models import ConfigMixin -from .constants import ( - API_UP_METRIC, - DEPLOY_COUNT_METRIC, - DEPLOY_DURATION_METRIC, - DEPLOY_QUEUE_TIME_METRIC, - DEPLOY_RERUN_METRIC, - DEPLOY_SUCCESS_METRIC, - DEPLOY_SUCCESS_STATE, - DEPLOY_WARNINGS_METRIC, - PROJECT_COUNT_METRIC, - PROJECT_GROUP_COUNT_METRIC, - SERVER_COUNT_METRIC, - SERVER_MAINTENANCE_MODE_METRIC, - SERVER_MAX_TASKS_METRIC, -) -from .error import handle_error -from .project_groups import Project, ProjectGroup class OctopusDeployCheck(AgentCheck, ConfigMixin): - __NAMESPACE__ = 'octopus_deploy' def __init__(self, name, init_config, instances): super(OctopusDeployCheck, self).__init__(name, init_config, 
instances) + self._from_completed_time = None + self._to_completed_time = None + self.current_datetime = None + self._spaces_discovery = None + self._default_project_groups_discovery = {} self._project_groups_discovery = {} + self._default_projects_discovery = {} self._projects_discovery = {} - self.space_id = None - space_name = self.instance.get("space") - self.base_tags = self.instance.get("tags", []) + [f"space_name:{space_name}"] - self.check_initializations.append(self._get_space_id) - self.check_initializations.append(self._initialize_caches) - - def _initialize_caches(self): - self._initialize_project_groups() - for _, _, project_group, project_group_config in self.project_groups(): - self._initialize_projects(project_group, project_group_config) - - @handle_error - def _get_new_tasks_for_project(self, project): - self.log.debug("Getting new tasks for project %s", project.name) - params = {'project': project.id, 'fromCompletedDate': project.last_task_time} - url = f"{self.config.octopus_endpoint}/{self.space_id}/tasks" - response = self.http.get(url, params=params) - response.raise_for_status() - tasks_json = response.json().get('Items', []) - new_completed_time = project.last_task_time - self.log.debug("Found %s new tasks for project %s", len(tasks_json), project.name) - - for task in tasks_json: - task_id = task.get("Id") - task_name = task.get("Name") - state = task.get("State") - completed_time = task.get("CompletedTime") - start_time = task.get("StartTime") - queue_time = task.get("QueueTime") - can_rerun = int(task.get("CanRerun", False)) - has_warnings = int(task.get("HasWarningsOrErrors", False)) + self._base_tags = self.instance.get("tags", []) - self.log.debug("Found task id=%s, name=%s", task_id, task_name) + def check(self, _): + self._update_times() + self._process_spaces() + self._collect_server_nodes_metrics() + + def _update_times(self): + self.current_datetime = get_current_datetime() + self._from_completed_time = ( + 
self._to_completed_time if self._to_completed_time is not None else self.current_datetime + ) + self._to_completed_time = self.current_datetime - completed_time_converted = datetime.fromisoformat(completed_time) - start_time_converted = datetime.fromisoformat(start_time) - queue_time_converted = datetime.fromisoformat(queue_time) + def _process_endpoint(self, endpoint, params=None, report_service_check=False): + try: + response = self.http.get(f"{self.config.octopus_endpoint}/{endpoint}", params=params) + response.raise_for_status() + if report_service_check: + self.gauge('api.can_connect', 1, tags=self._base_tags) + return response.json() + except (Timeout, HTTPError, InvalidURL, ConnectionError) as e: + if report_service_check: + self.gauge('api.can_connect', 0, tags=self._base_tags) + raise CheckException( + f"Could not connect to octopus API {self.config.octopus_endpoint} octopus_endpoint: {e}" + ) from e + else: + self.warning("Failed to access endpoint: %s: %s", endpoint, e) + return {} + + def _init_spaces_discovery(self): + self.log.info("Spaces discovery: %s", self.config.spaces) + self._spaces_discovery = Discovery( + lambda: self._process_endpoint("api/spaces", report_service_check=True).get('Items', []), + limit=self.config.spaces.limit, + include=normalize_discover_config_include(self.config.spaces), + exclude=self.config.spaces.exclude, + interval=self.config.spaces.interval, + key=lambda space: space.get("Name"), + ) - duration = completed_time_converted - start_time_converted - duration_seconds = duration.total_seconds() + def _init_default_project_groups_discovery(self, space_id): + self.log.info("Default Project Groups discovery: %s", self.config.project_groups) + if space_id not in self._default_project_groups_discovery: + self._default_project_groups_discovery[space_id] = Discovery( + lambda: self._process_endpoint(f"api/{space_id}/projectgroups", report_service_check=True).get( + 'Items', [] + ), + limit=self.config.project_groups.limit, + 
include=normalize_discover_config_include(self.config.project_groups), + exclude=self.config.project_groups.exclude, + interval=self.config.project_groups.interval, + key=lambda project_group: project_group.get("Name"), + ) - queue_time = start_time_converted - queue_time_converted - queue_time_seconds = queue_time.total_seconds() + def _init_project_groups_discovery(self, space_id, project_groups_config): + self.log.info("Project Groups discovery: %s", project_groups_config) + if space_id not in self._project_groups_discovery: + self._project_groups_discovery[space_id] = Discovery( + lambda: self._process_endpoint(f"api/{space_id}/projectgroups", report_service_check=True).get( + 'Items', [] + ), + limit=project_groups_config.limit, + include=normalize_discover_config_include(project_groups_config), + exclude=project_groups_config.exclude, + interval=project_groups_config.interval, + key=lambda project_group: project_group.get("Name"), + ) - if completed_time_converted > new_completed_time: - new_completed_time = completed_time_converted + def _init_default_projects_discovery(self, space_id, project_group_id): + self.log.info("Default Projects discovery: %s", self.config.projects) + if space_id not in self._default_projects_discovery: + self._default_projects_discovery[space_id] = {} + if project_group_id not in self._default_projects_discovery[space_id]: + self._default_projects_discovery[space_id][project_group_id] = Discovery( + lambda: self._process_endpoint( + f"api/{space_id}/projectgroups/{project_group_id}/projects", report_service_check=True + ).get('Items', []), + limit=self.config.projects.limit, + include=normalize_discover_config_include(self.config.projects), + exclude=self.config.projects.exclude, + interval=self.config.projects.interval, + key=lambda project: project.get("Name"), + ) - succeeded = int(state == DEPLOY_SUCCESS_STATE) + def _init_projects_discovery(self, space_id, project_group_id, projects_config): + self.log.info("Projects 
discovery: %s", projects_config) + if space_id not in self._projects_discovery: + self._projects_discovery[space_id] = {} + if project_group_id not in self._projects_discovery[space_id]: + self._projects_discovery[space_id][project_group_id] = Discovery( + lambda: self._process_endpoint( + f"api/{space_id}/projectgroups/{project_group_id}/projects", report_service_check=True + ).get('Items', []), + limit=projects_config.limit, + include=normalize_discover_config_include(projects_config), + exclude=projects_config.exclude, + interval=projects_config.interval, + key=lambda project: project.get("Name"), + ) - project_tags = [ - f"project_id:{project.id}", - f"project_name:{project.name}", - f"project_group_id:{project.project_group.id}", - f"project_group_name:{project.project_group.name}", + def _process_spaces(self): + if self.config.spaces: + if self._spaces_discovery is None: + self._init_spaces_discovery() + spaces = list(self._spaces_discovery.get_items()) + else: + spaces = [ + (None, space.get("Name"), space, None) + for space in self._process_endpoint("api/spaces", report_service_check=True).get('Items', []) ] - - tags = [f'task_name:{task_name}', f'task_state:{state}'] - - self.gauge(DEPLOY_COUNT_METRIC, 1, tags=self.base_tags + project_tags + tags) - self.gauge(DEPLOY_DURATION_METRIC, duration_seconds, tags=self.base_tags + project_tags + tags) - self.gauge(DEPLOY_QUEUE_TIME_METRIC, queue_time_seconds, tags=self.base_tags + project_tags + tags) - self.gauge(DEPLOY_SUCCESS_METRIC, succeeded, tags=self.base_tags + project_tags + tags) - self.gauge(DEPLOY_RERUN_METRIC, can_rerun, tags=self.base_tags + project_tags + tags) - self.gauge(DEPLOY_WARNINGS_METRIC, has_warnings, tags=self.base_tags + project_tags + tags) - - new_completed_time = new_completed_time + timedelta(milliseconds=1) - project.last_completed_time = new_completed_time - - def _initialize_projects(self, project_group, project_group_config): - normalized_projects = 
normalize_discover_config_include( - self.log, project_group_config.get("projects") if project_group_config else None - ) - self.log.debug( - "Projects discovery for project_group %s: %s", - project_group.name, - normalized_projects, - ) - if normalized_projects: - self._projects_discovery[project_group.name] = Discovery( - lambda: self._get_new_projects(project_group), - limit=project_group_config.get('projects').get('limit') if project_group_config else None, - include=normalized_projects, - exclude=project_group_config.get('projects').get('exclude') if project_group_config else None, - interval=(project_group_config.get('projects').get('interval') if project_group_config else None), - key=lambda project: project.name, + self.log.debug("Monitoring %s spaces", len(spaces)) + for _, _, space, space_config in spaces: + space_id = space.get("Id") + space_name = space.get("Name") + tags = self._base_tags + [f'space_id:{space_id}', f'space_name:{space_name}'] + self.gauge("space.count", 1, tags=tags) + self.log.debug("Processing space %s", space_name) + self._process_project_groups( + space_id, space_name, space_config.get("project_groups") if space_config else None ) - else: - self._projects_discovery[project_group.name] = None - - self.log.debug("Discovered projects: %s", self._projects_discovery) - - def _initialize_project_groups(self): - self._project_groups_discovery = None - if self.config.project_groups: - normalized_project_groups = normalize_discover_config_include(self.log, self.config.project_groups) - self.log.info("Project groups discovery: %s", self.config.project_groups) - if normalized_project_groups: - self._project_groups_discovery = Discovery( - lambda: self._get_new_project_groups(), - limit=self.config.project_groups.limit, - include=normalized_project_groups, - exclude=self.config.project_groups.exclude, - interval=self.config.project_groups.interval, - key=lambda project_group: project_group.name, - ) - def projects(self, project_group): - if 
self._projects_discovery.get(project_group.name): - projects = list(self._projects_discovery[project_group.name].get_items()) + def _process_project_groups(self, space_id, space_name, project_groups_config): + if project_groups_config: + self._init_project_groups_discovery(space_id, ProjectGroups(**project_groups_config)) + project_groups = list(self._project_groups_discovery[space_id].get_items()) else: - projects = [(None, project.name, project, None) for project in self._get_new_projects(project_group)] - - return projects - - def collect_project_metrics(self, project_group): - project_group_tags = [ - f"project_group_id:{project_group.id}", - f"project_group_name:{project_group.name}", - ] - self.gauge(PROJECT_GROUP_COUNT_METRIC, 1, tags=self.base_tags + project_group_tags) - - projects = self.projects(project_group) - all_project_names = [project.name for _, _, project, _ in projects] - self.log.info( - "Collecting data from project group: %s, for projects: %s", project_group.name, ",".join(all_project_names) - ) - - for _, _, project, _ in projects: - project_tags = [ - f"project_id:{project.id}", - f"project_name:{project.name}", + if self.config.project_groups: + self._init_default_project_groups_discovery(space_id) + project_groups = list(self._default_project_groups_discovery[space_id].get_items()) + else: + project_groups = [ + (None, project_group.get("Name"), project_group, None) + for project_group in self._process_endpoint(f"api/{space_id}/projectgroups").get('Items', []) + ] + self.log.debug("Monitoring %s Project Groups", len(project_groups)) + for _, _, project_group, project_group_config in project_groups: + project_group_id = project_group.get("Id") + project_group_name = project_group.get("Name") + tags = self._base_tags + [ + f'space_name:{space_name}', + f'project_group_id:{project_group_id}', + f'project_group_name:{project_group_name}', ] - self.gauge(PROJECT_COUNT_METRIC, 1, tags=self.base_tags + project_group_tags + project_tags) - - def 
_get_new_projects(self, project_group): - projects_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups/{project_group.id}/projects" - response = self.http.get(projects_endpoint) - response.raise_for_status() - projects_json = response.json().get('Items', []) - projects = [] - for project in projects_json: - new_project = Project(project, project_group) - projects.append(new_project) - return projects - - def _get_new_project_groups(self): - project_groups_endpoint = f"{self.config.octopus_endpoint}/{self.space_id}/projectgroups" - response = self.http.get(project_groups_endpoint) - response.raise_for_status() - project_groups_json = response.json().get('Items', []) - project_groups = [] - for project_group in project_groups_json: - new_project_group = ProjectGroup(project_group) - project_groups.append(new_project_group) - - all_project_group_names = [project_group.name for project_group in project_groups] - self.log.debug("Found new project groups: %s", all_project_group_names) - return project_groups - - def _get_space_id(self): - spaces_endpoint = f"{self.config.octopus_endpoint}/spaces" - try: - response = self.http.get(spaces_endpoint) - response.raise_for_status() - spaces_json = response.json().get('Items', []) - for space in spaces_json: - space_name = space.get("Name") - if space_name == self.config.space: - self.space_id = space.get("Id") - self.log.debug("Space id for %s found: %s ", self.config.space, self.space_id) - except (Timeout, HTTPError, InvalidURL, ConnectionError): - self.gauge(API_UP_METRIC, 0, tags=self.base_tags) - - raise CheckException(f"Could not connect to octopus API {self.config.octopus_endpoint}octopus_endpoint") - - self.gauge(API_UP_METRIC, 1, tags=self.base_tags) - - if self.space_id is None: - raise CheckException(f"Space ID not found for provided space name {self.config.space}, does it exist?") + self.gauge("project_group.count", 1, tags=tags) + self._process_projects( + space_id, + space_name, + 
project_group_id, + project_group_name, + project_group_config.get("projects") if project_group_config else None, + ) - def project_groups(self): - if self._project_groups_discovery: - project_groups = list(self._project_groups_discovery.get_items()) + def _process_projects(self, space_id, space_name, project_group_id, project_group_name, projects_config): + if projects_config: + self._init_projects_discovery(space_id, project_group_id, Projects(**projects_config)) + projects = list(self._projects_discovery[space_id][project_group_id].get_items()) else: - project_groups = [ - (None, project_groups.name, project_groups, None) for project_groups in self._get_new_project_groups() + if self.config.projects: + self._init_default_projects_discovery(space_id, project_group_id) + projects = list(self._default_projects_discovery[space_id][project_group_id].get_items()) + else: + projects = [ + (None, project.get("Name"), project, None) + for project in self._process_endpoint( + f"api/{space_id}/projectgroups/{project_group_id}/projects" + ).get('Items', []) + ] + self.log.debug("Monitoring %s Projects", len(projects)) + for _, _, project, _ in projects: + project_id = project.get("Id") + project_name = project.get("Name") + tags = self._base_tags + [ + f'space_name:{space_name}', + f'project_group_name:{project_group_name}', + f'project_id:{project_id}', + f'project_name:{project_name}', ] - return project_groups + self.gauge("project.count", 1, tags=tags) + self._process_queued_and_running_tasks(space_id, space_name, project_id, project_name) + self._process_completed_tasks(space_id, space_name, project_id, project_name) + + def _process_queued_and_running_tasks(self, space_id, space_name, project_id, project_name): + self.log.debug("Collecting running and queued tasks for project %s", project_name) + params = {'project': project_id, 'states': ["Queued", "Executing"]} + response_json = self._process_endpoint(f"api/{space_id}/tasks", params) + 
self._process_tasks(space_name, project_name, response_json.get('Items', [])) + + def _process_completed_tasks(self, space_id, space_name, project_id, project_name): + self.log.debug("Collecting completed tasks for project %s", project_name) + params = { + 'project': project_id, + 'fromCompletedDate': self._from_completed_time, + 'toCompletedDate': self._to_completed_time, + } + response_json = self._process_endpoint(f"api/{space_id}/tasks", params) + self._process_tasks(space_name, project_name, response_json.get('Items', [])) + + def _calculate_task_times(self, task): + task_queue_time = task.get("QueueTime") + task_start_time = task.get("StartTime") + task_completed_time = task.get("CompletedTime") + if task_start_time: + queued_time = ( + datetime.datetime.fromisoformat(task_start_time) - datetime.datetime.fromisoformat(task_queue_time) + ).total_seconds() + if task_completed_time: + executing_time = ( + datetime.datetime.fromisoformat(task_completed_time) + - datetime.datetime.fromisoformat(task_start_time) + ).total_seconds() + completed_time = ( + self.current_datetime - datetime.datetime.fromisoformat(task_completed_time) + ).total_seconds() + else: + executing_time = ( + self.current_datetime - datetime.datetime.fromisoformat(task_start_time) + ).total_seconds() + completed_time = -1 + else: + queued_time = (self.current_datetime - datetime.datetime.fromisoformat(task_queue_time)).total_seconds() + executing_time = -1 + completed_time = -1 + return queued_time, executing_time, completed_time - @handle_error - def collect_server_nodes_metrics(self): + def _process_tasks(self, space_name, project_name, tasks_json): + self.log.debug("Discovered %s tasks for project %s", len(tasks_json), project_name) + for task in tasks_json: + task_id = task.get("Id") + tags = self._base_tags + [ + f'space_name:{space_name}', + f'project_name:{project_name}', + f'task_id:{task_id}', + f'task_name:{task.get("Name")}', + f'task_state:{task.get("State")}', + ] + 
self.log.debug("Processing task id %s for project %s", task_id, project_name) + queued_time, executing_time, completed_time = self._calculate_task_times(task) + self.gauge("deployment.count", 1, tags=tags) + self.gauge("deployment.queued_time", queued_time, tags=tags) + if executing_time != -1: + self.gauge("deployment.executing_time", executing_time, tags=tags) + if completed_time != -1: + self.gauge("deployment.completed_time", completed_time, tags=tags) + + def _collect_server_nodes_metrics(self): self.log.debug("Collecting server node metrics.") - url = f"{self.config.octopus_endpoint}/octopusservernodes" - response = self.http.get(url) - response.raise_for_status() - server_nodes = response.json().get('Items', []) + url = "api/octopusservernodes" + response_json = self._process_endpoint(url) + server_nodes = response_json.get('Items', []) for server_node in server_nodes: node_id = server_node.get("Id") @@ -241,35 +287,23 @@ def collect_server_nodes_metrics(self): maintenance_mode = int(server_node.get("IsInMaintenanceMode", False)) max_tasks = int(server_node.get("MaxConcurrentTasks", 0)) server_tags = [f"server_node_id:{node_id}", f"server_node_name:{node_name}"] - - self.gauge(SERVER_COUNT_METRIC, 1, tags=self.base_tags + server_tags) - self.gauge(SERVER_MAINTENANCE_MODE_METRIC, maintenance_mode, tags=self.base_tags + server_tags) - self.gauge(SERVER_MAX_TASKS_METRIC, max_tasks, tags=self.base_tags + server_tags) - - def check(self, _): - for _, _, project_group, _ in self.project_groups(): - self.collect_project_metrics(project_group) - for _, _, project, _ in self.projects(project_group): - self._get_new_tasks_for_project(project) - - self.collect_server_nodes_metrics() + self.gauge("server_node.count", 1, tags=self._base_tags + server_tags) + self.gauge("server_node.in_maintenance_mode", maintenance_mode, tags=self._base_tags + server_tags) + self.gauge("server_node.max_concurrent_tasks", max_tasks, tags=self._base_tags + server_tags) # Discovery class 
requires 'include' to be a dict, so this function is needed to normalize the config -def normalize_discover_config_include(log, config): +def normalize_discover_config_include(config): normalized_config = {} - log.debug("normalize_discover_config_include config: %s", config) - include_list = config.get('include') if isinstance(config, dict) else copy_raw(config.include) if config else [] - log.debug("normalize_discover_config_include include_list: %s", include_list) - if not isinstance(include_list, list): - raise TypeError('Setting `include` must be an array') + include_list = ( + list(getattr(config, 'include', [])) if isinstance(getattr(config, 'include', None), Iterable) else [] + ) if len(include_list) == 0: return {} for entry in include_list: if isinstance(entry, str): normalized_config[entry] = None - # entry is dict - else: + elif hasattr(entry, 'items'): for key, value in entry.items(): normalized_config[key] = value.copy() return normalized_config diff --git a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py index daf4d9c57ee21..c03756d3e67bb 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py +++ b/octopus_deploy/datadog_checks/octopus_deploy/config_models/instance.py @@ -49,6 +49,17 @@ class ProjectGroups(BaseModel): limit: Optional[int] = Field(None, description='Maximum number of project groups to be processed.\n') +class Projects(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + frozen=True, + ) + exclude: Optional[tuple[str, ...]] = None + include: Optional[tuple[Union[str, MappingProxyType[str, Any]], ...]] = None + interval: Optional[int] = None + limit: Optional[int] = Field(None, description='Maximum number of projects to be processed.\n') + + class Proxy(BaseModel): model_config = ConfigDict( arbitrary_types_allowed=True, @@ -59,6 +70,17 @@ class Proxy(BaseModel): no_proxy: Optional[tuple[str, 
...]] = None +class Spaces(BaseModel): + model_config = ConfigDict( + arbitrary_types_allowed=True, + frozen=True, + ) + exclude: Optional[tuple[str, ...]] = None + include: Optional[tuple[Union[str, MappingProxyType[str, Any]], ...]] = None + interval: Optional[int] = None + limit: Optional[int] = Field(None, description='Maximum number of spaces to be processed.\n') + + class InstanceConfig(BaseModel): model_config = ConfigDict( validate_default=True, @@ -91,12 +113,13 @@ class InstanceConfig(BaseModel): password: Optional[str] = None persist_connections: Optional[bool] = None project_groups: Optional[ProjectGroups] = None + projects: Optional[Projects] = None proxy: Optional[Proxy] = None read_timeout: Optional[float] = None request_size: Optional[float] = None service: Optional[str] = None skip_proxy: Optional[bool] = None - space: str + spaces: Optional[Spaces] = None tags: Optional[tuple[str, ...]] = None timeout: Optional[float] = None tls_ca_cert: Optional[str] = None diff --git a/octopus_deploy/datadog_checks/octopus_deploy/constants.py b/octopus_deploy/datadog_checks/octopus_deploy/constants.py deleted file mode 100644 index 6bc726d3be640..0000000000000 --- a/octopus_deploy/datadog_checks/octopus_deploy/constants.py +++ /dev/null @@ -1,23 +0,0 @@ -# (C) Datadog, Inc. 
2024-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) - -API_UP_METRIC = "api.can_connect" -SPACE_COUNT_METRIC = "space.count" -PROJECT_GROUP_COUNT_METRIC = "project_group.count" -PROJECT_COUNT_METRIC = "project.count" -DEPLOY_PREFIX = "deployment" -DEPLOY_COUNT_METRIC = f"{DEPLOY_PREFIX}.count" -DEPLOY_DURATION_METRIC = f"{DEPLOY_PREFIX}.duration" -DEPLOY_QUEUE_TIME_METRIC = f"{DEPLOY_PREFIX}.queue_time" -DEPLOY_SUCCESS_METRIC = f"{DEPLOY_PREFIX}.succeeded" -DEPLOY_RERUN_METRIC = f"{DEPLOY_PREFIX}.can_rerun" -DEPLOY_WARNINGS_METRIC = f"{DEPLOY_PREFIX}.has_warnings_or_errors" - -SERVER_PREFIX = "server_node" -SERVER_COUNT_METRIC = f"{SERVER_PREFIX}.count" -SERVER_MAINTENANCE_MODE_METRIC = f"{SERVER_PREFIX}.in_maintenance_mode" -SERVER_MAX_TASKS_METRIC = f"{SERVER_PREFIX}.max_concurrent_tasks" - - -DEPLOY_SUCCESS_STATE = "Success" diff --git a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example index 25d32dd938c3c..ba891f699d5a6 100644 --- a/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example +++ b/octopus_deploy/datadog_checks/octopus_deploy/data/conf.yaml.example @@ -51,11 +51,6 @@ instances: # - octopus_endpoint: http://localhost:80/api - ## @param space - string - required - ## Space to monitor - # - space: Default - ## @param headers - mapping - optional ## Headers to use for every request. An Authorization header including the Octopus Deploy API key token is required ## for authentication for the REST API. @@ -64,11 +59,21 @@ instances: headers: X-Octopus-ApiKey: + ## @param spaces - mapping - optional + ## Filter your integration by spaces. + # + # spaces: {} + ## @param project_groups - mapping - optional ## Filter your integration by project groups and projects. # # project_groups: {} + ## @param projects - mapping - optional + ## Filter your integration by projects. 
+ # + # projects: {} + ## @param auth_token - mapping - optional ## This allows for the use of authentication information from dynamic sources. ## Both a reader and writer must be configured. diff --git a/octopus_deploy/datadog_checks/octopus_deploy/error.py b/octopus_deploy/datadog_checks/octopus_deploy/error.py deleted file mode 100644 index 0fc6840589693..0000000000000 --- a/octopus_deploy/datadog_checks/octopus_deploy/error.py +++ /dev/null @@ -1,23 +0,0 @@ -# (C) Datadog, Inc. 2024-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from functools import wraps - -import requests - - -def handle_error(f): - @wraps(f) - def wrapper(check, *args, **kwargs): - try: - result = f(check, *args, **kwargs) - return result - except requests.exceptions.RequestException as e: - check.log.info( - "Encountered a RequestException in '%s' [%s]: %s", - f.__name__, - type(e), - e, - ) - - return wrapper diff --git a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py b/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py deleted file mode 100644 index 1c1a561a0df1e..0000000000000 --- a/octopus_deploy/datadog_checks/octopus_deploy/project_groups.py +++ /dev/null @@ -1,19 +0,0 @@ -# (C) Datadog, Inc. 
2024-present -# All rights reserved -# Licensed under a 3-clause BSD style license (see LICENSE) -from datadog_checks.base.utils.time import get_current_datetime - - -class ProjectGroup: - def __init__(self, project_group_json): - self.id = project_group_json.get("Id") - self.name = project_group_json.get("Name") - self.projects = None - - -class Project: - def __init__(self, project_json, project_group): - self.id = project_json.get("Id") - self.name = project_json.get("Name") - self.project_group = project_group - self.last_task_time = get_current_datetime() diff --git a/octopus_deploy/metadata.csv b/octopus_deploy/metadata.csv index 7d38572c8996d..a41002aa00758 100644 --- a/octopus_deploy/metadata.csv +++ b/octopus_deploy/metadata.csv @@ -1,11 +1,9 @@ metric_name,metric_type,interval,unit_name,per_unit_name,description,orientation,integration,short_name,curated_metric,sample_tags octopus_deploy.api.can_connect,gauge,,,,Whether or not the check can connect to the Octopus Deploy API.,-1,octopus_deploy,octopus_deploy api,, -octopus_deploy.deployment.can_rerun,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy deploy rerun,, +octopus_deploy.deployment.completed_time,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, octopus_deploy.deployment.count,gauge,,,,Number of deployments monitored.,-1,octopus_deploy,octopus_deploy deploy count,, -octopus_deploy.deployment.duration,gauge,,second,,Duration of deployment.,-1,octopus_deploy,octopus_deploy deploy dur,, -octopus_deploy.deployment.has_warnings_or_errors,gauge,,,,Whether or not the deployment can be rerun.,-1,octopus_deploy,octopus_deploy warnings,, -octopus_deploy.deployment.queue_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, -octopus_deploy.deployment.succeeded,gauge,,,,Whether or not the deployment succeeded.,-1,octopus_deploy,octopus_deploy deploy success,, 
+octopus_deploy.deployment.executing_time,gauge,,second,,How long the deployment has been executing.,-1,octopus_deploy,octopus_deploy deploy dur,, +octopus_deploy.deployment.queued_time,gauge,,second,,Time deployment was in queue.,-1,octopus_deploy,octopus_deploy deploy queue,, octopus_deploy.project.count,gauge,,,,Number of projects discovered.,-1,octopus_deploy,octopus_deploy projects count,, octopus_deploy.project_group.count,gauge,,,,Number of project groups discovered.,-1,octopus_deploy,octopus_deploy project group count,, octopus_deploy.server_node.count,gauge,,,,Number of Octopus server nodes discovered.,-1,octopus_deploy,octopus_deploy server count,, diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index 3aaf2a4d80725..47ae09e002512 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -13,6 +13,7 @@ from datadog_checks.dev import docker_run from datadog_checks.dev.conditions import CheckDockerLogs, CheckEndpoints from datadog_checks.dev.fs import get_here +from datadog_checks.dev.http import MockResponse from .constants import COMPOSE_FILE, INSTANCE, LAB_INSTANCE, USE_OCTOPUS_LAB @@ -26,7 +27,7 @@ def dd_environment(): endpoint = INSTANCE["octopus_endpoint"] conditions = [ CheckDockerLogs(identifier='octopus-api', patterns=['server running']), - CheckEndpoints(f'{endpoint}/spaces'), + CheckEndpoints(f'{endpoint}/api/spaces'), ] with docker_run(compose_file, conditions=conditions): yield INSTANCE @@ -34,7 +35,7 @@ def dd_environment(): @pytest.fixture def instance(): - return {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'} + return INSTANCE def get_json_value_from_file(file_path): @@ -55,9 +56,7 @@ def process_files(dir, response_parent): for file in dir.rglob('*'): if file.is_file() and file.stem != ".slash": relative_dir_path = ( - "/" - + (str(file.parent.relative_to(dir)) if str(file.parent.relative_to(dir)) != "." 
else "") - + ("/" if (file.parent / ".slash").is_file() else "") + "/" + str(file.parent.relative_to(dir)) + ("/" if (file.parent / ".slash").is_file() else "") ) if relative_dir_path not in response_parent: response_parent[relative_dir_path] = {} @@ -70,14 +69,24 @@ def process_dir(dir, response_parent): def create_responses_tree(): root_dir_path = os.path.join(get_here(), 'fixtures') - method_subdirs = [d for d in Path(root_dir_path).iterdir() if d.is_dir() and d.name == 'GET'] + method_subdirs = [d for d in Path(root_dir_path).iterdir() if d.is_dir() and d.name in ['GET', 'POST']] for method_subdir in method_subdirs: process_dir(method_subdir, responses_map) def method(method, url, file='response', headers=None, params=None): filename = file request_path = url - + request_path = request_path.replace('?', '/') + if params: + param_string = "" + for key, val in params.items(): + if type(val) is list: + val_string = ','.join(f'{str(val_item)}' for val_item in val) + else: + val_string = str(val) + param_string += ("/" if param_string else "") + f'{key}={val_string}' + request_path = '{}/{}'.format(url, param_string) + print(request_path) response = responses_map.get(method, {}).get(request_path, {}).get(filename) return response @@ -88,7 +97,6 @@ def method(method, url, file='response', headers=None, params=None): @pytest.fixture def mock_http_call(mock_responses): def call(method, url, file='response', headers=None, params=None): - response = mock_responses(method, url, file=file, headers=headers, params=params) if response is not None: return response @@ -105,25 +113,22 @@ def call(method, url, file='response', headers=None, params=None): def mock_http_get(request, monkeypatch, mock_http_call): param = request.param if hasattr(request, 'param') and request.param is not None else {} http_error = param.pop('http_error', {}) + data = param.pop('mock_data', {}) + elapsed_total_seconds = param.pop('elapsed_total_seconds', {}) def get(url, *args, **kwargs): method = 
'GET' url = get_url_path(url) - request_path = url.replace('?', '/') + if http_error and url in http_error: + return http_error[url] + if data and url in data: + return MockResponse(json_data=data[url], status_code=200) + headers = kwargs.get('headers') params = kwargs.get('params') - if params: - param_string = '/'.join(f'{key}={str(val)}' for key, val in params.items()) - request_path = f'{url}/{param_string}' - - request_path = request_path.replace(" ", "") - if http_error and request_path in http_error: - return http_error[request_path] - + mock_elapsed = mock.MagicMock(total_seconds=mock.MagicMock(return_value=elapsed_total_seconds.get(url, 0.0))) + mock_json = mock.MagicMock(return_value=mock_http_call(method, url, headers=headers, params=params)) mock_status_code = mock.MagicMock(return_value=200) - headers = kwargs.get('headers') - - mock_json = mock.MagicMock(return_value=mock_http_call(method, request_path, headers=headers)) - return mock.MagicMock(json=mock_json, status_code=mock_status_code) + return mock.MagicMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code) mock_get = mock.MagicMock(side_effect=get) monkeypatch.setattr('requests.get', mock_get) diff --git a/octopus_deploy/tests/constants.py b/octopus_deploy/tests/constants.py index 10bc8288b0523..23095bff383b3 100644 --- a/octopus_deploy/tests/constants.py +++ b/octopus_deploy/tests/constants.py @@ -13,11 +13,10 @@ OCTOPUS_SPACE = os.environ.get('OCTOPUS_SPACE', 'Default') COMPOSE_FILE = os.path.join(get_here(), 'docker', 'docker-compose.yaml') -INSTANCE = {'octopus_endpoint': 'http://localhost:80/api', 'space': 'Default'} +INSTANCE = {'octopus_endpoint': 'http://localhost:80'} LAB_INSTANCE = { 'octopus_endpoint': OCTOPUS_LAB_ENDPOINT, - 'space': OCTOPUS_SPACE, 'headers': {'X-Octopus-ApiKey': OCTOPUS_API_KEY}, } @@ -27,913 +26,14 @@ ALL_METRICS = [ + "octopus_deploy.space.count", "octopus_deploy.project_group.count", "octopus_deploy.project.count", 
"octopus_deploy.deployment.count", - "octopus_deploy.deployment.duration", - "octopus_deploy.deployment.has_warnings_or_errors", - "octopus_deploy.deployment.queue_time", - "octopus_deploy.deployment.succeeded", - "octopus_deploy.deployment.can_rerun", + "octopus_deploy.deployment.queued_time", + "octopus_deploy.deployment.executing_time", + "octopus_deploy.deployment.completed_time", "octopus_deploy.server_node.count", "octopus_deploy.server_node.in_maintenance_mode", "octopus_deploy.server_node.max_concurrent_tasks", ] - -PROJECT_GROUP_ALL_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 1, - }, -] - -PROJECT_GROUP_ONLY_TEST_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 1, - }, -] - -PROJECT_GROUP_NO_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", 
"project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 0, - }, -] -PROJECT_GROUP_NO_TEST_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:Default Project Group", "project_group_id:ProjectGroups-1", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:hello", "project_group_id:ProjectGroups-3", "space_name:Default"], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project_group.count', - 'tags': ["project_group_name:test-group", "project_group_id:ProjectGroups-2", "space_name:Default"], - 'count': 0, - }, -] - -PROJECT_ALL_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 1, - }, -] - -PROJECT_ONLY_TEST_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_ONLY_DEFAULT_GROUP_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 1, - }, -] - -PROJECT_ONLY_TEST_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 1, - }, -] - -PROJECT_ONLY_HI_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_ONLY_HI_MY_PROJECT_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_EXCLUDE_TEST_API_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - "space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - -PROJECT_NO_METRICS = [ - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:test-group", - "project_group_id:ProjectGroups-2", - 
"space_name:Default", - "project_name:hi", - "project_id:Projects-4", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:my-project", - "project_id:Projects-2", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - ], - 'count': 0, - }, - { - 'name': 'octopus_deploy.project.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - ], - 'count': 0, - }, -] - - -DEPLOYMENT_METRICS = [ - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 2.073, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0.639, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - 
"project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 2.134, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0.67, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 
'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 6.267, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - 'value': 0.631, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - 
"space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 3.192, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test-api", - "project_id:Projects-1", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 1, - 'value': 0.613, - }, -] - - -DEPLOYMENT_METRICS_NO_PROJECT_1 = [ - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - 
"task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Success", - ], - 'count': 2, - }, - { - 'name': 'octopus_deploy.deployment.count', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.succeeded', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.can_rerun', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.has_warnings_or_errors', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.duration', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - "project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, - { - 'name': 'octopus_deploy.deployment.queue_time', - 'tags': [ - "project_group_name:Default Project Group", - "project_group_id:ProjectGroups-1", - "space_name:Default", - 
"project_name:test", - "project_id:Projects-3", - "task_name:Deploy", - "task_state:Failed", - ], - 'count': 1, - }, -] - -SERVER_NODES_METRICS = [ - { - 'name': 'octopus_deploy.server_node.count', - 'tags': [ - "space_name:Default", - "server_node_name:octopus-i8932-79236734bc234-09h234n", - "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", - ], - 'count': 1, - 'value': 1, - }, - { - 'name': 'octopus_deploy.server_node.in_maintenance_mode', - 'tags': [ - "space_name:Default", - "server_node_name:octopus-i8932-79236734bc234-09h234n", - "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", - ], - 'count': 1, - 'value': 0, - }, - { - 'name': 'octopus_deploy.server_node.max_concurrent_tasks', - 'tags': [ - "space_name:Default", - "server_node_name:octopus-i8932-79236734bc234-09h234n", - "server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n", - ], - 'count': 1, - 'value': 5, - }, -] diff --git a/octopus_deploy/tests/docker/Caddyfile b/octopus_deploy/tests/docker/Caddyfile index 2316a2a225c0c..541b9e3f4d112 100644 --- a/octopus_deploy/tests/docker/Caddyfile +++ b/octopus_deploy/tests/docker/Caddyfile @@ -4,117 +4,116 @@ } :8080 { root * /usr/share/caddy/ - @get_tasks_project_1 { + @get_spaces { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-1') + path /api/spaces } - route @get_tasks_project_1 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route @get_spaces { + rewrite * "/GET/api/spaces/response.json" file_server } - @get_tasks_project_2 { + @get_projectgroups { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-2') + path /api/Spaces-1/projectgroups } - route @get_tasks_project_2 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route 
@get_projectgroups { + rewrite * /GET/api/Spaces-1/projectgroups/response.json file_server } - @get_tasks_project_3 { + @get_projectgroups_1_projects { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-3') + path /api/Spaces-1/projectgroups/ProjectGroups-1/projects } - route @get_tasks_project_3 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route @get_projectgroups_1_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json file_server } - @get_tasks_project_4 { + @get_projectgroups_2_projects { method GET - path /api/Spaces-1/tasks* - expression {uri}.contains('?') - expression {uri}.contains('Projects-4') + path /api/Spaces-1/projectgroups/ProjectGroups-2/projects } - route @get_tasks_project_4 { - rewrite * /GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json + route @get_projectgroups_2_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json file_server } - @get_spaces { + @get_projectgroups_3_projects { method GET - path /api/spaces + path /api/Spaces-1/projectgroups/ProjectGroups-3/projects } - route @get_spaces { - rewrite * /GET/api/spaces/response.json + route @get_projectgroups_3_projects { + rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json file_server } - @get_spaces_1 { + @get_running_tasks_project_1 { method GET - path /api/Spaces-1 + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-1") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_spaces_1 { - rewrite * /GET/api/Spaces-1/response.json + route @get_running_tasks_project_1 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json file_server } - @get_projectgroups { + @get_running_tasks_project_2 { method 
GET - path /api/Spaces-1/projectgroups + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-2") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_projectgroups { - rewrite * /GET/api/Spaces-1/projectgroups/response.json + route @get_running_tasks_project_2 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json file_server } - - @get_projectgroups_1 { + @get_running_tasks_project_3 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-1 + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-3") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_projectgroups_1 { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/response.json + route @get_running_tasks_project_3 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json file_server } - @get_projectgroups_2 { + @get_running_tasks_project_4 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-2 + path /api/Spaces-1/tasks + expression {query.project}.contains("Projects-4") && {query.states}.contains("Queued") && {query.states}.contains("Executing") } - route @get_projectgroups_2 { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/response.json + route @get_running_tasks_project_4 { + rewrite * /GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json file_server } - @get_projectgroups_3 { + @get_completed_tasks_project_1 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-3 + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-1") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_3 { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/response.json + route @get_completed_tasks_project_1 { + rewrite * 
"/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } - @get_projectgroups_1_projects { + @get_completed_tasks_project_2 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-1/projects + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-2") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_1_projects { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-1/projects/response.json + route @get_completed_tasks_project_2 { + rewrite * "/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } - @get_projectgroups_2_projects { + @get_completed_tasks_project_3 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-2/projects + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-3") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_2_projects { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-2/projects/response.json + route @get_completed_tasks_project_3 { + rewrite * "/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } - @get_projectgroups_3_projects { + @get_completed_tasks_project_4 { method GET - path /api/Spaces-1/projectgroups/ProjectGroups-3/projects + path /api/Spaces-1/tasks* + expression {query.project}.contains("Projects-4") && {query}.contains("fromCompletedDate") && {query}.contains("toCompletedDate") } - route @get_projectgroups_3_projects { - rewrite * /GET/api/Spaces-1/projectgroups/ProjectGroups-3/projects/response.json + route @get_completed_tasks_project_4 { + rewrite * 
"/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json" file_server } @get_octopusservernodes { @@ -125,6 +124,6 @@ rewrite * /GET/api/octopusservernodes/response.json file_server } - + file_server browse } diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json new file mode 100644 index 0000000000000..163d82fa86e94 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 12, + "Queued": 0, + "Success": 1779, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git 
a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..3657788aaaedc --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-1/states=Queued,Executing/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 2, + "Queued": 0, + "Success": 1, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": 
"/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json new file mode 100644 index 0000000000000..79bccd61a6f32 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 12, + "Queued": 0, + "Success": 1783, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + 
"Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..49bdeab0175bb --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-2/states=Queued,Executing/response.json @@ -0,0 +1,79 @@ +{ + "ItemType": "Task", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 1, + "Failed": 2, + "Queued": 0, + "Success": 1, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [ + { + "Id": "ServerTasks-118048", + "SpaceId": "Spaces-1", + "EstimatedRemainingQueueDurationSeconds": 0, + "Name": "Deploy", + "Description": "Deploy hello release 0.0.36 to staging", + "Arguments": { + "DeploymentId": "Deployments-111" + }, + "State": "Executing", + "Completed": "Executing...", + "QueueTime": "2024-09-23T14:42:00.123+00:00", + "QueueTimeExpiry": null, + "StartTime": "2024-09-23T14:42:30.123+00:00", + "LastUpdatedTime": "2024-11-05T18:59:25.395+00:00", + "CompletedTime": null, + "ServerNode": "OctopusServerNodes-50c3dfbarc82", + "Duration": "18 seconds", + "ErrorMessage": "", + "HasBeenPickedUpByProcessor": true, + 
"IsCompleted": false, + "FinishedSuccessfully": false, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": false, + "UnmetPreconditions": null, + "ProjectId": "Projects-2", + "Links": { + "Self": "/api/tasks/ServerTasks-118048", + "Web": "/app#/Spaces-1/tasks/ServerTasks-118048", + "Raw": "/api/tasks/ServerTasks-118048/raw", + "Rerun": "/api/tasks/rerun/ServerTasks-118048", + "Cancel": "/api/tasks/ServerTasks-118048/cancel", + "State": "/api/tasks/ServerTasks-118048/state", + "BlockedBy": "/api/tasks/ServerTasks-118048/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-118048/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-118048/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-118048/status/messages", + "Prioritize": "/api/tasks/ServerTasks-118048/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-118048", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-118048" + } + } + ], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } + } \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json new file mode 100644 index 0000000000000..9c020bbbd218d --- /dev/null +++ 
b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 3, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 13, + "Queued": 0, + "Success": 1783, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json similarity index 92% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json index fcefac4dceb19..042a162b333cc 100644 --- 
a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json @@ -35,11 +35,11 @@ }, "State": "Failed", "Completed": "Monday, 23 September 2024 3:10:03 PM +00:00", - "QueueTime": "2024-09-23T15:09:56.364+00:00", + "QueueTime": "2024-09-23T14:42:30.123+00:00", "QueueTimeExpiry": null, - "StartTime": "2024-09-23T15:09:56.995+00:00", + "StartTime": "2024-09-23T14:44:20.123+00:00", "LastUpdatedTime": "2024-09-23T15:10:03.262+00:00", - "CompletedTime": "2024-09-23T15:10:03.262+00:00", + "CompletedTime": "2024-09-23T14:45:10.123+00:00", "ServerNode": "OctopusServerNodes-50c3dfbarc82", "Duration": "6 seconds", "ErrorMessage": "The deployment failed because one or more steps failed. Please see the deployment log for details.", @@ -77,11 +77,11 @@ }, "State": "Success", "Completed": "Monday, 23 September 2024 3:00:28 PM +00:00", - "QueueTime": "2024-09-23T15:00:25.468+00:00", + "QueueTime": "2024-09-23T14:42:50.123+00:00", "QueueTimeExpiry": null, - "StartTime": "2024-09-23T15:00:26.138+00:00", + "StartTime": "2024-09-23T14:44:20.123+00:00", "LastUpdatedTime": "2024-09-23T15:00:28.272+00:00", - "CompletedTime": "2024-09-23T15:00:28.272+00:00", + "CompletedTime": "2024-09-23T14:45:14.123+00:00", "ServerNode": "OctopusServerNodes-50c3dfbarc82", "Duration": "2 seconds", "ErrorMessage": "", @@ -119,11 +119,11 @@ }, "State": "Success", "Completed": "Monday, 23 September 2024 3:00:21 PM +00:00", - "QueueTime": "2024-09-23T15:00:19.040+00:00", + "QueueTime": "2024-09-23T14:44:02.123+00:00", "QueueTimeExpiry": null, - "StartTime": "2024-09-23T15:00:19.679+00:00", + "StartTime": "2024-09-23T14:44:20.123+00:00", "LastUpdatedTime": "2024-09-23T15:00:21.752+00:00", - "CompletedTime": "2024-09-23T15:00:21.752+00:00", + 
"CompletedTime": "2024-09-23T14:45:01.123+00:00", "ServerNode": "OctopusServerNodes-50c3dfbarc82", "Duration": "2 seconds", "ErrorMessage": "", diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..dca5a6d5a33a1 --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-3/states=Queued,Executing/response.json @@ -0,0 +1,79 @@ +{ + "ItemType": "Task", + "TotalResults": 1, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 1, + "Failed": 2, + "Queued": 1, + "Success": 1879, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [ + { + "Id": "ServerTasks-118055", + "SpaceId": "Spaces-1", + "EstimatedRemainingQueueDurationSeconds": 0, + "Name": "Deploy", + "Description": "Deploy test release 0.0.41 to dev", + "Arguments": { + "DeploymentId": "Deployments-118" + }, + "State": "Queued", + "Completed": "Queued...", + "QueueTime": "2024-09-23T14:44:00.123+00:00", + "QueueTimeExpiry": null, + "StartTime": null, + "LastUpdatedTime": "2024-11-05T19:13:49.523+00:00", + "CompletedTime": null, + "ServerNode": null, + "Duration": "9 seconds", + "ErrorMessage": "", + "HasBeenPickedUpByProcessor": false, + "IsCompleted": false, + "FinishedSuccessfully": false, + "HasPendingInterruptions": false, + "CanRerun": false, + "HasWarningsOrErrors": false, + "UnmetPreconditions": null, + "ProjectId": "Projects-3", + "Links": { + "Self": "/api/tasks/ServerTasks-118055", + "Web": "/app#/Spaces-1/tasks/ServerTasks-118055", + "Raw": "/api/tasks/ServerTasks-118055/raw", + "Rerun": 
"/api/tasks/rerun/ServerTasks-118055", + "Cancel": "/api/tasks/ServerTasks-118055/cancel", + "State": "/api/tasks/ServerTasks-118055/state", + "BlockedBy": "/api/tasks/ServerTasks-118055/blockedby", + "QueuedBehind": "/api/tasks/ServerTasks-118055/queued-behind{?skip,take}", + "Details": "/api/tasks/ServerTasks-118055/details{?verbose,tail,ranges}", + "StatusMessages": "/api/tasks/ServerTasks-118055/status/messages", + "Prioritize": "/api/tasks/ServerTasks-118055/prioritize", + "Artifacts": "/api/Spaces-1/artifacts?regarding=ServerTasks-118055", + "Interruptions": "/api/Spaces-1/interruptions?regarding=ServerTasks-118055" + } + } + ], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json similarity index 100% rename from octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-2314:45:58.888492+00:00/response.json rename to octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:00.123000+00:00/response.json diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 
14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json new file mode 100644 index 0000000000000..aa05b4987d9be --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/fromCompletedDate=2024-09-23 14:45:00.123000+00:00/toCompletedDate=2024-09-23 14:45:15.123000+00:00/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + "TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 3, + "Queued": 0, + "Success": 1763, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json new file mode 100644 index 0000000000000..3657788aaaedc --- /dev/null +++ b/octopus_deploy/tests/fixtures/GET/api/Spaces-1/tasks/project=Projects-4/states=Queued,Executing/response.json @@ -0,0 +1,35 @@ +{ + "ItemType": "Task", + 
"TotalResults": 0, + "ItemsPerPage": 30, + "NumberOfPages": 1, + "LastPageNumber": 0, + "TotalCounts": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 2, + "Queued": 0, + "Success": 1, + "TimedOut": 0, + "Interrupted": 0 + }, + "TotalCountsInOtherSpaces": { + "Canceled": 0, + "Cancelling": 0, + "Executing": 0, + "Failed": 0, + "Queued": 0, + "Success": 0, + "TimedOut": 0, + "Interrupted": 0 + }, + "Items": [], + "Links": { + "Self": "/api/tasks?skip=0&take=30", + "Template": "/api/tasks{?skip,active,environment,tenant,runbook,project,name,node,running,states,hasPendingInterruptions,hasWarningsOrErrors,take,ids,partialName,spaces,includeSystem,description,fromCompletedDate,toCompletedDate,fromQueueDate,toQueueDate,fromStartDate,toStartDate}", + "Page.All": "/api/tasks?skip=0&take=2147483647", + "Page.Current": "/api/tasks?skip=0&take=30", + "Page.Last": "/api/tasks?skip=0&take=30" + } +} \ No newline at end of file diff --git a/octopus_deploy/tests/test_e2e.py b/octopus_deploy/tests/test_e2e.py index 54d144a6c40a1..fb516fbeb9ca3 100644 --- a/octopus_deploy/tests/test_e2e.py +++ b/octopus_deploy/tests/test_e2e.py @@ -10,6 +10,8 @@ def test_e2e(dd_agent_check, instance): aggregator = dd_agent_check(instance) - aggregator.assert_metric('octopus_deploy.api.can_connect', 1, tags=['space_name:Default']) + aggregator.assert_metric('octopus_deploy.api.can_connect', 1, tags=[]) for metric in ALL_METRICS: aggregator.assert_metric(metric) + aggregator.assert_no_duplicate_all() + aggregator.assert_all_metrics_covered() diff --git a/octopus_deploy/tests/test_unit.py b/octopus_deploy/tests/test_unit.py index a6c49864d6d79..272bf0888cd64 100644 --- a/octopus_deploy/tests/test_unit.py +++ b/octopus_deploy/tests/test_unit.py @@ -2,8 +2,9 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -import copy +import datetime import logging +from contextlib import nullcontext as does_not_raise import mock import pytest @@ -12,246 +13,859 
@@ from datadog_checks.dev.utils import get_metadata_metrics from datadog_checks.octopus_deploy import OctopusDeployCheck -from .constants import ( - ALL_METRICS, - DEPLOYMENT_METRICS, - DEPLOYMENT_METRICS_NO_PROJECT_1, - MOCKED_TIMESTAMPS, - PROJECT_ALL_METRICS, - PROJECT_GROUP_ALL_METRICS, - PROJECT_GROUP_NO_METRICS, - PROJECT_GROUP_NO_TEST_GROUP_METRICS, - PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, - PROJECT_NO_METRICS, - PROJECT_ONLY_HI_METRICS, - PROJECT_ONLY_HI_MY_PROJECT_METRICS, - SERVER_NODES_METRICS, +from .constants import ALL_METRICS + +MOCKED_TIME1 = datetime.datetime.fromisoformat("2024-09-23T14:45:00.123+00:00") +MOCKED_TIME2 = MOCKED_TIME1 + datetime.timedelta(seconds=15) + + +@pytest.mark.parametrize( + ('mock_http_get', 'expected_exception', 'can_connect'), + [ + pytest.param( + { + 'http_error': { + '/api/spaces': MockResponse(status_code=500), + } + }, + pytest.raises(Exception, match=r'Could not connect to octopus API.*'), + 0, + id='http error', + ), + pytest.param( + { + 'mock_data': { + '/api/spaces': {"Items": []}, + } + }, + does_not_raise(), + 1, + id='http ok', + ), + ], + indirect=['mock_http_get'], ) +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_can_connect(get_current_datetime, dd_run_check, aggregator, expected_exception, can_connect): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + with expected_exception: + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.api.can_connect', can_connect) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_check(get_current_datetime, dd_run_check, aggregator, instance): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_all_metrics_covered( + 
get_current_datetime, + dd_run_check, + aggregator, +): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) aggregator.assert_metric('octopus_deploy.api.can_connect', 1) + for metric in ALL_METRICS: aggregator.assert_metric(metric) + aggregator.assert_all_metrics_covered() aggregator.assert_metrics_using_metadata(get_metadata_metrics()) @pytest.mark.parametrize( - ('mock_http_get, message'), + ('mock_http_get'), [ pytest.param( - {'http_error': {'/api/spaces': MockResponse(status_code=500)}}, - 'HTTPError: 500 Server Error: None for url: None', - id='500', - ), - pytest.param( - {'http_error': {'/api/spaces': MockResponse(status_code=404)}}, - 'HTTPError: 404 Client Error: None for url: None', - id='404', + { + 'mock_data': { + '/api/spaces': {"Items": []}, + } + }, + id='empty spaces', ), ], indirect=['mock_http_get'], ) @pytest.mark.usefixtures('mock_http_get') -def test_emits_critical_service_check_when_service_is_down(dd_run_check, aggregator, instance, message): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_empty_spaces(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - with pytest.raises(Exception, match=message): - dd_run_check(check) + get_current_datetime.return_value = MOCKED_TIME1 - aggregator.assert_metric('octopus_deploy.api.can_connect', 0) - aggregator.assert_all_metrics_covered() + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.space.count', count=0) @pytest.mark.usefixtures('mock_http_get') -def test_space_invalid(dd_run_check, aggregator, instance): - invalid_space_instance = copy.deepcopy(instance) - invalid_space_instance['space'] = 'test' - check = OctopusDeployCheck('octopus_deploy', {}, [invalid_space_instance]) - with 
pytest.raises(Exception, match=r'Space ID not found for provided space name test, does it exist'): - dd_run_check(check) +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_one_space(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 - aggregator.assert_metric('octopus_deploy.api.can_connect', 1) - aggregator.assert_all_metrics_covered() + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.space.count', 1, tags=['space_id:Spaces-1', 'space_name:Default']) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_space_cached(get_current_datetime, dd_run_check, aggregator, instance): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_project_groups(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - check._get_space_id = mock.MagicMock() - check.space_id = "Spaces-1" + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) - assert check._get_space_id.call_count == 0 - aggregator.assert_metric('octopus_deploy.api.can_connect', 1) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) + + 
+@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_projects(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-1', + 'project_name:test-api', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-2', + 'project_name:my-project', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-3', + 'project_name:test', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=['project_id:Projects-4', 'project_name:hi', 'project_group_name:test-group', 'space_name:Default'], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_queued_or_running_tasks(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 30, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 
'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 150, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 0, + count=0, + tags=[ + 'task_id:ServerTasks-118048', + 'task_name:Deploy', + 'task_state:Executing', + 'project_name:my-project', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 60, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 0, + count=0, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 0, + count=0, + tags=[ + 'task_id:ServerTasks-118055', + 'task_name:Deploy', + 'task_state:Queued', + 'project_name:test', + 'space_name:Default', + ], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_completed_tasks(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + metrics = aggregator.metrics('octopus_deploy.deployment.count') + for metric in metrics: + assert not ('project_name:test-api' in metric.tags and 'task_state:Success' in 
metric.tags) + assert not ('project_name:test' in metric.tags and 'task_state:Success' in metric.tags) + + get_current_datetime.return_value = MOCKED_TIME2 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 110, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 50, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 5, + tags=[ + 'task_id:ServerTasks-1847', + 'task_name:Deploy', + 'task_state:Failed', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + 1, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 90, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 54, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 1, + tags=[ + 'task_id:ServerTasks-1846', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.count', + tags=[ + 
'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.queued_time', + 18, + tags=[ + 'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.executing_time', + 41, + tags=[ + 'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.deployment.completed_time', + 14, + tags=[ + 'task_id:ServerTasks-1845', + 'task_name:Deploy', + 'task_state:Success', + 'project_name:test', + 'space_name:Default', + ], + ) @pytest.mark.parametrize( - 'project_groups_config, expected_metrics', + ('mock_http_get'), [ - pytest.param(None, PROJECT_GROUP_ALL_METRICS, id="default"), - pytest.param( - {'include': []}, - PROJECT_GROUP_ALL_METRICS, - id="empty include", - ), - pytest.param( - {'include': ['test-group']}, - PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, - id="include", - ), - pytest.param( - {'include': ['test-group'], 'limit': 1}, - PROJECT_GROUP_ONLY_TEST_GROUP_METRICS, - id="within limit", - ), pytest.param( - {'include': ['test-group'], 'limit': 0}, - PROJECT_GROUP_NO_METRICS, - id="limit hit", - ), - pytest.param( - {'include': ['test-group'], 'exclude': ['test-group']}, - PROJECT_GROUP_NO_METRICS, - id="excluded", - ), - pytest.param( - {'include': ['.*'], 'exclude': ['test-group']}, - PROJECT_GROUP_NO_TEST_GROUP_METRICS, - id="one excluded", - ), - pytest.param( - {'include': ['.*'], 'exclude': ['testing']}, - PROJECT_GROUP_ALL_METRICS, - id="excluded invalid", + { + 'mock_data': { + '/api/spaces': { + "Items": [ + { + "Id": "Spaces-1", + "Name": "First", + }, + { + "Id": "Spaces-2", + "Name": "Second", + }, + ] + }, + '/api/Spaces-1/projectgroups': {"Items": []}, + '/api/Spaces-2/projectgroups': 
{"Items": []}, + } + }, + id='empty spaces', ), ], + indirect=['mock_http_get'], ) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_project_groups_discovery( - get_current_datetime, dd_run_check, aggregator, instance, project_groups_config, expected_metrics -): - instance = copy.deepcopy(instance) - instance['project_groups'] = project_groups_config +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_spaces(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': ['Second'], + }, + } check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 dd_run_check(check) - for metric in expected_metrics: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + + aggregator.assert_metric('octopus_deploy.space.count', tags=['space_name:Default', 'space_name:First'], count=0) + aggregator.assert_metric('octopus_deploy.space.count', tags=['space_id:Spaces-2', 'space_name:Second']) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_project_groups_discovery_error(get_current_datetime, dd_run_check, instance): - instance = copy.deepcopy(instance) - instance['project_groups'] = {'include': None} +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_default_project_groups(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'project_groups': { + 'include': ['hello'], + }, + } check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - with pytest.raises(Exception, match=r'Setting `include` must be an array'): - dd_run_check(check) + + 
get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_space_project_groups(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': ['hello'], + } + } + } + ], + }, + } + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_default_projects(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'projects': { + 'include': 
['test-api'], + }, + } + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project.count', + 1, + tags=[ + 'project_id:Projects-1', + 'project_name:test-api', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + tags=[ + 'project_id:Projects-2', + 'project_name:my-project', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + tags=[ + 'project_name:test', + 'project_name:test', + 'project_group_name:Default Project Group', + 'space_name:Default', + ], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project.count', + tags=['project_id:Projects-4', 'project_name:hi', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + + +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_discovery_space_project_group_projects(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': [ + { + 'hello': { + 'projects': { + 'include': ['.*'], + }, + } + } + ], + }, + } + } + ], + }, + } + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) + + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-1', 'project_group_name:Default Project Group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 'octopus_deploy.project_group.count', + tags=['project_group_id:ProjectGroups-2', 'project_group_name:test-group', 'space_name:Default'], + count=0, + ) + aggregator.assert_metric( + 
'octopus_deploy.project_group.count', + 1, + tags=['project_group_id:ProjectGroups-3', 'project_group_name:hello', 'space_name:Default'], + ) @pytest.mark.parametrize( - 'project_groups_config, expected_metrics', + ('instance'), [ - pytest.param(None, PROJECT_ALL_METRICS, id="default"), - pytest.param( - {'include': [{'test-group': {'projects': {'include': ['hi']}}}]}, - PROJECT_ONLY_HI_METRICS, - id="include", - ), - pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 1}}}]}, - PROJECT_ONLY_HI_MY_PROJECT_METRICS, - id="1 limit", - ), - pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'limit': 0}}}]}, - PROJECT_NO_METRICS, - id="limit hit", - ), pytest.param( { - 'exclude': ['Default.*'], - 'include': [{'test-group': {'projects': {'include': ['.*']}}}], + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': ['Default'], + }, + 'project_groups': { + 'include': ['Default Project Group'], + }, + 'projects': { + 'include': ['.*'], + }, }, - PROJECT_ONLY_HI_METRICS, - id="excluded default", + id='all default', ), pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['.*']}}}]}, - PROJECT_NO_METRICS, - id="all excluded", + { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': ['Default Project Group'], + }, + } + } + ], + }, + 'projects': { + 'include': ['.*'], + }, + }, + id='with project groups', ), pytest.param( - {'include': [{'.*': {'projects': {'include': ['.*'], 'exclude': ['heyhey']}}}]}, - PROJECT_ALL_METRICS, - id="excluded invalud", + { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [ + { + 'Default': { + 'project_groups': { + 'include': [ + { + 'Default Project Group': { + 'projects': { + 'include': ['.*'], + }, + } + } + ], + }, + } + } + ], + }, + }, + id='with projects', ), ], ) @pytest.mark.usefixtures('mock_http_get') 
-@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_projects_discovery( - get_current_datetime, dd_run_check, aggregator, instance, project_groups_config, expected_metrics -): - instance = copy.deepcopy(instance) - instance['project_groups'] = project_groups_config +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_run_twice(get_current_datetime, dd_run_check, aggregator, instance): check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + + dd_run_check(check) + + aggregator.assert_metric('octopus_deploy.space.count') + aggregator.assert_metric('octopus_deploy.project_group.count') + aggregator.assert_metric('octopus_deploy.project.count') + + get_current_datetime.return_value = MOCKED_TIME2 dd_run_check(check) - for metric in expected_metrics: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + + aggregator.assert_metric('octopus_deploy.space.count') + aggregator.assert_metric('octopus_deploy.project_group.count') + aggregator.assert_metric('octopus_deploy.project.count') @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_deployment_metrics(get_current_datetime, dd_run_check, aggregator, instance, caplog): - caplog.set_level(logging.DEBUG) +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_empty_include(get_current_datetime, dd_run_check, aggregator): + instance = { + 'octopus_endpoint': 'http://localhost:80', + 'spaces': { + 'include': [], + }, + } check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + dd_run_check(check) - for metric in DEPLOYMENT_METRICS: - aggregator.assert_metric(metric["name"], count=metric["count"], value=metric["value"], tags=metric["tags"]) + 
aggregator.assert_metric('octopus_deploy.space.count', count=0) @pytest.mark.parametrize( - ('mock_http_get, message'), + ('mock_http_get', 'expected_log'), [ pytest.param( { 'http_error': { - '/api/Spaces-1/tasks/project=Projects-1/fromCompletedDate=2024-09-23' - '14:45:58.888492+00:00': MockResponse(status_code=404) + '/api/Spaces-1/tasks': MockResponse(status_code=500), } }, - 'Encountered a RequestException in \'_get_new_tasks_for_project\'', - id='404', + 'Failed to access endpoint: api/Spaces-1/tasks: 500 Server Error: None for url: None', + id='http error', ), ], indirect=['mock_http_get'], ) @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_exception_when_getting_tasks(get_current_datetime, dd_run_check, aggregator, instance, message, caplog): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_tasks_endpoint_unavailable(get_current_datetime, dd_run_check, expected_log, caplog): + instance = {'octopus_endpoint': 'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) - caplog.set_level(logging.INFO) + get_current_datetime.return_value = MOCKED_TIME1 + caplog.set_level(logging.WARNING) dd_run_check(check) - assert message in caplog.text - - for metric in PROJECT_GROUP_ALL_METRICS + PROJECT_ALL_METRICS + DEPLOYMENT_METRICS_NO_PROJECT_1: - aggregator.assert_metric(metric["name"], count=metric["count"], tags=metric["tags"]) + assert expected_log in caplog.text @pytest.mark.usefixtures('mock_http_get') -@mock.patch("datadog_checks.octopus_deploy.project_groups.get_current_datetime", side_effect=MOCKED_TIMESTAMPS) -def test_octopus_server_node_metrics(get_current_datetime, dd_run_check, aggregator, instance): +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_server_node_metrics(get_current_datetime, dd_run_check, aggregator): + instance = {'octopus_endpoint': 
'http://localhost:80'} check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 dd_run_check(check) + aggregator.assert_metric( + "octopus_deploy.server_node.count", + 1, + count=1, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.max_concurrent_tasks", + 5, + count=1, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.in_maintenance_mode", + 0, + count=1, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) - for metric in SERVER_NODES_METRICS: - aggregator.assert_metric(metric["name"], count=metric["count"], value=metric["value"], tags=metric["tags"]) + +@pytest.mark.parametrize( + ('mock_http_get', 'expected_log'), + [ + pytest.param( + { + 'http_error': { + '/api/octopusservernodes': MockResponse(status_code=500), + } + }, + 'Failed to access endpoint: api/octopusservernodes: 500 Server Error: None for url: None', + id='http error', + ), + ], + indirect=['mock_http_get'], +) +@pytest.mark.usefixtures('mock_http_get') +@mock.patch("datadog_checks.octopus_deploy.check.get_current_datetime") +def test_server_node_endpoint_failed(get_current_datetime, dd_run_check, aggregator, expected_log, caplog): + instance = {'octopus_endpoint': 'http://localhost:80'} + check = OctopusDeployCheck('octopus_deploy', {}, [instance]) + get_current_datetime.return_value = MOCKED_TIME1 + caplog.set_level(logging.WARNING) + dd_run_check(check) + assert expected_log in caplog.text + aggregator.assert_metric( + "octopus_deploy.server_node.count", + 1, + count=0, + tags=[ + 
'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.max_concurrent_tasks", + 5, + count=0, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + ) + aggregator.assert_metric( + "octopus_deploy.server_node.in_maintenance_mode", + 5, + count=0, + tags=[ + 'server_node_id:OctopusServerNodes-octopus-i8932-79236734bc234-09h234n', + 'server_node_name:octopus-i8932-79236734bc234-09h234n', + ], + )