Add support for sending database_instance metadata #15524

Merged
4 changes: 4 additions & 0 deletions mysql/CHANGELOG.md
@@ -2,6 +2,10 @@

## Unreleased

***Added***:

* Add support for sending `database_instance` metadata ([#15524](https://github.com/DataDog/integrations-core/pull/15524))

***Fixed***:

* Bump cryptography to 41.0.3 ([#15517](https://github.com/DataDog/integrations-core/pull/15517))
10 changes: 10 additions & 0 deletions mysql/assets/configuration/spec.yaml
@@ -632,6 +632,16 @@ files:
type: boolean
example: false
display_default: false
- name: database_instance_collection_interval
hidden: true
description: |
Set the database instance collection interval (in seconds). The database instance collection sends
basic information about the database instance along with a signal that it still exists.
This collection does not involve any additional queries to the database.
value:
type: number
example: 1800
display_default: false
- template: instances/default
overrides:
disable_generic_tags.hidden: false
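For illustration, here is a minimal sketch of an instance configuration that overrides this hidden option, written in the dict style the tests below use; the host value and the 600-second interval are placeholders rather than part of this change:

# Hypothetical instance dict for the MySQL check; connection settings are placeholders.
instance = {
    'host': 'localhost',
    'dbm': True,
    # Re-emit the database_instance metadata event every 600 seconds
    # instead of the 1800-second default applied in config.py below.
    'database_instance_collection_interval': 600,
}
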
1 change: 1 addition & 0 deletions mysql/datadog_checks/mysql/config.py
@@ -73,6 +73,7 @@ def __init__(self, instance):
}
self.log_unobfuscated_queries = is_affirmative(instance.get('log_unobfuscated_queries', False))
self.log_unobfuscated_plans = is_affirmative(instance.get('log_unobfuscated_plans', False))
self.database_instance_collection_interval = instance.get('database_instance_collection_interval', 1800)
self.configuration_checks()

def _build_tags(self, custom_tags):
4 changes: 4 additions & 0 deletions mysql/datadog_checks/mysql/config_models/defaults.py
@@ -12,6 +12,10 @@ def instance_connect_timeout():
return 10


def instance_database_instance_collection_interval():
return False


def instance_dbm():
return False

1 change: 1 addition & 0 deletions mysql/datadog_checks/mysql/config_models/instance.py
@@ -169,6 +169,7 @@ class InstanceConfig(BaseModel):
collect_settings: Optional[CollectSettings] = None
connect_timeout: Optional[float] = None
custom_queries: Optional[tuple[CustomQuery, ...]] = None
database_instance_collection_interval: Optional[float] = None
dbm: Optional[bool] = None
defaults_file: Optional[str] = None
disable_generic_tags: Optional[bool] = None
41 changes: 40 additions & 1 deletion mysql/datadog_checks/mysql/mysql.py
@@ -5,18 +5,27 @@
from __future__ import division

import copy
import time
import traceback
from collections import defaultdict
from contextlib import closing, contextmanager
from typing import Any, Dict, List, Optional # noqa: F401

import pymysql
from cachetools import TTLCache
from six import PY3, iteritems, itervalues

from datadog_checks.base import AgentCheck, is_affirmative
from datadog_checks.base.utils.db import QueryExecutor, QueryManager
from datadog_checks.base.utils.db.utils import resolve_db_host as agent_host_resolver
from datadog_checks.base.utils.db.utils import (
default_json_event_encoding,
)
from datadog_checks.base.utils.db.utils import (
resolve_db_host as agent_host_resolver,
)
from datadog_checks.base.utils.serialization import json

from .__about__ import __version__
from .activity import MySQLActivity
from .collection_utils import collect_all_scalars, collect_scalar, collect_string, collect_type
from .config import MySQLConfig
@@ -119,8 +128,16 @@ def __init__(self, name, init_config, instances):
self._statement_samples = MySQLStatementSamples(self, self._config, self._get_connection_args())
self._mysql_metadata = MySQLMetadata(self, self._config, self._get_connection_args())
self._query_activity = MySQLActivity(self, self._config, self._get_connection_args())
# _database_instance_emitted: limits how often the database instance metadata is collected and sent
self._database_instance_emitted = TTLCache(
maxsize=1,
ttl=self._config.database_instance_collection_interval,
) # type: TTLCache

self._runtime_queries = None
# Keep a copy of the tags without the internal resource tags so they can be used for code paths that
# don't go through the Agent's internal metrics submission, which processes those tags
self._non_internal_tags = copy.deepcopy(self.tags)
self.set_resource_tags()

def execute_query_raw(self, query):
Expand Down Expand Up @@ -254,6 +271,7 @@ def check(self, _):
# version collection
self.version = get_version(db)
self._send_metadata()
self._send_database_instance_metadata()

self.is_mariadb = self.version.flavor == "MariaDB"
if self._get_is_aurora(db):
Expand Down Expand Up @@ -1213,3 +1231,24 @@ def _report_warnings(self):

for warning in messages:
self.warning(warning)

def _send_database_instance_metadata(self):
if self.resolved_hostname not in self._database_instance_emitted:
event = {
"host": self.resolved_hostname,
"agent_version": datadog_agent.get_version(),
"dbms": "mysql",
"kind": "database_instance",
"collection_interval": self._config.database_instance_collection_interval,
'dbms_version': self.version.version + '+' + self.version.build,
'integration_version': __version__,
"tags": self._non_internal_tags,
"timestamp": time.time() * 1000,
"cloud_metadata": self._config.cloud_metadata,
"metadata": {
"dbm": self._config.dbm_enabled,
"connection_host": self._config.host,
},
}
self._database_instance_emitted[self.resolved_hostname] = event
self.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
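
A note on the gating above: since the TTLCache holds at most one entry keyed by the resolved hostname and its TTL equals database_instance_collection_interval, the event is sent at most once per interval and is only re-sent after the cached entry expires. A standalone sketch of that pattern, assuming cachetools is available (the 2-second TTL is only to keep the example short; the check uses the configured interval):

import time

from cachetools import TTLCache

emitted = TTLCache(maxsize=1, ttl=2)  # the check uses database_instance_collection_interval as the TTL

def maybe_emit(hostname):
    # Emit only when there is no unexpired cache entry for this hostname.
    if hostname not in emitted:
        emitted[hostname] = True
        return True
    return False

assert maybe_emit('db-host') is True    # first run emits the metadata event
assert maybe_emit('db-host') is False   # suppressed while the cache entry is still fresh
time.sleep(2.1)
assert maybe_emit('db-host') is True    # re-emitted after the TTL expires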
4 changes: 2 additions & 2 deletions mysql/tests/test_metadata.py
@@ -25,8 +25,8 @@ def test_collect_mysql_settings(aggregator, dbm_instance, dd_run_check):
mysql_check = MySql(common.CHECK_NAME, {}, instances=[dbm_instance])
dd_run_check(mysql_check)
dbm_metadata = aggregator.get_event_platform_events("dbm-metadata")
event = dbm_metadata[0]
event = next((e for e in dbm_metadata if e['kind'] == 'mysql_variables'), None)
assert event is not None
assert event['host'] == "stubbed.hostname"
assert event['dbms'] == "mysql"
assert event['kind'] == "mysql_variables"
assert len(event["metadata"]) > 0
42 changes: 42 additions & 0 deletions mysql/tests/test_mysql.py
@@ -12,6 +12,7 @@
from datadog_checks.base.utils.platform import Platform
from datadog_checks.dev.utils import get_metadata_metrics
from datadog_checks.mysql import MySql
from datadog_checks.mysql.__about__ import __version__
from datadog_checks.mysql.const import (
BINLOG_VARS,
GALERA_VARS,
Expand Down Expand Up @@ -640,3 +641,44 @@ def test_set_resources(aggregator, dd_run_check, instance_basic, cloud_metadata,
aggregator.assert_metric_has_tag(
"mysql.net.connections", tags.DATABASE_INSTANCE_RESOURCE_TAG.format(hostname=mysql_check.resolved_hostname)
)


@pytest.mark.parametrize(
'dbm_enabled, reported_hostname',
[
(True, None),
(False, None),
(True, 'forced_hostname'),
(False, 'forced_hostname'),
],
)
@pytest.mark.integration
@pytest.mark.usefixtures('dd_environment')
def test_database_instance_metadata(aggregator, dd_run_check, instance_complex, dbm_enabled, reported_hostname):
instance_complex['dbm'] = dbm_enabled
if reported_hostname:
instance_complex['reported_hostname'] = reported_hostname
expected_host = reported_hostname if reported_hostname else 'stubbed.hostname'
mysql_check = MySql(common.CHECK_NAME, {}, [instance_complex])
dd_run_check(mysql_check)

dbm_metadata = aggregator.get_event_platform_events("dbm-metadata")
event = next((e for e in dbm_metadata if e['kind'] == 'database_instance'), None)
assert event is not None
assert event['host'] == expected_host
assert event['dbms'] == "mysql"
assert sorted(event['tags']) == sorted(tags.METRIC_TAGS)
assert event['integration_version'] == __version__
assert event['collection_interval'] == 1800
assert event['metadata'] == {
'dbm': dbm_enabled,
'connection_host': instance_complex['host'],
}

# Run a second time and expect the metadata to not be emitted again because of the cache TTL
aggregator.reset()
dd_run_check(mysql_check)

dbm_metadata = aggregator.get_event_platform_events("dbm-metadata")
event = next((e for e in dbm_metadata if e['kind'] == 'database_instance'), None)
assert event is None