Skip to content

Commit

Permalink
Add support for ingesting mysql config settings (#15498)
Browse files Browse the repository at this point in the history
  • Loading branch information
jmeunier28 authored Aug 8, 2023
1 parent e408aa1 commit e600fb3
Show file tree
Hide file tree
Showing 7 changed files with 206 additions and 0 deletions.
16 changes: 16 additions & 0 deletions mysql/assets/configuration/spec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -335,6 +335,22 @@ files:
type: boolean
example: false
display_default: false
- name: collect_settings
description: Configure collection of performance_schema.global_variables. This is an alpha feature.
options:
- name: enabled
description: |
Enable collection of performance_schema.global_variables. Requires `dbm: true`.
value:
type: boolean
example: false
- name: collection_interval
description: |
Set the database settings collection interval (in seconds). Each collection involves a single query to
`performance_schema.global_variables`.
value:
type: number
example: 600
- name: query_metrics
description: Configure collection of query metrics
options:
Expand Down
1 change: 1 addition & 0 deletions mysql/datadog_checks/mysql/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ def __init__(self, instance):
)
self.statement_samples_config = instance.get('query_samples', instance.get('statement_samples', {})) or {}
self.statement_metrics_config = instance.get('query_metrics', {}) or {}
self.settings_config = instance.get('collect_settings', {}) or {}
self.activity_config = instance.get('query_activity', {}) or {}
self.cloud_metadata = {}
aws = instance.get('aws', {})
Expand Down
10 changes: 10 additions & 0 deletions mysql/datadog_checks/mysql/config_models/instance.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,15 @@ class Azure(BaseModel):
fully_qualified_domain_name: Optional[str] = None


class CollectSettings(BaseModel):
    # Validation model for the `collect_settings` instance option
    # (see mysql/assets/configuration/spec.yaml). NOTE(review): this module is
    # normally auto-generated from the spec — confirm before hand-editing.
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        frozen=True,
    )
    # Seconds between settings collections (spec default: 600).
    collection_interval: Optional[float] = None
    # Whether collection of global variables is enabled (spec default: false).
    enabled: Optional[bool] = None


class CustomQuery(BaseModel):
model_config = ConfigDict(
arbitrary_types_allowed=True,
Expand Down Expand Up @@ -157,6 +166,7 @@ class InstanceConfig(BaseModel):
aws: Optional[Aws] = None
azure: Optional[Azure] = None
charset: Optional[str] = None
collect_settings: Optional[CollectSettings] = None
connect_timeout: Optional[float] = None
custom_queries: Optional[tuple[CustomQuery, ...]] = None
dbm: Optional[bool] = None
Expand Down
15 changes: 15 additions & 0 deletions mysql/datadog_checks/mysql/data/conf.yaml.example
Original file line number Diff line number Diff line change
Expand Up @@ -334,6 +334,21 @@ instances:
#
# dbm: false

## Configure collection of performance_schema.global_variables. This is an alpha feature.
#
# collect_settings:

## @param enabled - boolean - optional - default: false
## Enable collection of performance_schema.global_variables. Requires `dbm: true`.
#
# enabled: false

## @param collection_interval - number - optional - default: 600
## Set the database settings collection interval (in seconds). Each collection involves a single query to
## `performance_schema.global_variables`.
#
# collection_interval: 600

## Configure collection of query metrics
#
# query_metrics:
Expand Down
128 changes: 128 additions & 0 deletions mysql/datadog_checks/mysql/metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
# (C) Datadog, Inc. 2023-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import time
from contextlib import closing
from operator import attrgetter

import pymysql

try:
import datadog_agent
except ImportError:
from ..stubs import datadog_agent

from datadog_checks.base import is_affirmative
from datadog_checks.base.utils.db.utils import (
DBMAsyncJob,
default_json_event_encoding,
)
from datadog_checks.base.utils.serialization import json
from datadog_checks.base.utils.tracking import tracked_method

# Default MySQL settings collection interval in seconds; overridden by the
# `collection_interval` option under `collect_settings`.
DEFAULT_SETTINGS_COLLECTION_INTERVAL = 600

# MariaDB exposes global variables via information_schema; MySQL via
# performance_schema (selection happens in report_mysql_metadata).
MARIADB_TABLE_NAME = "information_schema.GLOBAL_VARIABLES"
MYSQL_TABLE_NAME = "performance_schema.global_variables"

# Fetch every global variable; {table_name} is substituted per flavor.
SETTINGS_QUERY = """
SELECT
variable_name,
variable_value
FROM
{table_name}
"""


class MySQLMetadata(DBMAsyncJob):
    """
    Collects database metadata. Supports:
    1. collection of performance_schema.global_variables
    """

    def __init__(self, check, config, connection_args):
        # Interval (in seconds) between settings collections; also used to
        # rate-limit the async job loop below.
        self.collection_interval = float(
            config.settings_config.get('collection_interval', DEFAULT_SETTINGS_COLLECTION_INTERVAL)
        )
        super(MySQLMetadata, self).__init__(
            check,
            rate_limit=1 / self.collection_interval,
            run_sync=is_affirmative(config.settings_config.get('run_sync', False)),
            enabled=is_affirmative(config.settings_config.get('enabled', False)),
            min_collection_interval=config.min_collection_interval,
            dbms="mysql",
            expected_db_exceptions=(pymysql.err.DatabaseError,),
            job_name="database-metadata",
            shutdown_callback=self._close_db_conn,
        )
        # Fixed duplicate assignment: `self._check = check` previously appeared twice.
        self._check = check
        self._config = config
        # NOTE(review): not read anywhere in this module; possibly reserved for
        # future version-metadata collection — confirm before removing.
        self._version_processed = False
        self._connection_args = connection_args
        self._db = None

    def _get_db_connection(self):
        """
        lazy reconnect db
        pymysql connections are not thread safe so we can't reuse the same connection from the main check
        :return:
        """
        if not self._db:
            self._db = pymysql.connect(**self._connection_args)
        return self._db

    def _close_db_conn(self):
        # Best-effort close; always drop the reference so the next access reconnects.
        if self._db:
            try:
                self._db.close()
            except Exception:
                self._log.debug("Failed to close db connection", exc_info=True)
            finally:
                self._db = None

    def _cursor_run(self, cursor, query, params=None):
        """
        Run and log the query. If provided, obfuscated params are logged in place of the regular params.
        """
        try:
            self._log.debug("Running query [{}] params={}".format(query, params))
            cursor.execute(query, params)
        except pymysql.DatabaseError as e:
            # Emit an error metric tagged with the exception type, then re-raise so
            # the base job's expected-exception handling takes over.
            self._check.count(
                "dd.mysql.db.error",
                1,
                tags=self._tags + ["error:{}".format(type(e))] + self._check._get_debug_tags(),
                hostname=self._check.resolved_hostname,
            )
            raise

    def run_job(self):
        self.report_mysql_metadata()

    @tracked_method(agent_check_getter=attrgetter('_check'))
    def report_mysql_metadata(self):
        """
        Query the flavor-appropriate global variables table and submit the rows
        as a `mysql_variables` metadata event to the DBM metadata intake.
        """
        table_name = MYSQL_TABLE_NAME if not self._check.is_mariadb else MARIADB_TABLE_NAME
        query = SETTINGS_QUERY.format(table_name=table_name)
        with closing(self._get_db_connection().cursor(pymysql.cursors.DictCursor)) as cursor:
            self._cursor_run(
                cursor,
                query,
            )
            rows = cursor.fetchall()
        settings = [dict(row) for row in rows]
        event = {
            "host": self._check.resolved_hostname,
            "agent_version": datadog_agent.get_version(),
            "dbms": "mysql",
            "kind": "mysql_variables",
            "collection_interval": self.collection_interval,
            'dbms_version': self._check.version.version + '+' + self._check.version.build,
            "tags": self._tags,
            # Intake expects milliseconds since epoch.
            "timestamp": time.time() * 1000,
            "cloud_metadata": self._config.cloud_metadata,
            "metadata": settings,
        }
        self._check.database_monitoring_metadata(json.dumps(event, default=default_json_event_encoding))
4 changes: 4 additions & 0 deletions mysql/datadog_checks/mysql/mysql.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
VARIABLES_VARS,
)
from .innodb_metrics import InnoDBMetrics
from .metadata import MySQLMetadata
from .queries import (
QUERY_USER_CONNECTIONS,
SQL_95TH_PERCENTILE,
Expand Down Expand Up @@ -116,6 +117,7 @@ def __init__(self, name, init_config, instances):
self._warnings_by_code = {}
self._statement_metrics = MySQLStatementMetrics(self, self._config, self._get_connection_args())
self._statement_samples = MySQLStatementSamples(self, self._config, self._get_connection_args())
self._mysql_metadata = MySQLMetadata(self, self._config, self._get_connection_args())
self._query_activity = MySQLActivity(self, self._config, self._get_connection_args())

self._runtime_queries = None
Expand Down Expand Up @@ -274,6 +276,7 @@ def check(self, _):
self._statement_metrics.run_job_loop(dbm_tags)
self._statement_samples.run_job_loop(dbm_tags)
self._query_activity.run_job_loop(dbm_tags)
self._mysql_metadata.run_job_loop(dbm_tags)

# keeping track of these:
self._put_qcache_stats()
Expand All @@ -292,6 +295,7 @@ def cancel(self):
self._statement_samples.cancel()
self._statement_metrics.cancel()
self._query_activity.cancel()
self._mysql_metadata.cancel()

def _new_query_executor(self, queries):
return QueryExecutor(
Expand Down
32 changes: 32 additions & 0 deletions mysql/tests/test_metadata.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# (C) Datadog, Inc. 2023-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pytest

from datadog_checks.mysql import MySql

from . import common


@pytest.fixture
def dbm_instance(instance_complex):
    # Build on the complex instance: enable DBM, disable the other async
    # collection jobs, and run settings collection synchronously so the test
    # can assert on the emitted event deterministically.
    instance_complex.update(
        {
            'dbm': True,
            'query_samples': {'enabled': False},
            'query_metrics': {'enabled': False},
            'query_activity': {'enabled': False},
            'collect_settings': {'enabled': True, 'run_sync': True, 'collection_interval': 0.1},
        }
    )
    return instance_complex


@pytest.mark.integration
@pytest.mark.usefixtures('dd_environment')
def test_collect_mysql_settings(aggregator, dbm_instance, dd_run_check):
    # Run the check once with settings collection enabled and verify a
    # mysql_variables event reaches the dbm-metadata event platform track.
    check = MySql(common.CHECK_NAME, {}, instances=[dbm_instance])
    dd_run_check(check)

    dbm_metadata = aggregator.get_event_platform_events("dbm-metadata")
    event = dbm_metadata[0]
    for key, expected in (("host", "stubbed.hostname"), ("dbms", "mysql"), ("kind", "mysql_variables")):
        assert event[key] == expected
    assert len(event["metadata"]) > 0

0 comments on commit e600fb3

Please sign in to comment.