diff --git a/postgres/datadog_checks/postgres/metadata.py b/postgres/datadog_checks/postgres/metadata.py
index 755eb29d8ad04..44626a66cc266 100644
--- a/postgres/datadog_checks/postgres/metadata.py
+++ b/postgres/datadog_checks/postgres/metadata.py
@@ -319,14 +319,28 @@ def _get_table_info(self, cursor, dbname, schema_id):
             cursor.execute(PG_TABLES_QUERY_V10_PLUS.format(schema_oid=schema_id))
             rows = cursor.fetchall()
             table_info = [dict(row) for row in rows]
-            # return self._sort_and_limit_table_info(cursor, dbname, table_info, limit)
-            return table_info[:limit]
+            table_info = self._filter_tables_with_no_relation_metrics(dbname, table_info)
+            return self._sort_and_limit_table_info(cursor, dbname, table_info, limit)
         else:
             # Config error should catch the case where schema collection is enabled
             # and relation metrics aren't, but adding a warning here just in case
             self._check.log.warning("Relation metrics are not configured for {dbname}, so tables cannot be collected")
 
+    def _filter_tables_with_no_relation_metrics(
+        self, dbname, table_info: List[Dict[str, Union[str, bool]]]
+    ) -> List[Dict[str, Union[str, bool]]]:
+        filtered_table_list = []
+        cache = self._check.metrics_cache.table_activity_metrics
+        for table in table_info:
+            if table['name'] in cache[dbname].keys():
+                filtered_table_list.append(table)
+            # partitioned tables will not have metrics recorded under the name of the partitioned table,
+            # so for now we always report them
+            elif table['has_partitions']:
+                filtered_table_list.append(table)
+        return filtered_table_list
+
     def _sort_and_limit_table_info(
         self, cursor, dbname, table_info: List[Dict[str, Union[str, bool]]], limit: int
     ) -> List[Dict[str, Union[str, bool]]]:
diff --git a/postgres/tests/test_metadata.py b/postgres/tests/test_metadata.py
index a4b99d28635b9..25fbda829f197 100644
--- a/postgres/tests/test_metadata.py
+++ b/postgres/tests/test_metadata.py
@@ -66,7 +66,7 @@ def test_collect_schemas(integration_check, dbm_instance, aggregator):
     schema_public = None
     for schema in database_metadata[0]['schemas']:
         if schema['name'] == 'public':
-            schema_public = schema
+            schema_public = schema
 
     # check that all expected tables are present
     tables_set = {'persons', "personsdup1", "personsdup2", "pgtable", "pg_newtable", "cities"}
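
Illustration only, not part of the patch: a minimal, self-contained sketch of the filtering behavior introduced in _filter_tables_with_no_relation_metrics above. A plain dict with hypothetical database and table names stands in for self._check.metrics_cache.table_activity_metrics; the real method reads the cache from the check instance.

# Standalone sketch (assumptions: cache shape {dbname: {table_name: metrics}},
# table_info entries carrying 'name' and 'has_partitions' keys, as in the patch).
from typing import Dict, List, Union

def filter_tables_with_no_relation_metrics(
    dbname: str,
    table_info: List[Dict[str, Union[str, bool]]],
    activity_cache: Dict[str, Dict[str, dict]],
) -> List[Dict[str, Union[str, bool]]]:
    filtered = []
    for table in table_info:
        if table['name'] in activity_cache.get(dbname, {}):
            # the table has recorded activity metrics, so keep it
            filtered.append(table)
        elif table['has_partitions']:
            # partitioned parents have no metrics under their own name, so always keep them
            filtered.append(table)
    return filtered

# Example: only tables present in the cache (or partitioned parents) survive.
cache = {'datadog_test': {'persons': {}, 'cities': {}}}   # hypothetical cache contents
tables = [
    {'name': 'persons', 'has_partitions': False},
    {'name': 'cities', 'has_partitions': False},
    {'name': 'stale_table', 'has_partitions': False},
    {'name': 'measurements', 'has_partitions': True},
]
print([t['name'] for t in filter_tables_with_no_relation_metrics('datadog_test', tables, cache)])
# ['persons', 'cities', 'measurements']

In the patched check, this filter runs before _sort_and_limit_table_info, so the per-schema table limit is applied only to tables that actually have relation metrics (plus partitioned parents), rather than to the raw pg_class listing.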