diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1084b8f41..5c131e193 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
 # 14.0.0, in progress
 
+## Added
+* The Datadog sink can now drop metrics whose names match configured prefixes via `datadog_metric_name_prefix_drops`. Thanks, [kaplanelad](https://github.com/kaplanelad)!
+* The Datadog sink can now drop tags by prefix from metrics whose names match configured prefixes via `datadog_exclude_tags_prefix_by_prefix_metric`. Thanks, [kaplanelad](https://github.com/kaplanelad)!
+
 # 13.0.0, 2020-01-03
 
 ## Added
diff --git a/config.go b/config.go
index 20ca57051..485bd2546 100644
--- a/config.go
+++ b/config.go
@@ -1,15 +1,19 @@
 package veneur
 
 type Config struct {
-	Aggregates                   []string `yaml:"aggregates"`
-	AwsAccessKeyID               string   `yaml:"aws_access_key_id"`
-	AwsRegion                    string   `yaml:"aws_region"`
-	AwsS3Bucket                  string   `yaml:"aws_s3_bucket"`
-	AwsSecretAccessKey           string   `yaml:"aws_secret_access_key"`
-	BlockProfileRate             int      `yaml:"block_profile_rate"`
-	CountUniqueTimeseries        bool     `yaml:"count_unique_timeseries"`
-	DatadogAPIHostname           string   `yaml:"datadog_api_hostname"`
-	DatadogAPIKey                string   `yaml:"datadog_api_key"`
+	Aggregates                             []string `yaml:"aggregates"`
+	AwsAccessKeyID                         string   `yaml:"aws_access_key_id"`
+	AwsRegion                              string   `yaml:"aws_region"`
+	AwsS3Bucket                            string   `yaml:"aws_s3_bucket"`
+	AwsSecretAccessKey                     string   `yaml:"aws_secret_access_key"`
+	BlockProfileRate                       int      `yaml:"block_profile_rate"`
+	CountUniqueTimeseries                  bool     `yaml:"count_unique_timeseries"`
+	DatadogAPIHostname                     string   `yaml:"datadog_api_hostname"`
+	DatadogAPIKey                          string   `yaml:"datadog_api_key"`
+	DatadogExcludeTagsPrefixByPrefixMetric []struct {
+		MetricPrefix string   `yaml:"metric_prefix"`
+		Tags         []string `yaml:"tags"`
+	} `yaml:"datadog_exclude_tags_prefix_by_prefix_metric"`
 	DatadogFlushMaxPerBody       int      `yaml:"datadog_flush_max_per_body"`
 	DatadogMetricNamePrefixDrops []string `yaml:"datadog_metric_name_prefix_drops"`
 	DatadogSpanBufferSize        int      `yaml:"datadog_span_buffer_size"`
diff --git a/example.yaml b/example.yaml
index ff5c6d56e..9fac0700e 100644
--- a/example.yaml
+++ b/example.yaml
@@ -285,6 +285,13 @@ datadog_trace_api_address: ""
 
 datadog_metric_name_prefix_drops:
  - "an_ignorable_metric."
 
+# Exclude tag *prefixes* from metrics with specific name *prefixes*.
+# Tags matching these prefixes are dropped from matching metrics before they are sent to Datadog.
+datadog_exclude_tags_prefix_by_prefix_metric:
+ - metric_prefix: "metric_prefix"
+   tags:
+    - "an_ignorable_tag_prefix"
+
 # The size of the ring buffer used for retaining spans during a flush interval.
 datadog_span_buffer_size: 16384
diff --git a/server.go b/server.go
index 577e56668..3b85db963 100644
--- a/server.go
+++ b/server.go
@@ -495,9 +495,16 @@ func NewFromConfig(logger *logrus.Logger, conf Config) (*Server, error) {
 		ret.metricSinks = append(ret.metricSinks, sfxSink)
 	}
 	if conf.DatadogAPIKey != "" && conf.DatadogAPIHostname != "" {
+
+		excludeTagsPrefixByPrefixMetric := map[string][]string{}
+		for _, m := range conf.DatadogExcludeTagsPrefixByPrefixMetric {
+			excludeTagsPrefixByPrefixMetric[m.MetricPrefix] = m.Tags
+		}
+
 		ddSink, err := datadog.NewDatadogMetricSink(
 			ret.interval.Seconds(), conf.DatadogFlushMaxPerBody, conf.Hostname, ret.Tags,
 			conf.DatadogAPIHostname, conf.DatadogAPIKey, ret.HTTPClient, log, conf.DatadogMetricNamePrefixDrops,
+			excludeTagsPrefixByPrefixMetric,
 		)
 		if err != nil {
 			return ret, err
diff --git a/sinks/datadog/datadog.go b/sinks/datadog/datadog.go
index 21f5b2f52..161e3bc99 100644
--- a/sinks/datadog/datadog.go
+++ b/sinks/datadog/datadog.go
@@ -32,17 +32,18 @@ const datadogSpanType = "web"
 const datadogSpanBufferSize = 1 << 14
 
 type DatadogMetricSink struct {
-	HTTPClient            *http.Client
-	APIKey                string
-	DDHostname            string
-	hostname              string
-	flushMaxPerBody       int
-	tags                  []string
-	interval              float64
-	traceClient           *trace.Client
-	log                   *logrus.Logger
-	metricNamePrefixDrops []string
-	excludedTags          []string
+	HTTPClient                      *http.Client
+	APIKey                          string
+	DDHostname                      string
+	hostname                        string
+	flushMaxPerBody                 int
+	tags                            []string
+	interval                        float64
+	traceClient                     *trace.Client
+	log                             *logrus.Logger
+	metricNamePrefixDrops           []string
+	excludedTags                    []string
+	excludeTagsPrefixByPrefixMetric map[string][]string
 }
 
 // DDEvent represents the structure of datadog's undocumented /intake endpoint
@@ -81,17 +82,18 @@ type DDServiceCheck struct {
 }
 
 // NewDatadogMetricSink creates a new Datadog sink for trace spans.
-func NewDatadogMetricSink(interval float64, flushMaxPerBody int, hostname string, tags []string, ddHostname string, apiKey string, httpClient *http.Client, log *logrus.Logger, metricNamePrefixDrops []string) (*DatadogMetricSink, error) {
+func NewDatadogMetricSink(interval float64, flushMaxPerBody int, hostname string, tags []string, ddHostname string, apiKey string, httpClient *http.Client, log *logrus.Logger, metricNamePrefixDrops []string, excludeTagsPrefixByPrefixMetric map[string][]string) (*DatadogMetricSink, error) {
 	return &DatadogMetricSink{
-		HTTPClient:            httpClient,
-		APIKey:                apiKey,
-		DDHostname:            ddHostname,
-		interval:              interval,
-		flushMaxPerBody:       flushMaxPerBody,
-		hostname:              hostname,
-		tags:                  tags,
-		metricNamePrefixDrops: metricNamePrefixDrops,
-		log:                   log,
+		HTTPClient:                      httpClient,
+		APIKey:                          apiKey,
+		DDHostname:                      ddHostname,
+		interval:                        interval,
+		flushMaxPerBody:                 flushMaxPerBody,
+		hostname:                        hostname,
+		tags:                            tags,
+		metricNamePrefixDrops:           metricNamePrefixDrops,
+		excludeTagsPrefixByPrefixMetric: excludeTagsPrefixByPrefixMetric,
+		log:                             log,
 	}, nil
 }
 
@@ -270,6 +272,17 @@ METRICLOOP:
 		// Defensively copy tags since we're gonna mutate it
 		tags := make([]string, 0, len(dd.tags))
 
+		// Find the tag prefixes to exclude for metrics whose name matches a configured prefix
+		var excludeTagsPrefixByPrefixMetric []string
+		if len(dd.excludeTagsPrefixByPrefixMetric) > 0 {
+			for prefixMetric, tags := range dd.excludeTagsPrefixByPrefixMetric {
+				if strings.HasPrefix(m.Name, prefixMetric) {
+					excludeTagsPrefixByPrefixMetric = tags
+					break
+				}
+			}
+		}
+
 		for i := range dd.tags {
 			exclude := false
 			for j := range dd.excludedTags {
@@ -301,6 +314,14 @@ METRICLOOP:
 				exclude = true
 				break
 			}
+
+		}
+
+		for i := range excludeTagsPrefixByPrefixMetric {
+			if strings.HasPrefix(tag, excludeTagsPrefixByPrefixMetric[i]) {
+				exclude = true
+				break
+			}
 		}
 		if !exclude {
 			tags = append(tags, tag)
diff --git a/sinks/datadog/datadog_test.go b/sinks/datadog/datadog_test.go
index ba798468b..abee82eb4 100644
--- a/sinks/datadog/datadog_test.go
+++ b/sinks/datadog/datadog_test.go
@@ -294,7 +294,7 @@ func TestDatadogMetricRouting(t *testing.T) {
 
 func TestDatadogFlushEvents(t *testing.T) {
 	transport := &DatadogRoundTripper{Endpoint: "/intake", Contains: ""}
-	ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil)
+	ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil, nil)
 	assert.NoError(t, err)
 
 	testEvent := ssf.SSFSample{
@@ -350,7 +350,7 @@ func TestDatadogFlushEvents(t *testing.T) {
 
 func TestDatadogFlushOtherMetricsForServiceChecks(t *testing.T) {
 	transport := &DatadogRoundTripper{Endpoint: "/api/v1/check_run", Contains: ""}
-	ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil)
+	ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil, nil)
 	assert.NoError(t, err)
 
 	testCheck := ssf.SSFSample{
@@ -373,7 +373,7 @@ func TestDatadogFlushOtherMetricsForServiceChecks(t *testing.T) {
 
 func TestDatadogFlushServiceCheck(t *testing.T) {
 	transport := &DatadogRoundTripper{Endpoint: "/api/v1/check_run", Contains: ""}
-	ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil)
[]string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil) + ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil, nil) assert.NoError(t, err) testCheck := samplers.InterMetric{ @@ -468,3 +468,32 @@ func TestDataDogDropMetric(t *testing.T) { assert.Empty(t, serviceChecks, "No service check metrics are reported") assert.Equal(t, 2, len(ddMetrics)) } + +func TestDataDogDropTagsByMetricPrefix(t *testing.T) { + + ddSink := DatadogMetricSink{ + excludeTagsPrefixByPrefixMetric: map[string][]string{ + "remove.a": []string{"tag-ab"}, + }, + } + + testsMetricCount := []struct { + Name string + Metric samplers.InterMetric + expectedTagCount int + }{ + {"Ignore dropped tags", samplers.InterMetric{Name: "foo.a.b", Tags: []string{"tag-a", "tag-ab", "tag-abc"}}, 3}, + {"dropped tags", samplers.InterMetric{Name: "remove.a.b", Tags: []string{"tag-a", "tag-ab", "tag-abc"}}, 1}, + {"dropped tags", samplers.InterMetric{Name: "remove.a", Tags: []string{"tag-a", "tag-ab"}}, 1}, + } + + for _, test := range testsMetricCount { + t.Run(test.Name, func(t *testing.T) { + metrics := []samplers.InterMetric{test.Metric} + ddMetrics, serviceChecks := ddSink.finalizeMetrics(metrics) + assert.Empty(t, serviceChecks, "No service check metrics are reported") + assert.Equal(t, test.expectedTagCount, len(ddMetrics[0].Tags)) + }) + } + +}