
Commit

Merge branch 'main' into restore_spanner_receiver
dashpole authored Oct 28, 2024
2 parents 542bdda + 3b31e40 commit 4b6e647
Showing 74 changed files with 4,380 additions and 75 deletions.
27 changes: 27 additions & 0 deletions .chloggen/add_azure_default_auth.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: azuredataexplorerexporter

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Add new configuration option `use_default_auth` to enable default authentication for Azure Data Explorer. This option allows users to leverage workload identity for authentication.

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [33667]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [user]
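
The `use_default_auth` option described in the entry above is a single boolean on the exporter config. A minimal sketch of how it might be set, assuming the exporter's existing `cluster_uri` and `db_name` fields (check the exporter README for the authoritative field names):

```yaml
exporters:
  azuredataexplorer:
    # `cluster_uri` and `db_name` are assumed from the exporter's existing config surface;
    # verify the names against the exporter README.
    cluster_uri: "https://CLUSTER.kusto.windows.net"
    db_name: "oteldb"
    # Option added by this change: authenticate with the Azure default
    # credential chain (e.g. workload identity) instead of an app id/key pair.
    use_default_auth: true
```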
27 changes: 27 additions & 0 deletions .chloggen/receiver-otlpjsonfile-profiles.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: otlpjsonfilereceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Add support for profiles signal

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [35977]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: []
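
A sketch of how the new signal might be wired up, assuming the receiver's usual `include` file-matching option; profiles pipelines are still experimental in the Collector (they may sit behind a feature gate), and the `debug` exporter is used here only as a placeholder:

```yaml
receivers:
  otlpjsonfile:
    # Assumed standard file-matching option of this receiver.
    include:
      - /var/log/otlp/*.json

exporters:
  debug:

service:
  pipelines:
    profiles:
      receivers: [otlpjsonfile]
      exporters: [debug]
```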
27 changes: 27 additions & 0 deletions .chloggen/split-log-records.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: connector/routing

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Add ability to route log records individually using OTTL log record context.

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [19738]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: []
27 changes: 27 additions & 0 deletions .chloggen/splunkenterprisereceiver-search-artifacts.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: splunkenterprisereceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Add new metrics for Splunk Enterprise dispatch artifacts

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [35950]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [user]
100 changes: 95 additions & 5 deletions connector/routingconnector/README.md
@@ -33,14 +33,17 @@ If you are not already familiar with connectors, you may find it helpful to first
The following settings are available:

- `table (required)`: the routing table for this connector.
- `table.context (optional, default: resource)`: the [OTTL Context] in which the statement will be evaluated. Currently, only `resource` and `log` are supported.
- `table.statement`: the routing condition provided as the [OTTL] statement. Required if `table.condition` is not provided.
- `table.condition`: the routing condition provided as the [OTTL] condition. Required if `table.statement` is not provided.
- `table.pipelines (required)`: the list of pipelines to use when the routing condition is met.
- `default_pipelines (optional)`: contains the list of pipelines to use when a record does not meet any of the specified conditions.
- `error_mode (optional)`: determines how errors returned from OTTL statements are handled. Valid values are `propagate`, `ignore` and `silent`. If `ignore` or `silent` is used and a statement's condition has an error then the payload will be routed to the default pipelines. When `silent` is used the error is not logged. If not supplied, `propagate` is used.
- `match_once (optional, default: false)`: determines whether the connector matches multiple statements or not. If enabled, the payload will be routed to the first pipeline in the `table` whose routing condition is met.
- `match_once (optional, default: false)`: determines whether the connector stops after the first matching route. If enabled, the payload will be routed to the first pipeline in the `table` whose routing condition is met. It may only be `false` when all routes use the `resource` context.

Example:
### Examples

Route traces based on an attribute:

```yaml
receivers:
@@ -91,6 +94,92 @@ service:
      exporters: [jaeger/ecorp]
```
Route logs based on region:
```yaml
receivers:
  otlp:

exporters:
  file/other:
    path: ./other.log
  file/east:
    path: ./east.log
  file/west:
    path: ./west.log

connectors:
  routing:
    match_once: true
    default_pipelines: [logs/other]
    table:
      - context: log
        condition: attributes["region"] == "east"
        pipelines: [logs/east]
      - context: log
        condition: attributes["region"] == "west"
        pipelines: [logs/west]

service:
  pipelines:
    logs/in:
      receivers: [otlp]
      exporters: [routing]
    logs/east:
      receivers: [routing]
      exporters: [file/east]
    logs/west:
      receivers: [routing]
      exporters: [file/west]
    logs/other:
      receivers: [routing]
      exporters: [file/other]
```
Route all low-severity logs to cheap storage. Route the remainder based on service name:
```yaml
receivers:
  otlp:

exporters:
  file/cheap:
    path: ./cheap.log
  file/service1:
    path: ./service1-important.log
  file/service2:
    path: ./service2-important.log

connectors:
  routing:
    match_once: true
    table:
      - context: log
        condition: severity_number < SEVERITY_NUMBER_ERROR
        pipelines: [logs/cheap]
      - context: resource
        condition: attributes["service.name"] == "service1"
        pipelines: [logs/service1]
      - context: resource
        condition: attributes["service.name"] == "service2"
        pipelines: [logs/service2]

service:
  pipelines:
    logs/in:
      receivers: [otlp]
      exporters: [routing]
    logs/cheap:
      receivers: [routing]
      exporters: [file/cheap]
    logs/service1:
      receivers: [routing]
      exporters: [file/service1]
    logs/service2:
      receivers: [routing]
      exporters: [file/service2]
```
A signal may match the routing conditions of more than one routing table entry. In that case, it is routed to all pipelines of the matching routes.
Conversely, if none of the routing conditions are met, the signal is routed to the default pipelines.
@@ -109,10 +198,11 @@
The full list of settings exposed for this connector is documented [here](./config.go) with detailed sample configuration files:
- [logs](./testdata/config_logs.yaml)
- [metrics](./testdata/config_metrics.yaml)
- [traces](./testdata/config_traces.yaml)
- [logs](./testdata/config/logs.yaml)
- [metrics](./testdata/config/metrics.yaml)
- [traces](./testdata/config/traces.yaml)
[Connectors README]:https://github.com/open-telemetry/opentelemetry-collector/blob/main/connector/README.md
[OTTL]: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/pkg/ottl/README.md
[OTTL Context]: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/pkg/ottl/LANGUAGE.md#contexts
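
The multi-match behavior described in the README above (with `match_once: false`, a signal can be routed by several table entries, and unmatched signals fall through to `default_pipelines`) can be illustrated with a small sketch; pipeline and exporter names are illustrative:

```yaml
connectors:
  routing:
    # match_once defaults to false, so a resource that satisfies several
    # conditions is routed to every matching entry's pipelines.
    default_pipelines: [traces/other]
    table:
      - statement: route() where attributes["X-Tenant"] == "acme"
        pipelines: [traces/acme]
      - statement: route() where attributes["X-Tenant"] != ""
        pipelines: [traces/all-tenants]
```

With this table, a resource whose `X-Tenant` attribute is `acme` is delivered to both `traces/acme` and `traces/all-tenants`, while a resource matching neither condition falls through to `traces/other`.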
14 changes: 14 additions & 0 deletions connector/routingconnector/config.go
@@ -67,13 +67,27 @@ func (c *Config) Validate() error {
        if len(item.Pipelines) == 0 {
            return errNoPipelines
        }

        switch item.Context {
        case "", "resource": // ok
        case "log":
            if !c.MatchOnce {
                return errors.New("log context is not supported with match_once: false")
            }
        default:
            return errors.New("invalid context: " + item.Context)
        }
    }

    return nil
}

// RoutingTableItem specifies how data should be routed to the different pipelines
type RoutingTableItem struct {
    // One of "resource" or "log" (other OTTL contexts will be added in the future)
    // Optional. Default "resource".
    Context string `mapstructure:"context"`

    // Statement is a OTTL statement used for making a routing decision.
    // One of 'Statement' or 'Condition' must be provided.
    Statement string `mapstructure:"statement"`
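The validation added above ties the new `context` field to `match_once`: a `log`-context route is only accepted when `match_once` is `true`. A configuration sketch like the following (names illustrative) would be rejected with `log context is not supported with match_once: false`:

```yaml
connectors:
  routing:
    match_once: false # default; must be true before log-context routes are accepted
    table:
      - context: log
        condition: attributes["region"] == "east"
        pipelines: [logs/east]
```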
31 changes: 31 additions & 0 deletions connector/routingconnector/config_test.go
@@ -203,6 +203,37 @@ func TestValidateConfig(t *testing.T) {
            },
            error: "invalid route: both condition and statement provided",
        },
        {
            name: "invalid context",
            config: &Config{
                Table: []RoutingTableItem{
                    {
                        Context:   "invalid",
                        Statement: `route() where attributes["attr"] == "acme"`,
                        Pipelines: []pipeline.ID{
                            pipeline.NewIDWithName(pipeline.SignalTraces, "otlp"),
                        },
                    },
                },
            },
            error: "invalid context: invalid",
        },
        {
            name: "log context with match_once false",
            config: &Config{
                MatchOnce: false,
                Table: []RoutingTableItem{
                    {
                        Context:   "log",
                        Statement: `route() where attributes["attr"] == "acme"`,
                        Pipelines: []pipeline.ID{
                            pipeline.NewIDWithName(pipeline.SignalTraces, "otlp"),
                        },
                    },
                },
            },
            error: "log context is not supported with match_once: false",
        },
    }

    for _, tt := range tests {
40 changes: 22 additions & 18 deletions connector/routingconnector/logs.go
@@ -15,6 +15,7 @@ import (

"github.com/open-telemetry/opentelemetry-collector-contrib/connector/routingconnector/internal/plogutil"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllog"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottlresource"
)

@@ -74,29 +75,36 @@ func (c *logsConnector) switchLogs(ctx context.Context, ld plog.Logs) error {
    var errs error
    for _, route := range c.router.routeSlice {
        matchedLogs := plog.NewLogs()

        plogutil.MoveResourcesIf(ld, matchedLogs,
            func(rl plog.ResourceLogs) bool {
                rtx := ottlresource.NewTransformContext(rl.Resource(), rl)
                _, isMatch, err := route.statement.Execute(ctx, rtx)
                errs = errors.Join(errs, err)
                return isMatch
            },
        )

        switch route.statementContext {
        case "", "resource":
            plogutil.MoveResourcesIf(ld, matchedLogs,
                func(rl plog.ResourceLogs) bool {
                    rtx := ottlresource.NewTransformContext(rl.Resource(), rl)
                    _, isMatch, err := route.resourceStatement.Execute(ctx, rtx)
                    errs = errors.Join(errs, err)
                    return isMatch
                },
            )
        case "log":
            plogutil.MoveRecordsWithContextIf(ld, matchedLogs,
                func(rl plog.ResourceLogs, sl plog.ScopeLogs, lr plog.LogRecord) bool {
                    ltx := ottllog.NewTransformContext(lr, sl.Scope(), rl.Resource(), sl, rl)
                    _, isMatch, err := route.logStatement.Execute(ctx, ltx)
                    errs = errors.Join(errs, err)
                    return isMatch
                },
            )
        }
        if errs != nil {
            if c.config.ErrorMode == ottl.PropagateError {
                return errs
            }
            groupAll(groups, c.router.defaultConsumer, matchedLogs)

        }
        groupAll(groups, route.consumer, matchedLogs)
    }

    // anything left wasn't matched by any route. Send to default consumer
    groupAll(groups, c.router.defaultConsumer, ld)

    for consumer, group := range groups {
        errs = errors.Join(errs, consumer.ConsumeLogs(ctx, group))
    }
@@ -110,14 +118,12 @@ func (c *logsConnector) matchAll(ctx context.Context, ld plog.Logs) error {
    // higher CPU usage.
    groups := make(map[consumer.Logs]plog.Logs)
    var errs error

    for i := 0; i < ld.ResourceLogs().Len(); i++ {
        rlogs := ld.ResourceLogs().At(i)
        rtx := ottlresource.NewTransformContext(rlogs.Resource(), rlogs)

        noRoutesMatch := true
        for _, route := range c.router.routeSlice {
            _, isMatch, err := route.statement.Execute(ctx, rtx)
            _, isMatch, err := route.resourceStatement.Execute(ctx, rtx)
            if err != nil {
                if c.config.ErrorMode == ottl.PropagateError {
                    return err
Expand All @@ -129,9 +135,7 @@ func (c *logsConnector) matchAll(ctx context.Context, ld plog.Logs) error {
                noRoutesMatch = false
                group(groups, route.consumer, rlogs)
            }

        }

        if noRoutesMatch {
            // no route conditions are matched, add resource logs to default exporters group
            group(groups, c.router.defaultConsumer, rlogs)