From 3ae10545b9053a9ffcb93a95ff369806ce06ab92 Mon Sep 17 00:00:00 2001 From: adatzer Date: Tue, 15 Mar 2022 10:02:27 +0200 Subject: [PATCH 01/25] Allow configuration from a file (closes #105) --- cmd/init.go | 16 +- config/component.go | 54 ++ config/component_test.go | 450 ++++++++++++++ config/config.go | 566 +++++++----------- config/config_test.go | 151 ++--- config/decode.go | 169 ++++++ config/decode_test.go | 199 ++++++ config/examples/README.md | 128 ++++ .../failure-targets/eventhub-extended.hcl | 30 + .../failure-targets/eventhub-simple.hcl | 11 + .../failure-targets/http-extended.hcl | 42 ++ .../examples/failure-targets/http-simple.hcl | 8 + .../failure-targets/kafka-extended.hcl | 69 +++ .../examples/failure-targets/kafka-simple.hcl | 11 + .../failure-targets/kinesis-extended.hcl | 14 + .../failure-targets/kinesis-simple.hcl | 11 + config/examples/failure-targets/pubsub.hcl | 11 + .../examples/failure-targets/sqs-extended.hcl | 14 + .../examples/failure-targets/sqs-simple.hcl | 11 + config/examples/failure-targets/stdout.hcl | 6 + config/examples/sentry/sentry-extended.hcl | 12 + config/examples/sentry/sentry-simple.hcl | 6 + config/examples/sources/kinesis-extended.hcl | 25 + config/examples/sources/kinesis-simple.hcl | 14 + config/examples/sources/pubsub-extended.hcl | 14 + config/examples/sources/pubsub-simple.hcl | 11 + config/examples/sources/sqs-extended.hcl | 17 + config/examples/sources/sqs-simple.hcl | 11 + config/examples/sources/stdin-extended.hcl | 8 + config/examples/sources/stdin-simple.hcl | 7 + .../stats-receiver-statsd-extended.hcl | 20 + .../stats-receiver-statsd-simple.hcl | 5 + config/examples/targets/eventhub-extended.hcl | 30 + config/examples/targets/eventhub-simple.hcl | 11 + config/examples/targets/http-extended.hcl | 42 ++ config/examples/targets/http-simple.hcl | 8 + config/examples/targets/kafka-extended.hcl | 69 +++ config/examples/targets/kafka-simple.hcl | 11 + config/examples/targets/kinesis-extended.hcl | 14 + 
config/examples/targets/kinesis-simple.hcl | 11 + config/examples/targets/pubsub.hcl | 11 + config/examples/targets/sqs-extended.hcl | 14 + config/examples/targets/sqs-simple.hcl | 11 + config/examples/targets/stdout.hcl | 6 + config/test-fixtures/empty.hcl | 1 + config/test-fixtures/invalids.hcl | 11 + config/test-fixtures/observer.hcl | 11 + config/test-fixtures/sentry.hcl | 7 + .../test-fixtures/source-kinesis-extended.hcl | 12 + .../test-fixtures/source-kinesis-simple.hcl | 9 + config/test-fixtures/source-sqs.hcl | 9 + .../target-eventhub-extended.hcl | 14 + .../test-fixtures/target-eventhub-simple.hcl | 8 + config/test-fixtures/target-http-extended.hcl | 17 + config/test-fixtures/target-http-simple.hcl | 7 + .../test-fixtures/target-kafka-extended.hcl | 26 + config/test-fixtures/target-kafka-simple.hcl | 8 + config/test-fixtures/target-kinesis.hcl | 9 + config/test-fixtures/target-pubsub.hcl | 8 + config/test-fixtures/target-sqs.hcl | 9 + go.mod | 12 +- go.sum | 29 + pkg/source/kinesis/kinesis_source.go | 67 ++- pkg/source/kinesis/kinesis_source_test.go | 94 ++- pkg/source/pubsub/pubsub_source.go | 52 +- pkg/source/sourceconfig/source_config.go | 37 +- pkg/source/sourceconfig/source_config_test.go | 4 +- pkg/source/sqs/sqs_source.go | 60 +- pkg/source/sqs/sqs_source_test.go | 78 ++- pkg/source/stdin/stdin_source.go | 48 +- pkg/source/stdin/stdin_source_test.go | 10 +- pkg/statsreceiver/statsd.go | 54 ++ pkg/target/eventhub.go | 53 +- pkg/target/http.go | 66 ++ pkg/target/kafka.go | 74 ++- pkg/target/kinesis.go | 41 ++ pkg/target/pubsub.go | 40 ++ pkg/target/sqs.go | 41 ++ pkg/target/stdout.go | 26 + 79 files changed, 2873 insertions(+), 538 deletions(-) create mode 100644 config/component.go create mode 100644 config/component_test.go create mode 100644 config/decode.go create mode 100644 config/decode_test.go create mode 100644 config/examples/README.md create mode 100644 config/examples/failure-targets/eventhub-extended.hcl create mode 100644 
config/examples/failure-targets/eventhub-simple.hcl create mode 100644 config/examples/failure-targets/http-extended.hcl create mode 100644 config/examples/failure-targets/http-simple.hcl create mode 100644 config/examples/failure-targets/kafka-extended.hcl create mode 100644 config/examples/failure-targets/kafka-simple.hcl create mode 100644 config/examples/failure-targets/kinesis-extended.hcl create mode 100644 config/examples/failure-targets/kinesis-simple.hcl create mode 100644 config/examples/failure-targets/pubsub.hcl create mode 100644 config/examples/failure-targets/sqs-extended.hcl create mode 100644 config/examples/failure-targets/sqs-simple.hcl create mode 100644 config/examples/failure-targets/stdout.hcl create mode 100644 config/examples/sentry/sentry-extended.hcl create mode 100644 config/examples/sentry/sentry-simple.hcl create mode 100644 config/examples/sources/kinesis-extended.hcl create mode 100644 config/examples/sources/kinesis-simple.hcl create mode 100644 config/examples/sources/pubsub-extended.hcl create mode 100644 config/examples/sources/pubsub-simple.hcl create mode 100644 config/examples/sources/sqs-extended.hcl create mode 100644 config/examples/sources/sqs-simple.hcl create mode 100644 config/examples/sources/stdin-extended.hcl create mode 100644 config/examples/sources/stdin-simple.hcl create mode 100644 config/examples/stats-receivers/stats-receiver-statsd-extended.hcl create mode 100644 config/examples/stats-receivers/stats-receiver-statsd-simple.hcl create mode 100644 config/examples/targets/eventhub-extended.hcl create mode 100644 config/examples/targets/eventhub-simple.hcl create mode 100644 config/examples/targets/http-extended.hcl create mode 100644 config/examples/targets/http-simple.hcl create mode 100644 config/examples/targets/kafka-extended.hcl create mode 100644 config/examples/targets/kafka-simple.hcl create mode 100644 config/examples/targets/kinesis-extended.hcl create mode 100644 
config/examples/targets/kinesis-simple.hcl create mode 100644 config/examples/targets/pubsub.hcl create mode 100644 config/examples/targets/sqs-extended.hcl create mode 100644 config/examples/targets/sqs-simple.hcl create mode 100644 config/examples/targets/stdout.hcl create mode 100644 config/test-fixtures/empty.hcl create mode 100644 config/test-fixtures/invalids.hcl create mode 100644 config/test-fixtures/observer.hcl create mode 100644 config/test-fixtures/sentry.hcl create mode 100644 config/test-fixtures/source-kinesis-extended.hcl create mode 100644 config/test-fixtures/source-kinesis-simple.hcl create mode 100644 config/test-fixtures/source-sqs.hcl create mode 100644 config/test-fixtures/target-eventhub-extended.hcl create mode 100644 config/test-fixtures/target-eventhub-simple.hcl create mode 100644 config/test-fixtures/target-http-extended.hcl create mode 100644 config/test-fixtures/target-http-simple.hcl create mode 100644 config/test-fixtures/target-kafka-extended.hcl create mode 100644 config/test-fixtures/target-kafka-simple.hcl create mode 100644 config/test-fixtures/target-kinesis.hcl create mode 100644 config/test-fixtures/target-pubsub.hcl create mode 100644 config/test-fixtures/target-sqs.hcl diff --git a/cmd/init.go b/cmd/init.go index 2045a265..939425c8 100644 --- a/cmd/init.go +++ b/cmd/init.go @@ -44,8 +44,8 @@ func Init() (*config.Config, bool, error) { } // Configure GCP Access (if set) - if cfg.GoogleServiceAccountB64 != "" { - targetFile, err := common.GetGCPServiceAccountFromBase64(cfg.GoogleServiceAccountB64) + if cfg.Data.GoogleServiceAccountB64 != "" { + targetFile, err := common.GetGCPServiceAccountFromBase64(cfg.Data.GoogleServiceAccountB64) if err != nil { return nil, false, errors.Wrap(err, "Failed to store GCP Service Account JSON file") } @@ -53,11 +53,11 @@ func Init() (*config.Config, bool, error) { } // Configure Sentry - sentryEnabled := cfg.Sentry.Dsn != "" + sentryEnabled := cfg.Data.Sentry.Dsn != "" if sentryEnabled { err 
:= sentry.Init(sentry.ClientOptions{ - Dsn: cfg.Sentry.Dsn, - Debug: cfg.Sentry.Debug, + Dsn: cfg.Data.Sentry.Dsn, + Debug: cfg.Data.Sentry.Debug, AttachStacktrace: true, }) if err != nil { @@ -65,7 +65,7 @@ func Init() (*config.Config, bool, error) { } sentryTagsMap := map[string]string{} - err = json.Unmarshal([]byte(cfg.Sentry.Tags), &sentryTagsMap) + err = json.Unmarshal([]byte(cfg.Data.Sentry.Tags), &sentryTagsMap) if err != nil { return nil, false, errors.Wrap(err, "Failed to unmarshall SENTRY_TAGS to map") } @@ -79,10 +79,10 @@ func Init() (*config.Config, bool, error) { } // Configure logging level - if level, ok := logLevelsMap[cfg.LogLevel]; ok { + if level, ok := logLevelsMap[cfg.Data.LogLevel]; ok { log.SetLevel(level) } else { - return nil, sentryEnabled, fmt.Errorf("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided %s", cfg.LogLevel) + return nil, sentryEnabled, fmt.Errorf("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided %s", cfg.Data.LogLevel) } log.Debugf("Config: %+v", cfg) diff --git a/config/component.go b/config/component.go new file mode 100644 index 00000000..de270815 --- /dev/null +++ b/config/component.go @@ -0,0 +1,54 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package config + +// ComponentConfigurable is the interface that wraps the ProvideDefault method. +type ComponentConfigurable interface { + // ProvideDefault returns a pointer to a structure that will be + // written with the decoded configuration. + ProvideDefault() (interface{}, error) +} + +// ComponentCreator is the interface that wraps the Create method. +type ComponentCreator interface { + // Create returns a pointer to an output structure given a pointer + // to an input structure. 
This interface is expected to be implemented + // by components that are creatable through a configuration. + Create(i interface{}) (interface{}, error) +} + +// Pluggable is the interface that groups +// ComponentConfigurable and ComponentCreator. +type Pluggable interface { + ComponentConfigurable + ComponentCreator +} + +// DecodingHandler is the type of any function that, given a ComponentConfigurable +// and a Decoder, returns a pointer to a structure that was decoded. +type DecodingHandler func(c ComponentConfigurable, d Decoder) (interface{}, error) + +// WithDecoderOptions returns a DecodingHandler closed over some DecoderOptions. +func WithDecoderOptions(opts *DecoderOptions) DecodingHandler { + return func(c ComponentConfigurable, d Decoder) (interface{}, error) { + return Configure(c, d, opts) + } +} + +// Configure returns the decoded target. +func Configure(c ComponentConfigurable, d Decoder, opts *DecoderOptions) (interface{}, error) { + target, err := c.ProvideDefault() // target is ptr + if err != nil { + return nil, err + } + + if err = d.Decode(opts, target); err != nil { + return nil, err + } + + return target, nil +} diff --git a/config/component_test.go b/config/component_test.go new file mode 100644 index 00000000..f9515d12 --- /dev/null +++ b/config/component_test.go @@ -0,0 +1,450 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package config + +import ( + "errors" + "path/filepath" + "reflect" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/statsreceiver" + "github.com/snowplow-devops/stream-replicator/pkg/target" +) + +func TestCreateTargetComponentHCL(t *testing.T) { + testCases := []struct { + File string + Plug Pluggable + Expected interface{} + }{ + { + File: "target-sqs.hcl", + Plug: testSQSTargetAdapter(testSQSTargetFunc), + Expected: &target.SQSTargetConfig{ + QueueName: "testQueue", + Region: "eu-test-1", + RoleARN: "xxx-test-role-arn", + }, + }, + { + File: "target-eventhub-simple.hcl", + Plug: testEventHubTargetAdapter(testEventHubTargetFunc), + Expected: &target.EventHubConfig{ + EventHubNamespace: "testNamespace", + EventHubName: "testName", + MaxAutoRetries: 1, + MessageByteLimit: 1048576, + ChunkByteLimit: 1048576, + ChunkMessageLimit: 500, + ContextTimeoutInSeconds: 20, + BatchByteLimit: 1048576, + }, + }, + { + File: "target-eventhub-extended.hcl", + Plug: testEventHubTargetAdapter(testEventHubTargetFunc), + Expected: &target.EventHubConfig{ + EventHubNamespace: "testNamespace", + EventHubName: "testName", + MaxAutoRetries: 2, + MessageByteLimit: 1000000, + ChunkByteLimit: 1000000, + ChunkMessageLimit: 501, + ContextTimeoutInSeconds: 21, + BatchByteLimit: 1000000, + }, + }, + { + File: "target-http-simple.hcl", + Plug: testHTTPTargetAdapter(testHTTPTargetFunc), + Expected: &target.HTTPTargetConfig{ + HTTPURL: "testUrl", + ByteLimit: 1048576, + RequestTimeoutInSeconds: 5, + ContentType: "application/json", + Headers: "", + BasicAuthUsername: "", + BasicAuthPassword: "", + CertFile: "", + KeyFile: "", + CaFile: "", + SkipVerifyTLS: false, + }, + }, + { + File: "target-http-extended.hcl", + Plug: testHTTPTargetAdapter(testHTTPTargetFunc), + Expected: &target.HTTPTargetConfig{ + HTTPURL: "testUrl", + ByteLimit: 1000000, + RequestTimeoutInSeconds: 2, + ContentType: "test/test", + 
Headers: "{\"Accept-Language\":\"en-US\"}", + BasicAuthUsername: "testUsername", + BasicAuthPassword: "testPass", + CertFile: "test.cert", + KeyFile: "test.key", + CaFile: "test.ca", + SkipVerifyTLS: true, + }, + }, + { + File: "target-kafka-simple.hcl", + Plug: testKafkaTargetAdapter(testKafkaTargetFunc), + Expected: &target.KafkaConfig{ + Brokers: "testBrokers", + TopicName: "testTopic", + TargetVersion: "", + MaxRetries: 10, + ByteLimit: 1048576, + Compress: false, + WaitForAll: false, + Idempotent: false, + EnableSASL: false, + SASLUsername: "", + SASLPassword: "", + SASLAlgorithm: "sha512", + CertFile: "", + KeyFile: "", + CaFile: "", + SkipVerifyTLS: false, + ForceSync: false, + FlushFrequency: 0, + FlushMessages: 0, + FlushBytes: 0, + }, + }, + { + File: "target-kafka-extended.hcl", + Plug: testKafkaTargetAdapter(testKafkaTargetFunc), + Expected: &target.KafkaConfig{ + Brokers: "testBrokers", + TopicName: "testTopic", + TargetVersion: "1.2.3", + MaxRetries: 11, + ByteLimit: 1000000, + Compress: true, + WaitForAll: true, + Idempotent: true, + EnableSASL: true, + SASLUsername: "testUsername", + SASLPassword: "testPass", + SASLAlgorithm: "sha256", + CertFile: "test.cert", + KeyFile: "test.key", + CaFile: "test.ca", + SkipVerifyTLS: true, + ForceSync: true, + FlushFrequency: 2, + FlushMessages: 2, + FlushBytes: 2, + }, + }, + { + File: "target-kinesis.hcl", + Plug: testKinesisTargetAdapter(testKinesisTargetFunc), + Expected: &target.KinesisTargetConfig{ + StreamName: "testStream", + Region: "eu-test-1", + RoleARN: "xxx-test-role-arn", + }, + }, + { + File: "target-pubsub.hcl", + Plug: testPubSubTargetAdapter(testPubSubTargetFunc), + Expected: &target.PubSubTargetConfig{ + ProjectID: "testId", + TopicName: "testTopic", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join("test-fixtures", tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := NewConfig() 
+ assert.NotNil(c) + assert.Nil(err) + + use := c.Data.Target.Use + decoderOpts := &DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestCreateFailureTargetComponentENV(t *testing.T) { + testCase := struct { + Name string + Plug Pluggable + Expected interface{} + }{ + Name: "test_failure_target_kafka_extended_env", + Plug: testKafkaTargetAdapter(testKafkaTargetFunc), + Expected: &target.KafkaConfig{ + Brokers: "testBrokers", + TopicName: "testTopic", + TargetVersion: "1.2.3", + MaxRetries: 11, + ByteLimit: 1000000, + Compress: true, + WaitForAll: true, + Idempotent: true, + EnableSASL: true, + SASLUsername: "testUsername", + SASLPassword: "testPass", + SASLAlgorithm: "sha256", + CertFile: "test.cert", + KeyFile: "test.key", + CaFile: "test.ca", + SkipVerifyTLS: true, + ForceSync: true, + FlushFrequency: 2, + FlushMessages: 2, + FlushBytes: 2, + }, + } + + t.Run(testCase.Name, func(t *testing.T) { + assert := assert.New(t) + + t.Setenv("FAILURE_TARGET_NAME", "kafka") + t.Setenv("FAILURE_TARGET_KAFKA_BROKERS", "testBrokers") + t.Setenv("FAILURE_TARGET_KAFKA_TOPIC_NAME", "testTopic") + t.Setenv("FAILURE_TARGET_KAFKA_TARGET_VERSION", "1.2.3") + t.Setenv("FAILURE_TARGET_KAFKA_MAX_RETRIES", "11") + t.Setenv("FAILURE_TARGET_KAFKA_BYTE_LIMIT", "1000000") + t.Setenv("FAILURE_TARGET_KAFKA_COMPRESS", "true") + t.Setenv("FAILURE_TARGET_KAFKA_WAIT_FOR_ALL", "true") + t.Setenv("FAILURE_TARGET_KAFKA_IDEMPOTENT", "true") + t.Setenv("FAILURE_TARGET_KAFKA_ENABLE_SASL", "true") + t.Setenv("FAILURE_TARGET_KAFKA_SASL_USERNAME", "testUsername") + t.Setenv("FAILURE_TARGET_KAFKA_SASL_PASSWORD", "testPass") + t.Setenv("FAILURE_TARGET_KAFKA_SASL_ALGORITHM", "sha256") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_CERT_FILE", "test.cert") + 
t.Setenv("FAILURE_TARGET_KAFKA_TLS_KEY_FILE", "test.key") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_CA_FILE", "test.ca") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_SKIP_VERIFY_TLS", "true") + t.Setenv("FAILURE_TARGET_KAFKA_FORCE_SYNC_PRODUCER", "true") + t.Setenv("FAILURE_TARGET_KAFKA_FLUSH_FREQUENCY", "2") + t.Setenv("FAILURE_TARGET_KAFKA_FLUSH_MESSAGES", "2") + t.Setenv("FAILURE_TARGET_KAFKA_FLUSH_BYTES", "2") + + c, err := NewConfig() + assert.NotNil(c) + assert.Nil(err) + + assert.Equal(c.Data.FailureTarget.Target.Name, "kafka") + decoderOpts := &DecoderOptions{ + Prefix: "FAILURE_", + } + + result, err := c.CreateComponent(testCase.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, testCase.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(testCase.Expected)) + } + }) +} + +func TestCreateObserverComponentHCL(t *testing.T) { + testCases := []struct { + File string + Plug Pluggable + Expected interface{} + }{ + { + File: "observer.hcl", + Plug: testStatsDAdapter(testStatsDFunc), + Expected: &statsreceiver.StatsDStatsReceiverConfig{ + Address: "test.localhost", + Prefix: "snowplow.test", + Tags: "{\"testKey\": \"testValue\"}", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join("test-fixtures", tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := NewConfig() + assert.NotNil(c) + assert.Nil(err) + assert.Equal(c.Data.StatsReceiver.TimeoutSec, 2) + assert.Equal(c.Data.StatsReceiver.BufferSec, 20) + + use := c.Data.StatsReceiver.Receiver + decoderOpts := &DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +// Test Helpers +// SQS +func 
testSQSTargetAdapter(f func(c *target.SQSTargetConfig) (*target.SQSTargetConfig, error)) target.SQSTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.SQSTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected SQSTargetConfig") + } + + return f(cfg) + } + +} + +func testSQSTargetFunc(c *target.SQSTargetConfig) (*target.SQSTargetConfig, error) { + + return c, nil +} + +// EventHub +func testEventHubTargetAdapter(f func(c *target.EventHubConfig) (*target.EventHubConfig, error)) target.EventHubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.EventHubConfig) + if !ok { + return nil, errors.New("invalid input, expected EventHubTargetConfig") + } + + return f(cfg) + } + +} + +func testEventHubTargetFunc(c *target.EventHubConfig) (*target.EventHubConfig, error) { + + return c, nil +} + +// HTTP +func testHTTPTargetAdapter(f func(c *target.HTTPTargetConfig) (*target.HTTPTargetConfig, error)) target.HTTPTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.HTTPTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected HTTPTargetConfig") + } + + return f(cfg) + } + +} + +func testHTTPTargetFunc(c *target.HTTPTargetConfig) (*target.HTTPTargetConfig, error) { + + return c, nil +} + +// Kafka +func testKafkaTargetAdapter(f func(c *target.KafkaConfig) (*target.KafkaConfig, error)) target.KafkaTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.KafkaConfig) + if !ok { + return nil, errors.New("invalid input, expected KafkaTargetConfig") + } + + return f(cfg) + } + +} + +func testKafkaTargetFunc(c *target.KafkaConfig) (*target.KafkaConfig, error) { + + return c, nil +} + +// Kinesis +func testKinesisTargetAdapter(f func(c *target.KinesisTargetConfig) (*target.KinesisTargetConfig, error)) target.KinesisTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := 
i.(*target.KinesisTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected KinesisTargetConfig") + } + + return f(cfg) + } + +} + +func testKinesisTargetFunc(c *target.KinesisTargetConfig) (*target.KinesisTargetConfig, error) { + + return c, nil +} + +// PubSub +func testPubSubTargetAdapter(f func(c *target.PubSubTargetConfig) (*target.PubSubTargetConfig, error)) target.PubSubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*target.PubSubTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected PubSubTargetConfig") + } + + return f(cfg) + } + +} + +func testPubSubTargetFunc(c *target.PubSubTargetConfig) (*target.PubSubTargetConfig, error) { + + return c, nil +} + +// StatsD +func testStatsDAdapter(f func(c *statsreceiver.StatsDStatsReceiverConfig) (*statsreceiver.StatsDStatsReceiverConfig, error)) statsreceiver.StatsDStatsReceiverAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*statsreceiver.StatsDStatsReceiverConfig) + if !ok { + return nil, errors.New("invalid input, expected StatsDStatsReceiverConfig") + } + + return f(cfg) + } + +} + +func testStatsDFunc(c *statsreceiver.StatsDStatsReceiverConfig) (*statsreceiver.StatsDStatsReceiverConfig, error) { + + return c, nil +} diff --git a/config/config.go b/config/config.go index 2a94aeb8..ff82e5ed 100644 --- a/config/config.go +++ b/config/config.go @@ -9,11 +9,13 @@ package config import ( "fmt" "os" + "path/filepath" "strconv" "strings" "time" - "github.com/caarlos0/env/v6" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" "github.com/pkg/errors" "github.com/snowplow-devops/stream-replicator/pkg/failure" @@ -26,426 +28,274 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/transform" ) -// ---------- [ TARGETS ] ---------- - -// KinesisTargetConfig configures the destination for records consumed -type KinesisTargetConfig struct { - StreamName string `env:"TARGET_KINESIS_STREAM_NAME"` 
- Region string `env:"TARGET_KINESIS_REGION"` - RoleARN string `env:"TARGET_KINESIS_ROLE_ARN"` -} - -// PubSubTargetConfig configures the destination for records consumed -type PubSubTargetConfig struct { - ProjectID string `env:"TARGET_PUBSUB_PROJECT_ID"` - TopicName string `env:"TARGET_PUBSUB_TOPIC_NAME"` -} - -// SQSTargetConfig configures the destination for records consumed -type SQSTargetConfig struct { - QueueName string `env:"TARGET_SQS_QUEUE_NAME"` - Region string `env:"TARGET_SQS_REGION"` - RoleARN string `env:"TARGET_SQS_ROLE_ARN"` +// Config holds the configuration data along with the decoder to decode them +type Config struct { + Data *ConfigurationData + Decoder Decoder } -// KafkaTargetConfig configures the destination for records consumed -type KafkaTargetConfig struct { - Brokers string `env:"TARGET_KAFKA_BROKERS"` // REQUIRED - TopicName string `env:"TARGET_KAFKA_TOPIC_NAME"` // REQUIRED - TargetVersion string `env:"TARGET_KAFKA_TARGET_VERSION"` // The Kafka version we should target e.g. 
2.7.0 or 0.11.0.2 - MaxRetries int `env:"TARGET_KAFKA_MAX_RETRIES" envDefault:"10"` // Max retries - ByteLimit int `env:"TARGET_KAFKA_BYTE_LIMIT" envDefault:"1048576"` // Kafka Default is 1MiB - Compress bool `env:"TARGET_KAFKA_COMPRESS"` // Reduces Network usage & Increases latency by compressing data - WaitForAll bool `env:"TARGET_KAFKA_WAIT_FOR_ALL"` // Sets RequireAcks = WaitForAll which waits for min.insync.replicas to Ack - Idempotent bool `env:"TARGET_KAFKA_IDEMPOTENT"` // Exactly once writes - Also sets RequiredAcks = WaitForAll - EnableSASL bool `env:"TARGET_KAFKA_ENABLE_SASL"` // Enables SASL Support - SASLUsername string `env:"TARGET_KAFKA_SASL_USERNAME"` // SASL auth - SASLPassword string `env:"TARGET_KAFKA_SASL_PASSWORD"` // SASL auth - SASLAlgorithm string `env:"TARGET_KAFKA_SASL_ALGORITHM" envDefault:"sha512"` // sha256 or sha512 - CertFile string `env:"TARGET_KAFKA_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"TARGET_KAFKA_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"TARGET_KAFKA_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"TARGET_KAFKA_TLS_SKIP_VERIFY_TLS"` // Optional skip verifying ssl certificates chain - ForceSyncProducer bool `env:"TARGET_KAFKA_FORCE_SYNC_PRODUCER"` // Forces the use of the Sync Producer, emits as fast as possible, may limit performance - FlushFrequency int `env:"TARGET_KAFKA_FLUSH_FREQUENCY" envDefault:"0"` // Milliseconds between flushes of events - 0 = as fast as possible - FlushMessages int `env:"TARGET_KAFKA_FLUSH_MESSAGES" envDefault:"0"` // Best effort for how many messages are sent in each batch - 0 = as fast as possible - FlushBytes int `env:"TARGET_KAFKA_FLUSH_BYTES" envDefault:"0"` // Best effort for how many bytes will trigger a flush - 0 = as fast as possible +// ConfigurationData for holding all configuration options +type ConfigurationData struct { + 
Source *Component `hcl:"source,block" envPrefix:"SOURCE_"` + Target *Component `hcl:"target,block" envPrefix:"TARGET_"` + FailureTarget *FailureConfig `hcl:"failure_target,block"` + Sentry *SentryConfig `hcl:"sentry,block"` + StatsReceiver *StatsConfig `hcl:"stats_receiver,block"` + Transformation string `hcl:"message_transformation,optional" env:"MESSAGE_TRANSFORMATION"` + LogLevel string `hcl:"log_level,optional" env:"LOG_LEVEL"` + GoogleServiceAccountB64 string `hcl:"google_application_credentials_b64,optional" env:"GOOGLE_APPLICATION_CREDENTIALS_B64"` } -// EventHubTargetConfig configures the destination for records consumed -type EventHubTargetConfig struct { - EventHubNamespace string `env:"TARGET_EVENTHUB_NAMESPACE"` // REQUIRED - namespace housing Eventhub - EventHubName string `env:"TARGET_EVENTHUB_NAME"` // REQUIRED - name of Eventhub - MaxAutoRetries int `env:"TARGET_EVENTHUB_MAX_AUTO_RETRY" envDefault:"1"` // Number of retries handled automatically by the EH library - all retries should be completed before context timeout - MessageByteLimit int `env:"TARGET_EVENTHUB_MESSAGE_BYTE_LIMIT" envDefault:"1048576"` // Default presumes paid tier limit is 1MB - ChunkByteLimit int `env:"TARGET_EVENTHUB_CHUNK_BYTE_LIMIT" envDefault:"1048576"` // Default chunk size of 1MB is arbitrary - ChunkMessageLimit int `env:"TARGET_EVENTHUB_CHUNK_MESSAGE_LIMIT" envDefault:"500"` // Default of 500 is arbitrary - ContextTimeoutInSeconds int `env:"TARGET_EVENTHUB_CONTEXT_TIMEOUT_SECONDS" envDefault:"20"` // Default of 20 is arbitrary - BatchByteLimit int `env:"TARGET_EVENTHUB_BATCH_BYTE_LIMIT" envDefault:"1048576"` // Default batch size of 1MB is the limit for EH's high tier +// Component is a type to abstract over configuration blocks. 
+type Component struct { + Use *Use `hcl:"use,block"` } -// HTTPTargetConfig configures the destination for records consumed -type HTTPTargetConfig struct { - HTTPURL string `env:"TARGET_HTTP_URL"` // REQUIRED - url endpoint - ByteLimit int `env:"TARGET_HTTP_BYTE_LIMIT" envDefault:"1048576"` // Byte limit for requests - RequestTimeoutInSeconds int `env:"TARGET_HTTP_TIMEOUT_IN_SECONDS" envDefault:"5"` // Request timeout in seconds - ContentType string `env:"TARGET_HTTP_CONTENT_TYPE" envDefault:"application/json"` // Content type for POST request - Headers string `env:"TARGET_HTTP_HEADERS"` // Optional headers to add to the request, provided as a JSON of string key-value pairs. eg: `{"Max Forwards": "10", "Accept-Language": "en-US,en-IE", "Accept-Datetime": "Thu, 31 May 2007 20:35:00 GMT"}` - BasicAuthUsername string `env:"TARGET_HTTP_BASICAUTH_USERNAME"` // Optional basicauth username - BasicAuthPassword string `env:"TARGET_HTTP_BASICAUTH_PASSWORD"` // Optional basicauth password - CertFile string `env:"TARGET_HTTP_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"TARGET_HTTP_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"TARGET_HTTP_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"TARGET_HTTP_TLS_SKIP_VERIFY_TLS" envDefault:"false"` // Optional skip verifying ssl certificates chain - if certfile and keyfile are not provided, this setting is not applied. +// Use is a type to denote what a component will be configured to use. 
+type Use struct { + Name string `hcl:",label" env:"NAME"` + Body hcl.Body `hcl:",remain"` } -// TargetsConfig holds configuration for the available targets -type TargetsConfig struct { - Kinesis KinesisTargetConfig - PubSub PubSubTargetConfig - SQS SQSTargetConfig - Kafka KafkaTargetConfig - EventHub EventHubTargetConfig - HTTP HTTPTargetConfig +// FailureConfig holds configuration for the failure target. +// It includes the target component to use. +type FailureConfig struct { + Target *Use `hcl:"use,block" envPrefix:"FAILURE_TARGET_"` + Format string `hcl:"format,optional" env:"FAILURE_TARGETS_FORMAT"` } -// ---------- [ FAILURE MESSAGE TARGETS ] ---------- - -// FailureKinesisTargetConfig configures the destination for records consumed -type FailureKinesisTargetConfig struct { - StreamName string `env:"FAILURE_TARGET_KINESIS_STREAM_NAME"` - Region string `env:"FAILURE_TARGET_KINESIS_REGION"` - RoleARN string `env:"FAILURE_TARGET_KINESIS_ROLE_ARN"` +// SentryConfig configures the Sentry error tracker. +type SentryConfig struct { + Dsn string `hcl:"dsn" env:"SENTRY_DSN"` + Tags string `hcl:"tags,optional" env:"SENTRY_TAGS"` + Debug bool `hcl:"debug,optional" env:"SENTRY_DEBUG"` } -// FailurePubSubTargetConfig configures the destination for records consumed -type FailurePubSubTargetConfig struct { - ProjectID string `env:"FAILURE_TARGET_PUBSUB_PROJECT_ID"` - TopicName string `env:"FAILURE_TARGET_PUBSUB_TOPIC_NAME"` +// StatsConfig holds configuration for stats receivers. +// It includes a receiver component to use. 
+type StatsConfig struct { + Receiver *Use `hcl:"use,block" envPrefix:"STATS_RECEIVER_"` + TimeoutSec int `hcl:"timeout_sec,optional" env:"STATS_RECEIVER_TIMEOUT_SEC"` + BufferSec int `hcl:"buffer_sec,optional" env:"STATS_RECEIVER_BUFFER_SEC"` } -// FailureSQSTargetConfig configures the destination for records consumed -type FailureSQSTargetConfig struct { - QueueName string `env:"FAILURE_TARGET_SQS_QUEUE_NAME"` - Region string `env:"FAILURE_TARGET_SQS_REGION"` - RoleARN string `env:"FAILURE_TARGET_SQS_ROLE_ARN"` +// defaultConfigData returns the initial main configuration target. +func defaultConfigData() *ConfigurationData { + return &ConfigurationData{ + Source: &Component{&Use{Name: "stdin"}}, + Target: &Component{&Use{Name: "stdout"}}, + + FailureTarget: &FailureConfig{ + Target: &Use{Name: "stdout"}, + Format: "snowplow", + }, + Sentry: &SentryConfig{ + Tags: "{}", + }, + StatsReceiver: &StatsConfig{ + Receiver: &Use{}, + TimeoutSec: 1, + BufferSec: 15, + }, + Transformation: "none", + LogLevel: "info", + } } -// FailureKafkaTargetConfig configures the destination for records consumed -type FailureKafkaTargetConfig struct { - Brokers string `env:"FAILURE_TARGET_KAFKA_BROKERS"` // REQUIRED - TopicName string `env:"FAILURE_TARGET_KAFKA_TOPIC_NAME"` // REQUIRED - TargetVersion string `env:"FAILURE_TARGET_KAFKA_TARGET_VERSION"` // The Kafka version we should target e.g. 
2.7.0 or 0.11.0.2 - MaxRetries int `env:"FAILURE_TARGET_KAFKA_MAX_RETRIES" envDefault:"10"` // Max retries - ByteLimit int `env:"FAILURE_TARGET_KAFKA_BYTE_LIMIT" envDefault:"1048576"` // Kafka Default is 1MiB - Compress bool `env:"FAILURE_TARGET_KAFKA_COMPRESS"` // Reduces Network usage & Increases latency by compressing data - WaitForAll bool `env:"FAILURE_TARGET_KAFKA_WAIT_FOR_ALL"` // Sets RequireAcks = WaitForAll which waits for min.insync.replicas to Ack - Idempotent bool `env:"FAILURE_TARGET_KAFKA_IDEMPOTENT"` // Exactly once writes - EnableSASL bool `env:"FAILURE_TARGET_KAFKA_ENABLE_SASL"` // Enables SASL Support - SASLUsername string `env:"FAILURE_TARGET_KAFKA_SASL_USERNAME"` // SASL auth - SASLPassword string `env:"FAILURE_TARGET_KAFKA_SASL_PASSWORD"` // SASL auth - SASLAlgorithm string `env:"FAILURE_TARGET_KAFKA_SASL_ALGORITHM" envDefault:"sha512"` // sha256 or sha512 - CertFile string `env:"FAILURE_TARGET_KAFKA_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"FAILURE_TARGET_KAFKA_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"FAILURE_TARGET_KAFKA_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"FAILURE_TARGET_KAFKA_TLS_SKIP_VERIFY_TLS"` // Optional skip verifying ssl certificates chain - ForceSyncProducer bool `env:"FAILURE_TARGET_KAFKA_FORCE_SYNC_PRODUCER"` // Forces the use of the Sync Producer, emits as fast as possible, may limit performance - FlushFrequency int `env:"FAILURE_TARGET_KAFKA_FLUSH_FREQUENCY" envDefault:"0"` // Milliseconds between flushes of events - 0 = as fast as possible - FlushMessages int `env:"FAILURE_TARGET_KAFKA_FLUSH_MESSAGES" envDefault:"0"` // Best effort for how many messages are sent in each batch - 0 = as fast as possible - FlushBytes int `env:"FAILURE_TARGET_KAFKA_FLUSH_BYTES" envDefault:"0"` // Best effort for how many bytes will trigger a flush - 0 = as fast as 
possible -} +// NewConfig returns a configuration +func NewConfig() (*Config, error) { + filename := os.Getenv("STREAM_REPLICATOR_CONFIG_FILE") + if filename == "" { + return newEnvConfig() + } -// FailureEventHubTargetConfig configures the destination for records consumed -type FailureEventHubTargetConfig struct { - EventHubNamespace string `env:"FAILURE_TARGET_EVENTHUB_NAMESPACE"` // REQUIRED - namespace housing Eventhub - EventHubName string `env:"FAILURE_TARGET_EVENTHUB_NAME"` // REQUIRED - name of Eventhub - MaxAutoRetries int `env:"FAILURE_TARGET_EVENTHUB_MAX_AUTO_RETRY" envDefault:"1"` // Number of retries handled automatically by the EH library - all retries should be completed before context timeout - MessageByteLimit int `env:"FAILURE_TARGET_EVENTHUB_MESSAGE_BYTE_LIMIT" envDefault:"1048576"` // Default presumes paid tier limit is 1MB - ChunkByteLimit int `env:"FAILURE_TARGET_EVENTHUB_CHUNK_BYTE_LIMIT" envDefault:"1048576"` // Default chunk size of 1MB is arbitrary - ChunkMessageLimit int `env:"FAILURE_TARGET_EVENTHUB_CHUNK_MESSAGE_LIMIT" envDefault:"500"` // Default of 500 is arbitrary - ContextTimeoutInSeconds int `env:"FAILURE_TARGET_EVENTHUB_CONTEXT_TIMEOUT_SECONDS" envDefault:"20"` // Default of 20 is arbitrary - BatchByteLimit int `env:"FAILURE_TARGET_EVENTHUB_BATCH_BYTE_LIMIT" envDefault:"1048576"` // Default batch size of 1MB is the limit for EH's high tier + switch suffix := strings.ToLower(filepath.Ext(filename)); suffix { + case ".hcl": + return newHclConfig(filename) + default: + return nil, errors.New("invalid extension for the configuration file") + } } -// FailureHTTPTargetConfig configures the destination for records consumed -type FailureHTTPTargetConfig struct { - HTTPURL string `env:"FAILURE_TARGET_HTTP_URL"` // REQUIRED - url endpoint - byteLimit int `env:"FAILURE_TARGET_HTTP_BYTE_LIMIT" envDefault:"1048576"` // Byte limit for requests - requestTimeoutInSeconds int `env:"FAILURE_TARGET_HTTP_TIMEOUT_IN_SECONDS" envDefault:"5"` // Request 
timeout in seconds - ContentType string `env:"FAILURE_TARGET_HTTP_CONTENT_TYPE" envDefault:"application/json"` // Content type for POST request - Headers string `env:"FAILURE_TARGET_HTTP_HEADERS"` // Optional headers to add to the request, provided as a JSON of string key-value pairs. eg: `{"Max Forwards": "10", "Accept-Language": "en-US,en-IE", "Accept-Datetime": "Thu, 31 May 2007 20:35:00 GMT"}` - BasicAuthUsername string `env:"FAILURE_TARGET_HTTP_BASICAUTH_USERNAME"` // Optional basicauth username - BasicAuthPassword string `env:"FAILURE_TARGET_HTTP_BASICAUTH_PASSWORD"` // Optional basicauth password - CertFile string `env:"FAILURE_TARGET_HTTP_TLS_CERT_FILE"` // The optional certificate file for client authentication - KeyFile string `env:"FAILURE_TARGET_HTTP_TLS_KEY_FILE"` // The optional key file for client authentication - CaFile string `env:"FAILURE_TARGET_HTTP_TLS_CA_FILE"` // The optional certificate authority file for TLS client authentication - SkipVerifyTLS bool `env:"FAILURE_TARGET_HTTP_TLS_SKIP_VERIFY_TLS" envDefault:"false"` // Optional skip verifying ssl certificates chain - if certfile and keyfile are not provided, this setting is not applied. 
-} +func newEnvConfig() (*Config, error) { + var err error -// FailureTargetsConfig holds configuration for the available targets -type FailureTargetsConfig struct { - Kinesis FailureKinesisTargetConfig - PubSub FailurePubSubTargetConfig - SQS FailureSQSTargetConfig - Kafka FailureKafkaTargetConfig - EventHub FailureEventHubTargetConfig - HTTP FailureHTTPTargetConfig - - // Format defines how the message will be transformed before - // being sent to the target - Format string `env:"FAILURE_TARGETS_FORMAT" envDefault:"snowplow"` -} + decoderOpts := &DecoderOptions{} + envDecoder := &EnvDecoder{} -// ---------- [ SOURCES ] ---------- + configData := defaultConfigData() -// KinesisSourceConfig configures the source for records pulled -type KinesisSourceConfig struct { - StreamName string `env:"SOURCE_KINESIS_STREAM_NAME"` - Region string `env:"SOURCE_KINESIS_REGION"` - RoleARN string `env:"SOURCE_KINESIS_ROLE_ARN"` - AppName string `env:"SOURCE_KINESIS_APP_NAME"` - StartTimestamp string `env:"SOURCE_KINESIS_START_TIMESTAMP"` // Timestamp for the kinesis shard iterator to begin processing. 
Format YYYY-MM-DD HH:MM:SS.MS (miliseconds optional) -} + err = envDecoder.Decode(decoderOpts, configData) + if err != nil { + return nil, err + } -// PubSubSourceConfig configures the source for records pulled -type PubSubSourceConfig struct { - ProjectID string `env:"SOURCE_PUBSUB_PROJECT_ID"` - SubscriptionID string `env:"SOURCE_PUBSUB_SUBSCRIPTION_ID"` -} + mainConfig := Config{ + Data: configData, + Decoder: envDecoder, + } -// SQSSourceConfig configures the source for records pulled -type SQSSourceConfig struct { - QueueName string `env:"SOURCE_SQS_QUEUE_NAME"` - Region string `env:"SOURCE_SQS_REGION"` - RoleARN string `env:"SOURCE_SQS_ROLE_ARN"` + return &mainConfig, nil } -// SourcesConfig holds configuration for the available sources -type SourcesConfig struct { - Kinesis KinesisSourceConfig - PubSub PubSubSourceConfig - SQS SQSSourceConfig - - // ConcurrentWrites is how many go-routines a source can leverage to parallelise processing - ConcurrentWrites int `env:"SOURCE_CONCURRENT_WRITES" envDefault:"50"` -} +func newHclConfig(filename string) (*Config, error) { + src, err := os.ReadFile(filename) + if err != nil { + return nil, err + } -// ---------- [ OBSERVABILITY ] ---------- + // Parsing + parser := hclparse.NewParser() + fileHCL, diags := parser.ParseHCL(src, filename) + if diags.HasErrors() { + return nil, diags + } -// SentryConfig configures the Sentry error tracker -type SentryConfig struct { - Dsn string `env:"SENTRY_DSN"` - Tags string `env:"SENTRY_TAGS" envDefault:"{}"` - Debug bool `env:"SENTRY_DEBUG" envDefault:"false"` -} + // Creating EvalContext + evalContext := CreateHclContext() // ptr -// StatsDStatsReceiverConfig configures the stats metrics receiver -type StatsDStatsReceiverConfig struct { - Address string `env:"STATS_RECEIVER_STATSD_ADDRESS"` - Prefix string `env:"STATS_RECEIVER_STATSD_PREFIX" envDefault:"snowplow.stream-replicator"` - Tags string `env:"STATS_RECEIVER_STATSD_TAGS" envDefault:"{}"` -} + // Decoding + configData := 
defaultConfigData() + decoderOpts := &DecoderOptions{Input: fileHCL.Body} + hclDecoder := &HclDecoder{EvalContext: evalContext} -// StatsReceiversConfig holds configuration for different stats receivers -type StatsReceiversConfig struct { - StatsD StatsDStatsReceiverConfig + err = hclDecoder.Decode(decoderOpts, configData) + if err != nil { + return nil, err + } - // TimeoutSec is how long the observer will wait for a new result before looping - TimeoutSec int `env:"STATS_RECEIVER_TIMEOUT_SEC" envDefault:"1"` + mainConfig := Config{ + Data: configData, + Decoder: hclDecoder, + } - // BufferSec is how long the observer buffers results before pushing results out and resetting - BufferSec int `env:"STATS_RECEIVER_BUFFER_SEC" envDefault:"15"` + return &mainConfig, nil } -// Config for holding all configuration details -type Config struct { - Source string `env:"SOURCE" envDefault:"stdin"` - Sources SourcesConfig - Target string `env:"TARGET" envDefault:"stdout"` - Targets TargetsConfig - FailureTarget string `env:"FAILURE_TARGET" envDefault:"stdout"` - FailureTargets FailureTargetsConfig - Transformation string `env:"MESSAGE_TRANSFORMATION" envDefault:"none"` - LogLevel string `env:"LOG_LEVEL" envDefault:"info"` - Sentry SentryConfig - StatsReceiver string `env:"STATS_RECEIVER"` - StatsReceivers StatsReceiversConfig - - // Provides the ability to provide a GCP service account to the application directly - GoogleServiceAccountB64 string `env:"GOOGLE_APPLICATION_CREDENTIALS_B64"` -} +// CreateComponent creates a pluggable component given the decoder options. 
+func (c *Config) CreateComponent(p Pluggable, opts *DecoderOptions) (interface{}, error) { + componentConfigure := WithDecoderOptions(opts) -// NewConfig resolves the config from the environment -func NewConfig() (*Config, error) { - cfg := Config{} - err := env.Parse(&cfg) + decodedConfig, err := componentConfigure(p, c.Decoder) if err != nil { return nil, err } - return &cfg, nil + + return p.Create(decodedConfig) } // GetTarget builds and returns the target that is configured func (c *Config) GetTarget() (targetiface.Target, error) { - switch c.Target { + var plug Pluggable + useTarget := c.Data.Target.Use + decoderOpts := &DecoderOptions{ + Input: useTarget.Body, + } + + switch useTarget.Name { case "stdout": - return target.NewStdoutTarget() + plug = target.AdaptStdoutTargetFunc( + target.NewStdoutTarget, + ) case "kinesis": - return target.NewKinesisTarget( - c.Targets.Kinesis.Region, - c.Targets.Kinesis.StreamName, - c.Targets.Kinesis.RoleARN, + plug = target.AdaptKinesisTargetFunc( + target.KinesisTargetConfigFunction, ) case "pubsub": - return target.NewPubSubTarget( - c.Targets.PubSub.ProjectID, - c.Targets.PubSub.TopicName, + plug = target.AdaptPubSubTargetFunc( + target.PubSubTargetConfigFunction, ) case "sqs": - return target.NewSQSTarget( - c.Targets.SQS.Region, - c.Targets.SQS.QueueName, - c.Targets.SQS.RoleARN, + plug = target.AdaptSQSTargetFunc( + target.SQSTargetConfigFunction, ) case "kafka": - return target.NewKafkaTarget(&target.KafkaConfig{ - Brokers: c.Targets.Kafka.Brokers, - TopicName: c.Targets.Kafka.TopicName, - TargetVersion: c.Targets.Kafka.TargetVersion, - MaxRetries: c.Targets.Kafka.MaxRetries, - ByteLimit: c.Targets.Kafka.ByteLimit, - Compress: c.Targets.Kafka.Compress, - WaitForAll: c.Targets.Kafka.WaitForAll, - Idempotent: c.Targets.Kafka.Idempotent, - EnableSASL: c.Targets.Kafka.EnableSASL, - SASLUsername: c.Targets.Kafka.SASLUsername, - SASLPassword: c.Targets.Kafka.SASLPassword, - SASLAlgorithm: c.Targets.Kafka.SASLAlgorithm, - 
CertFile: c.Targets.Kafka.CertFile, - KeyFile: c.Targets.Kafka.KeyFile, - CaFile: c.Targets.Kafka.CaFile, - SkipVerifyTLS: c.Targets.Kafka.SkipVerifyTLS, - ForceSync: c.Targets.Kafka.ForceSyncProducer, - FlushFrequency: c.Targets.Kafka.FlushFrequency, - FlushMessages: c.Targets.Kafka.FlushMessages, - FlushBytes: c.Targets.Kafka.FlushBytes, - }) + plug = target.AdaptKafkaTargetFunc( + target.NewKafkaTarget, + ) case "eventhub": - return target.NewEventHubTarget(&target.EventHubConfig{ - EventHubNamespace: c.Targets.EventHub.EventHubNamespace, - EventHubName: c.Targets.EventHub.EventHubName, - MaxAutoRetries: c.Targets.EventHub.MaxAutoRetries, - MessageByteLimit: c.Targets.EventHub.MessageByteLimit, - ChunkByteLimit: c.Targets.EventHub.ChunkByteLimit, - ChunkMessageLimit: c.Targets.EventHub.ChunkMessageLimit, - ContextTimeoutInSeconds: c.Targets.EventHub.ContextTimeoutInSeconds, - BatchByteLimit: c.Targets.EventHub.BatchByteLimit, - }) + plug = target.AdaptEventHubTargetFunc( + target.NewEventHubTarget, + ) case "http": - return target.NewHTTPTarget( - c.Targets.HTTP.HTTPURL, - c.Targets.HTTP.RequestTimeoutInSeconds, - c.Targets.HTTP.ByteLimit, - c.Targets.HTTP.ContentType, - c.Targets.HTTP.Headers, - c.Targets.HTTP.BasicAuthUsername, - c.Targets.HTTP.BasicAuthPassword, - c.Targets.HTTP.CertFile, - c.Targets.HTTP.KeyFile, - c.Targets.HTTP.CaFile, - c.Targets.HTTP.SkipVerifyTLS, + plug = target.AdaptHTTPTargetFunc( + target.HTTPTargetConfigFunction, ) default: - return nil, errors.New(fmt.Sprintf("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got '%s'", c.Target)) + return nil, errors.New(fmt.Sprintf("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got '%s'", useTarget.Name)) + } + + component, err := c.CreateComponent(plug, decoderOpts) + if err != nil { + return nil, err + } + + if t, ok := component.(targetiface.Target); ok { + return t, nil } + + return nil, 
fmt.Errorf("could not interpret target configuration for %q", useTarget.Name) } // GetFailureTarget builds and returns the target that is configured func (c *Config) GetFailureTarget(AppName string, AppVersion string) (failureiface.Failure, error) { - var t targetiface.Target + var plug Pluggable var err error - switch c.FailureTarget { + useFailureTarget := c.Data.FailureTarget.Target + decoderOpts := &DecoderOptions{ + Prefix: "FAILURE_", + Input: useFailureTarget.Body, + } + + switch useFailureTarget.Name { case "stdout": - t, err = target.NewStdoutTarget() + plug = target.AdaptStdoutTargetFunc( + target.NewStdoutTarget, + ) case "kinesis": - t, err = target.NewKinesisTarget( - c.FailureTargets.Kinesis.Region, - c.FailureTargets.Kinesis.StreamName, - c.FailureTargets.Kinesis.RoleARN, + plug = target.AdaptKinesisTargetFunc( + target.KinesisTargetConfigFunction, ) case "pubsub": - t, err = target.NewPubSubTarget( - c.FailureTargets.PubSub.ProjectID, - c.FailureTargets.PubSub.TopicName, + plug = target.AdaptPubSubTargetFunc( + target.PubSubTargetConfigFunction, ) case "sqs": - t, err = target.NewSQSTarget( - c.FailureTargets.SQS.Region, - c.FailureTargets.SQS.QueueName, - c.FailureTargets.SQS.RoleARN, + plug = target.AdaptSQSTargetFunc( + target.SQSTargetConfigFunction, ) case "kafka": - t, err = target.NewKafkaTarget(&target.KafkaConfig{ - Brokers: c.FailureTargets.Kafka.Brokers, - TopicName: c.FailureTargets.Kafka.TopicName, - TargetVersion: c.FailureTargets.Kafka.TargetVersion, - MaxRetries: c.FailureTargets.Kafka.MaxRetries, - ByteLimit: c.FailureTargets.Kafka.ByteLimit, - Compress: c.FailureTargets.Kafka.Compress, - WaitForAll: c.FailureTargets.Kafka.WaitForAll, - Idempotent: c.FailureTargets.Kafka.Idempotent, - EnableSASL: c.FailureTargets.Kafka.EnableSASL, - SASLUsername: c.FailureTargets.Kafka.SASLUsername, - SASLPassword: c.FailureTargets.Kafka.SASLPassword, - SASLAlgorithm: c.FailureTargets.Kafka.SASLAlgorithm, - CertFile: c.FailureTargets.Kafka.CertFile, 
- KeyFile: c.FailureTargets.Kafka.KeyFile, - CaFile: c.FailureTargets.Kafka.CaFile, - SkipVerifyTLS: c.FailureTargets.Kafka.SkipVerifyTLS, - ForceSync: c.FailureTargets.Kafka.ForceSyncProducer, - FlushFrequency: c.FailureTargets.Kafka.FlushFrequency, - FlushMessages: c.FailureTargets.Kafka.FlushMessages, - FlushBytes: c.FailureTargets.Kafka.FlushBytes, - }) + plug = target.AdaptKafkaTargetFunc( + target.NewKafkaTarget, + ) case "eventhub": - t, err = target.NewEventHubTarget(&target.EventHubConfig{ - EventHubNamespace: c.FailureTargets.EventHub.EventHubNamespace, - EventHubName: c.FailureTargets.EventHub.EventHubName, - MaxAutoRetries: c.FailureTargets.EventHub.MaxAutoRetries, - MessageByteLimit: c.FailureTargets.EventHub.MessageByteLimit, - ChunkByteLimit: c.FailureTargets.EventHub.ChunkByteLimit, - ChunkMessageLimit: c.FailureTargets.EventHub.ChunkMessageLimit, - ContextTimeoutInSeconds: c.FailureTargets.EventHub.ContextTimeoutInSeconds, - BatchByteLimit: c.FailureTargets.EventHub.BatchByteLimit, - }) + plug = target.AdaptEventHubTargetFunc( + target.NewEventHubTarget, + ) case "http": - t, err = target.NewHTTPTarget( - c.FailureTargets.HTTP.HTTPURL, - c.FailureTargets.HTTP.requestTimeoutInSeconds, - c.FailureTargets.HTTP.byteLimit, - c.FailureTargets.HTTP.ContentType, - c.FailureTargets.HTTP.Headers, - c.FailureTargets.HTTP.BasicAuthUsername, - c.FailureTargets.HTTP.BasicAuthPassword, - c.FailureTargets.HTTP.CertFile, - c.FailureTargets.HTTP.KeyFile, - c.FailureTargets.HTTP.CaFile, - c.FailureTargets.HTTP.SkipVerifyTLS, + plug = target.AdaptHTTPTargetFunc( + target.HTTPTargetConfigFunction, ) default: - err = errors.New(fmt.Sprintf("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got '%s'", c.FailureTarget)) + return nil, errors.New(fmt.Sprintf("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got '%s'", useFailureTarget.Name)) } + + component, err := 
c.CreateComponent(plug, decoderOpts) if err != nil { return nil, err } - switch c.FailureTargets.Format { - case "snowplow": - return failure.NewSnowplowFailure(t, AppName, AppVersion) - default: - return nil, errors.New(fmt.Sprintf("Invalid failure format found; expected one of 'snowplow' and got '%s'", c.FailureTargets.Format)) + if t, ok := component.(targetiface.Target); ok { + switch c.Data.FailureTarget.Format { + case "snowplow": + return failure.NewSnowplowFailure(t, AppName, AppVersion) + default: + return nil, errors.New(fmt.Sprintf("Invalid failure format found; expected one of 'snowplow' and got '%s'", c.Data.FailureTarget.Format)) + } } + + return nil, fmt.Errorf("could not interpret failure target configuration for %q", useFailureTarget.Name) } // GetTransformations builds and returns transformationApplyFunction from the transformations configured @@ -453,7 +303,7 @@ func (c *Config) GetTransformations() (transform.TransformationApplyFunction, er funcs := make([]transform.TransformationFunction, 0, 0) // Parse list of transformations - transformations := strings.Split(c.Transformation, ",") + transformations := strings.Split(c.Data.Transformation, ",") for _, transformation := range transformations { // Parse function name-option sets @@ -472,7 +322,7 @@ func (c *Config) GetTransformations() (transform.TransformationApplyFunction, er funcs = append(funcs, filterFunc) case "none": default: - return nil, errors.New(fmt.Sprintf("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got '%s'", c.Transformation)) + return nil, errors.New(fmt.Sprintf("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got '%s'", c.Data.Transformation)) } } return transform.NewTransformation(funcs...), nil @@ -504,22 +354,34 @@ func (c *Config) GetObserver(tags map[string]string) (*observer.Observer, error) if err != nil { return nil, 
err } - return observer.New(sr, time.Duration(c.StatsReceivers.TimeoutSec)*time.Second, time.Duration(c.StatsReceivers.BufferSec)*time.Second), nil + return observer.New(sr, time.Duration(c.Data.StatsReceiver.TimeoutSec)*time.Second, time.Duration(c.Data.StatsReceiver.BufferSec)*time.Second), nil } // GetStatsReceiver builds and returns the stats receiver func (c *Config) GetStatsReceiver(tags map[string]string) (statsreceiveriface.StatsReceiver, error) { - switch c.StatsReceiver { + useReceiver := c.Data.StatsReceiver.Receiver + decoderOpts := &DecoderOptions{ + Input: useReceiver.Body, + } + + switch useReceiver.Name { case "statsd": - return statsreceiver.NewStatsDStatsReceiver( - c.StatsReceivers.StatsD.Address, - c.StatsReceivers.StatsD.Prefix, - c.StatsReceivers.StatsD.Tags, - tags, + plug := statsreceiver.AdaptStatsDStatsReceiverFunc( + statsreceiver.NewStatsDReceiverWithTags(tags), ) + component, err := c.CreateComponent(plug, decoderOpts) + if err != nil { + return nil, err + } + + if r, ok := component.(statsreceiveriface.StatsReceiver); ok { + return r, nil + } + + return nil, fmt.Errorf("could not interpret stats receiver configuration for %q", useReceiver.Name) case "": return nil, nil default: - return nil, errors.New(fmt.Sprintf("Invalid stats receiver found; expected one of 'statsd' and got '%s'", c.StatsReceiver)) + return nil, errors.New(fmt.Sprintf("Invalid stats receiver found; expected one of 'statsd' and got '%s'", useReceiver.Name)) } } diff --git a/config/config_test.go b/config/config_test.go index dfe39de7..c42d2f32 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -8,6 +8,7 @@ package config import ( "os" + "path/filepath" "testing" "github.com/stretchr/testify/assert" @@ -21,10 +22,10 @@ func TestNewConfig(t *testing.T) { assert.NotNil(c) assert.Nil(err) - assert.Equal("info", c.LogLevel) - assert.Equal("stdout", c.Target) - assert.Equal("none", c.Transformation) - assert.Equal("stdin", c.Source) + assert.Equal("info", 
c.Data.LogLevel) + assert.Equal("stdout", c.Data.Target.Use.Name) + assert.Equal("none", c.Data.Transformation) + assert.Equal("stdin", c.Data.Source.Use.Name) // Tests on sources moved to the source package. @@ -49,20 +50,20 @@ func TestNewConfig_FromEnv(t *testing.T) { assert := assert.New(t) defer os.Unsetenv("LOG_LEVEL") - defer os.Unsetenv("TARGET") - defer os.Unsetenv("SOURCE") + defer os.Unsetenv("TARGET_NAME") + defer os.Unsetenv("SOURCE_NAME") os.Setenv("LOG_LEVEL", "debug") - os.Setenv("TARGET", "kinesis") - os.Setenv("SOURCE", "kinesis") + os.Setenv("TARGET_NAME", "kinesis") + os.Setenv("SOURCE_NAME", "kinesis") c, err := NewConfig() assert.NotNil(c) assert.Nil(err) - assert.Equal("debug", c.LogLevel) - assert.Equal("kinesis", c.Target) - assert.Equal("kinesis", c.Source) + assert.Equal("debug", c.Data.LogLevel) + assert.Equal("kinesis", c.Data.Target.Use.Name) + assert.Equal("kinesis", c.Data.Source.Use.Name) } func TestNewConfig_FromEnvInvalid(t *testing.T) { @@ -114,9 +115,9 @@ func TestNewConfig_FilterFailure(t *testing.T) { func TestNewConfig_InvalidTarget(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("TARGET") + defer os.Unsetenv("TARGET_NAME") - os.Setenv("TARGET", "fake") + os.Setenv("TARGET_NAME", "fake") c, err := NewConfig() assert.NotNil(c) @@ -131,9 +132,9 @@ func TestNewConfig_InvalidTarget(t *testing.T) { func TestNewConfig_InvalidFailureTarget(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("FAILURE_TARGET") + defer os.Unsetenv("FAILURE_TARGET_NAME") - os.Setenv("FAILURE_TARGET", "fake") + os.Setenv("FAILURE_TARGET_NAME", "fake") c, err := NewConfig() assert.NotNil(c) @@ -165,9 +166,9 @@ func TestNewConfig_InvalidFailureFormat(t *testing.T) { func TestNewConfig_InvalidStatsReceiver(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("STATS_RECEIVER") + defer os.Unsetenv("STATS_RECEIVER_NAME") - os.Setenv("STATS_RECEIVER", "fake") + os.Setenv("STATS_RECEIVER_NAME", "fake") c, err := NewConfig() 
assert.NotNil(c) @@ -198,104 +199,72 @@ func TestNewConfig_GetTags(t *testing.T) { assert.True(ok) } -func TestNewConfig_KafkaTargetDefaults(t *testing.T) { +func TestNewConfig_Hcl_invalids(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("TARGET") - - os.Setenv("TARGET", "kafka") + filename := filepath.Join("test-fixtures", "invalids.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) c, err := NewConfig() assert.NotNil(c) assert.Nil(err) - target := c.Targets.Kafka - assert.NotNil(target) - assert.Equal(target.MaxRetries, 10) - assert.Equal(target.ByteLimit, 1048576) - assert.Equal(target.Compress, false) - assert.Equal(target.WaitForAll, false) - assert.Equal(target.Idempotent, false) - assert.Equal(target.EnableSASL, false) - assert.Equal(target.ForceSyncProducer, false) - assert.Equal(target.FlushFrequency, 0) - assert.Equal(target.FlushMessages, 0) - assert.Equal(target.FlushBytes, 0) -} - -func TestNewConfig_KafkaFailureTargetDefaults(t *testing.T) { - assert := assert.New(t) - - defer os.Unsetenv("FAILURE_TARGET") - - os.Setenv("FAILURE_TARGET", "kafka") - - c, err := NewConfig() - assert.NotNil(c) - assert.Nil(err) + t.Run("invalid_transformation", func(t *testing.T) { + transformation, err := c.GetTransformations() + assert.Nil(transformation) + assert.NotNil(err) + assert.Equal("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got 'fakeHCL'", err.Error()) + }) + + t.Run("invalid_target", func(t *testing.T) { + target, err := c.GetTarget() + assert.Nil(target) + assert.NotNil(err) + assert.Equal("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + }) + + t.Run("invalid_failure_target", func(t *testing.T) { + ftarget, err := c.GetFailureTarget("testAppName", "0.0.0") + assert.Nil(ftarget) + assert.NotNil(err) + assert.Equal("Invalid failure target found; expected one of 'stdout, kinesis, 
pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + }) - target := c.FailureTargets.Kafka - assert.NotNil(target) - assert.Equal(target.MaxRetries, 10) - assert.Equal(target.ByteLimit, 1048576) - assert.Equal(target.Compress, false) - assert.Equal(target.WaitForAll, false) - assert.Equal(target.Idempotent, false) - assert.Equal(target.EnableSASL, false) - assert.Equal(target.ForceSyncProducer, false) - assert.Equal(target.FlushFrequency, 0) - assert.Equal(target.FlushMessages, 0) - assert.Equal(target.FlushBytes, 0) } -func TestNewConfig_EventhubTargetDefaults(t *testing.T) { +func TestNewConfig_Hcl_defaults(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("TARGET") - defer os.Unsetenv("TARGET_EVENTHUB_NAMESPACE") - defer os.Unsetenv("TARGET_EVENTHUB_NAME") - - os.Setenv("TARGET", "eventhub") - os.Setenv("TARGET_EVENTHUB_NAMESPACE", "fake") - os.Setenv("TARGET_EVENTHUB_NAME", "fake") + filename := filepath.Join("test-fixtures", "empty.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) c, err := NewConfig() assert.NotNil(c) assert.Nil(err) - target := c.Targets.EventHub - assert.NotNil(target) - assert.Equal(target.EventHubName, "fake") - assert.Equal(target.EventHubNamespace, "fake") - assert.Equal(target.MessageByteLimit, 1048576) - assert.Equal(target.ChunkByteLimit, 1048576) - assert.Equal(target.ChunkMessageLimit, 500) - assert.Equal(target.ContextTimeoutInSeconds, 20) - assert.Equal(target.BatchByteLimit, 1048576) + assert.Equal(c.Data.Source.Use.Name, "stdin") + assert.Equal(c.Data.Target.Use.Name, "stdout") + assert.Equal(c.Data.FailureTarget.Target.Name, "stdout") + assert.Equal(c.Data.FailureTarget.Format, "snowplow") + assert.Equal(c.Data.Sentry.Tags, "{}") + assert.Equal(c.Data.Sentry.Debug, false) + assert.Equal(c.Data.StatsReceiver.TimeoutSec, 1) + assert.Equal(c.Data.StatsReceiver.BufferSec, 15) + assert.Equal(c.Data.Transformation, "none") + assert.Equal(c.Data.LogLevel, "info") } -func 
TestNewConfig_EventhubFailureTargetDefaults(t *testing.T) { +func TestNewConfig_Hcl_sentry(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("FAILURE_TARGET") - defer os.Unsetenv("FAILURE_TARGET_EVENTHUB_NAMESPACE") - defer os.Unsetenv("FAILURE_TARGET_EVENTHUB_NAME") - - os.Setenv("FAILURE_TARGET", "eventhub") - os.Setenv("FAILURE_TARGET_EVENTHUB_NAMESPACE", "fake") - os.Setenv("FAILURE_TARGET_EVENTHUB_NAME", "fake") + filename := filepath.Join("test-fixtures", "sentry.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) c, err := NewConfig() assert.NotNil(c) assert.Nil(err) - target := c.FailureTargets.EventHub - assert.NotNil(target) - assert.Equal(target.EventHubName, "fake") - assert.Equal(target.EventHubNamespace, "fake") - assert.Equal(target.MessageByteLimit, 1048576) - assert.Equal(target.ChunkByteLimit, 1048576) - assert.Equal(target.ChunkMessageLimit, 500) - assert.Equal(target.ContextTimeoutInSeconds, 20) - assert.Equal(target.BatchByteLimit, 1048576) + assert.Equal(c.Data.Sentry.Debug, true) + assert.Equal(c.Data.Sentry.Tags, "{\"testKey\":\"testValue\"}") + assert.Equal(c.Data.Sentry.Dsn, "testDsn") } diff --git a/config/decode.go b/config/decode.go new file mode 100644 index 00000000..58ea16a1 --- /dev/null +++ b/config/decode.go @@ -0,0 +1,169 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package config + +import ( + "errors" + "os" + + "github.com/caarlos0/env/v6" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/gohcl" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" +) + +// Decoder is the interface that wraps the Decode method. +type Decoder interface { + // Decode decodes onto target given DecoderOptions. + // The target argument must be a pointer to an allocated structure. 
+ Decode(opts *DecoderOptions, target interface{}) error +} + +// DecoderOptions represent the options for a Decoder. +// The purpose of this type is to unify the input to the different available +// Decoders. The zero value of DecoderOptions means no-prefix/nil-input, +// which should be usable by the Decoders. +type DecoderOptions struct { + Prefix string + Input hcl.Body +} + +// EnvDecoder implements Decoder. +type EnvDecoder struct{} + +// Decode populates target from the environment. +// The target argument must be a pointer to a struct type value. +func (e *EnvDecoder) Decode(opts *DecoderOptions, target interface{}) error { + // Decoder Options cannot be missing + if opts == nil { + return errors.New("missing DecoderOptions for EnvDecoder") + } + + // If target is nil then we assume that target is not decodable. + if target == nil { + return nil + } + + envOpts := env.Options{ + Prefix: opts.Prefix, // zero value ok + } + + return env.Parse(target, envOpts) +} + +// HclDecoder implements Decoder. +type HclDecoder struct { + EvalContext *hcl.EvalContext +} + +// Decode populates target given HCL input through DecoderOptions. +// The target argument must be a pointer to an allocated structure. +// If the HCL input is nil, we assume there is nothing to do and the target +// stays unaffected. If the target is nil, we assume is not decodable. +func (h *HclDecoder) Decode(opts *DecoderOptions, target interface{}) error { + // Decoder Options cannot be missing + if opts == nil { + return errors.New("missing DecoderOptions for HclDecoder") + } + + src := opts.Input + if src == nil { + return nil // zero value ok + } + + // If target is nil then we assume that target is not decodable. + if target == nil { + return nil + } + + // Decode + diag := gohcl.DecodeBody(src, h.EvalContext, target) + if len(diag) > 0 { + return diag + } + + return nil +} + +// CreateHclContext creates an *hcl.EvalContext that is used in decoding HCL. 
+// Here we can add the evaluation features available for the HCL configuration +// users. +// For now, below is an example of 2 different ways users can reference +// environment variables in their HCL configuration file. +func CreateHclContext() *hcl.EvalContext { + evalCtx := &hcl.EvalContext{ + Functions: hclCtxFunctions(), + Variables: hclCtxVariables(), + } + + return evalCtx +} + +// hclCtxFunctions constracts the Functions map of the hcl.EvalContext +// Here, for example, we add the `env` as function. +// Users can reference any env var as `env("MY_ENV_VAR")` e.g. +// ``` +// listen_addr = env("LISTEN_ADDR") +// ``` +func hclCtxFunctions() map[string]function.Function { + funcs := map[string]function.Function{ + "env": envFunc(), + } + + return funcs +} + +// hclCtxVariables constracts the Variables map of the hcl.EvalContext +// Here, for example, we add the `env` as variable. +// Users can reference any env var as `env.MY_ENV_VAR` e.g. +// ``` +// listen_addr = env.LISTEN_ADDR +// ``` +func hclCtxVariables() map[string]cty.Value { + vars := map[string]cty.Value{ + "env": cty.ObjectVal(envVarsMap(os.Environ())), + } + + return vars +} + +// envFunc constructs a cty.Function that takes a key as string argument and +// returns a string representation of the environment variable behind it. 
+func envFunc() function.Function { + return function.New(&function.Spec{ + Params: []function.Parameter{ + { + Name: "key", + Type: cty.String, + AllowNull: false, + AllowUnknown: false, + }, + }, + Type: function.StaticReturnType(cty.String), + Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { + key := args[0].AsString() + value := os.Getenv(key) + return cty.StringVal(value), nil + }, + }) +} + +// envVarsMap constructs a map of the environment variables to be used in +// hcl.EvalContext +func envVarsMap(environ []string) map[string]cty.Value { + envMap := make(map[string]cty.Value) + for _, s := range environ { + for j := 1; j < len(s); j++ { + if s[j] == '=' { + envMap[s[0:j]] = cty.StringVal(s[j+1:]) + } + } + } + + return envMap +} diff --git a/config/decode_test.go b/config/decode_test.go new file mode 100644 index 00000000..cb5ea825 --- /dev/null +++ b/config/decode_test.go @@ -0,0 +1,199 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package config + +import ( + "reflect" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/stretchr/testify/assert" +) + +type testStruct struct { + Test string `hcl:"test_string" env:"TEST_STRING"` +} + +func TestEnvDecode(t *testing.T) { + envDecoder := EnvDecoder{} + + testCases := []struct { + TestName string + DecoderOpts *DecoderOptions + Target interface{} + Expected interface{} + }{ + { + "nil_target", + &DecoderOptions{}, + nil, + nil, + }, + { + "decoder_opts", + &DecoderOptions{}, + &testStruct{}, + &testStruct{ + Test: "ateststring", + }, + }, + { + "decoder_opts_with_prefix", + &DecoderOptions{ + Prefix: "PREFIX_", + }, + &testStruct{}, + &testStruct{ + Test: "ateststringprefixed", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.TestName, func(t *testing.T) { + assert := assert.New(t) + t.Setenv("TEST_STRING", "ateststring") + t.Setenv("PREFIX_TEST_STRING", "ateststringprefixed") + + err := envDecoder.Decode(tt.DecoderOpts, tt.Target) + assert.Nil(err) + + if !reflect.DeepEqual(tt.Target, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(tt.Target), + spew.Sdump(tt.Expected)) + } + + }) + } +} + +func TestHclDecode(t *testing.T) { + evalCtx := &hcl.EvalContext{} + hclDecoder := HclDecoder{evalCtx} + hclSrc := ` +test_string = "ateststring" +` + p := hclparse.NewParser() + hclFile, diags := p.ParseHCL([]byte(hclSrc), "placeholder.hcl") + if diags.HasErrors() { + t.Errorf("Failed parsing HCL test source") + } + testInput := hclFile.Body + + testCases := []struct { + TestName string + DecoderOpts *DecoderOptions + Target interface{} + Expected interface{} + }{ + { + "nil_target", + &DecoderOptions{}, + nil, + nil, + }, + { + "decoder_opts_no_input", + &DecoderOptions{}, + &testStruct{ + Test: "noChange", + }, + &testStruct{ + Test: "noChange", + }, + }, + { + "decoder_opts_with_input", + &DecoderOptions{ + Input: testInput, + }, + 
&testStruct{}, + &testStruct{ + Test: "ateststring", + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.TestName, func(t *testing.T) { + assert := assert.New(t) + err := hclDecoder.Decode(tt.DecoderOpts, tt.Target) + if err != nil { + t.Errorf("decoding failed") + } + assert.Nil(err) + + if !reflect.DeepEqual(tt.Target, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(tt.Target), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestCreateHclContext(t *testing.T) { + t.Setenv("TEST_STRING", "ateststring") + t.Setenv("TEST_INT", "2") + type testHclStruct struct { + TestStr string `hcl:"test_string"` + TestInt int `hcl:"test_int"` + } + + evalCtx := CreateHclContext() + hclDecoder := HclDecoder{evalCtx} + hclSrc := ` +test_string = env.TEST_STRING +test_int = env("TEST_INT") +` + p := hclparse.NewParser() + hclFile, diags := p.ParseHCL([]byte(hclSrc), "placeholder.hcl") + if diags.HasErrors() { + t.Errorf("Failed parsing HCL test source") + } + testInput := hclFile.Body + + testCases := []struct { + TestName string + DecoderOpts *DecoderOptions + Target interface{} + Expected interface{} + }{ + { + "Hcl_eval_context_with_env_fun_and_var", + &DecoderOptions{ + Input: testInput, + }, + &testHclStruct{}, + &testHclStruct{ + TestStr: "ateststring", + TestInt: 2, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.TestName, func(t *testing.T) { + assert := assert.New(t) + + err := hclDecoder.Decode(tt.DecoderOpts, tt.Target) + if err != nil { + t.Errorf(err.Error()) + } + assert.Nil(err) + + if !reflect.DeepEqual(tt.Target, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(tt.Target), + spew.Sdump(tt.Expected)) + } + }) + } +} diff --git a/config/examples/README.md b/config/examples/README.md new file mode 100644 index 00000000..5c2ae43f --- /dev/null +++ b/config/examples/README.md @@ -0,0 +1,128 @@ +# Configuring stream-replicator from a file + +Another option to configuring stream-replicator purely from environment 
variables as described in the [wiki](https://github.com/snowplow-devops/stream-replicator/wiki), is through an HCL file. + +This directory contains particular examples of configuration options. + +## General structure + +The general structure of the configuration file is composed of: + +1. Source configuration +2. Target configuration +3. Failure target configuration +4. Observability configuration +5. Transformation configuration +6. Additional configuration options + +As a vague example: + +```hcl +// block for configuring the source +source { + use "sqs" { + // block for configuring SQS as source + } +} + +// block for configuring the target +target { + use "kafka" { + // block for configuring Kafka as target + } +} + +// block for configuring the failure target +failure_target { + use "pubsub" { + // block for configuring PubSub as failure target + } +} + +// block for configuring stats receiver +stats_receiver { + use "statsd" { + // block for configuring StatsD as a receiver + } +} + +// block for configuring sentry +sentry {} + +// string to configure message transformation (default: "none") +message_transformation = "none" + +// log level configuration (default: "info") +log_level = "info" + +// Ability to provide a GCP service account (b64) to the application directly +google_application_credentials = "" +``` + +So, a complete example could be: + +```hcl +// example.hcl + +source { + use "sqs" { + queue_name = "mySqsQueue" + region = "eu-west-1" + } +} + +target { + use "kafka" { + brokers = "my-kafka-broker-connectinon-string" + topic_name = "snowplow-enriched-good" + } +} + +failure_target { + use "kinesis" { + stream_name = "some-acme-stream" + region = "us-east-1" + } +} + +stats_receiver { + use "statsd" { + address = "127.0.0.1:8125" + } +} + +sentry { + dsn = "https://acme.com/1" + debug = true +} + +log_level = "debug" +``` + +In the example files in this directory, there is a simple and extended version for configuring each: + + - source + - 
target + - failure_target + - sentry + - stats-receiver + +## Referencing environment variables in the configuration file + +There are 2 ways to reference environment variables in the HCL file: + +1. As `env("MY_ENV_VAR")` + + For example: + + ```txt + sasl_password = env("SASL_PASSWORD") + ``` + +2. As `env.MY_ENV_VAR` + + For example: + + ```txt + sasl_password = env.SASL_PASSWORD + ``` diff --git a/config/examples/failure-targets/eventhub-extended.hcl b/config/examples/failure-targets/eventhub-extended.hcl new file mode 100644 index 00000000..fc701c33 --- /dev/null +++ b/config/examples/failure-targets/eventhub-extended.hcl @@ -0,0 +1,30 @@ +# Extended configuration for Eventhub as a failure target (all options) + +failure_target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + + # Number of retries handled automatically by the EventHubs library. + # All retries should be completed before context timeout (default: 1). 
+ max_auto_retries = 2 + + # Default presumes paid tier byte limit is 1MB (default: 1048576) + message_byte_limit = 1048576 + + # Chunk byte limit (default: 1048576) + chunk_byte_limit = 1048576 + + # Chunk message limit (default: 500) + chunk_message_limit = 500 + + # The time (seconds) before context timeout (default: 20) + context_timeout_in_seconds = 20 + + # Default batch size of 1MB is the limit for Eventhub's high tier + batch_byte_limit = 1048576 + } +} diff --git a/config/examples/failure-targets/eventhub-simple.hcl b/config/examples/failure-targets/eventhub-simple.hcl new file mode 100644 index 00000000..258fddbc --- /dev/null +++ b/config/examples/failure-targets/eventhub-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Eventhub as a failure target (only required options) + +failure_target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + } +} diff --git a/config/examples/failure-targets/http-extended.hcl b/config/examples/failure-targets/http-extended.hcl new file mode 100644 index 00000000..68f319f4 --- /dev/null +++ b/config/examples/failure-targets/http-extended.hcl @@ -0,0 +1,42 @@ +# Extended configuration for HTTP as a failure target (all options) + +failure_target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + + # Byte limit for requests (default: 1048576) + byte_limit = 1048576 + + # Request timeout in seconds (default: 5) + request_timeout_in_seconds = 5 + + # Content type for POST request (default: "application/json") + content_type = "application/json" + + # Optional headers to add to the request. + # It is provided as a JSON string of key-value pairs (default: ""). + headers = "{\"Accept-Language\":\"en-US\"}" + + # Optional basicauth username + basic_auth_username = "myUsername" + + # Optional basicauth password + # Even though you could just reference the password directly as a string, + # you could also reference an environment variable. 
+ basic_auth_password = env.MY_AUTH_PASSWORD + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + # If cert_file and key_file are not provided, this setting is not applied. + skip_verify_tls = true + } +} diff --git a/config/examples/failure-targets/http-simple.hcl b/config/examples/failure-targets/http-simple.hcl new file mode 100644 index 00000000..af506b92 --- /dev/null +++ b/config/examples/failure-targets/http-simple.hcl @@ -0,0 +1,8 @@ +# Simple configuration for HTTP as a failure target (only required options) + +failure_target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + } +} diff --git a/config/examples/failure-targets/kafka-extended.hcl b/config/examples/failure-targets/kafka-extended.hcl new file mode 100644 index 00000000..4ef2ba60 --- /dev/null +++ b/config/examples/failure-targets/kafka-extended.hcl @@ -0,0 +1,69 @@ +# Extended configuration for Kafka as a failure target (all options) + +failure_target { + use "kafka" { + # Kafka broker connection string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + + # The Kafka version + target_version = "2.7.0" + + # Max retries (default: 10) + max_retries = 10 + + # Kafka default byte limit is 1MB (default: 1048576) + byte_limit = 1048576 + + # Whether to compress data (default: false). + # Reduces network usage and increases latency. 
+ compress = true + + # Sets RequiredAcks = WaitForAll, which waits for min.insync.replicas + # to Ack (default: false) + wait_for_all = true + + # Exactly once writes - Also sets RequiredAcks = WaitForAll (default: false) + idempotent = true + + # Whether to enable SASL support (default: false) + enable_sasl = true + + # SASL AUTH + sasl_username = "mySaslUsername" + sasl_password = env.SASL_PASSWORD + + # The SASL Algorithm to use: "sha512" or "sha256" (default: "sha512") + sasl_algorithm = "sha256" + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + skip_verify_tls = true + + # Forces the use of the Sync Producer (default: false). + # Emits as fast as possible but may limit performance. + force_sync_producer = true + + # Milliseconds between flushes of events (default: 0) + # Setting to 0 means as fast as possible. + flush_frequency = 2 + + # Best effort for how many messages are sent in each batch (default: 0) + # Setting to 0 means as fast as possible. + flush_messages = 2 + + # Best effort for how many bytes will trigger a flush (default: 0) + # Setting to 0 means as fast as possible. 
+ flush_bytes = 2 + } +} diff --git a/config/examples/failure-targets/kafka-simple.hcl b/config/examples/failure-targets/kafka-simple.hcl new file mode 100644 index 00000000..a5c594fc --- /dev/null +++ b/config/examples/failure-targets/kafka-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Kafka as a failure target (only required options) + +failure_target { + use "kafka" { + # Kafka broker connectinon string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + } +} diff --git a/config/examples/failure-targets/kinesis-extended.hcl b/config/examples/failure-targets/kinesis-extended.hcl new file mode 100644 index 00000000..3d99cbba --- /dev/null +++ b/config/examples/failure-targets/kinesis-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of Kinesis as a failure target (all options) + +failure_target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + + # Optional ARN to use on the stream (default: "") + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/failure-targets/kinesis-simple.hcl b/config/examples/failure-targets/kinesis-simple.hcl new file mode 100644 index 00000000..2d619fc0 --- /dev/null +++ b/config/examples/failure-targets/kinesis-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of Kinesis as a failure target (only required options) + +failure_target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + } +} diff --git a/config/examples/failure-targets/pubsub.hcl b/config/examples/failure-targets/pubsub.hcl new file mode 100644 index 00000000..f86858f0 --- /dev/null +++ b/config/examples/failure-targets/pubsub.hcl @@ -0,0 +1,11 @@ +# Configuration of PubSub as a failure target. 
+ +failure_target { + use "pubsub" { + # ID of the GCP Project + project_id = "acme-project" + + # Name of the topic to send data into + topic_name = "some-acme-topic" + } +} diff --git a/config/examples/failure-targets/sqs-extended.hcl b/config/examples/failure-targets/sqs-extended.hcl new file mode 100644 index 00000000..4c5b4c68 --- /dev/null +++ b/config/examples/failure-targets/sqs-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of SQS as a failure target (all options) + +failure_target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + + # Role ARN to use on SQS queue + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/failure-targets/sqs-simple.hcl b/config/examples/failure-targets/sqs-simple.hcl new file mode 100644 index 00000000..f44acdd1 --- /dev/null +++ b/config/examples/failure-targets/sqs-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of SQS as a failure target (only required options) + +failure_target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + } +} diff --git a/config/examples/failure-targets/stdout.hcl b/config/examples/failure-targets/stdout.hcl new file mode 100644 index 00000000..373fb61d --- /dev/null +++ b/config/examples/failure-targets/stdout.hcl @@ -0,0 +1,6 @@ +# Stdout has no required configuration options as a failure target. +# Since it is the default failure target, the failure_target block can be omitted. 
+ +failure_target { + use "stdout" {} +} diff --git a/config/examples/sentry/sentry-extended.hcl b/config/examples/sentry/sentry-extended.hcl new file mode 100644 index 00000000..97ea0236 --- /dev/null +++ b/config/examples/sentry/sentry-extended.hcl @@ -0,0 +1,12 @@ +# Extended sentry configuration (all options) + +sentry { + # The DSN to send Sentry alerts to + dsn = "https://acme.com/1" + + # Whether to put Sentry into debug mode (default: false) + debug = true + + # Escaped JSON string with tags to send to Sentry (default: "{}") + tags = "{\"aKey\":\"aValue\"}" +} diff --git a/config/examples/sentry/sentry-simple.hcl b/config/examples/sentry/sentry-simple.hcl new file mode 100644 index 00000000..56b1edc3 --- /dev/null +++ b/config/examples/sentry/sentry-simple.hcl @@ -0,0 +1,6 @@ +# Simple sentry configuration (only required options) + +sentry { + # The DSN to send Sentry alerts to + dsn = "https://acme.com/1" +} diff --git a/config/examples/sources/kinesis-extended.hcl b/config/examples/sources/kinesis-extended.hcl new file mode 100644 index 00000000..aaacacfd --- /dev/null +++ b/config/examples/sources/kinesis-extended.hcl @@ -0,0 +1,25 @@ +# Extended configuration for Kinesis as a source (all options) + +source { + use "kinesis" { + # Kinesis stream name to read from (required) + stream_name = "my-stream" + + # AWS region of Kinesis stream (required) + region = "us-west-1" + + # App name for Stream Replicator (required) + app_name = "StreamReplicatorProd1" + + # Optional ARN to use on source stream (default: "") + role_arn = "arn:aws:iam::123456789012:role/myrole" + + # Timestamp for the kinesis shard iterator to begin processing. 
+ # Format YYYY-MM-DD HH:MM:SS.MS (miliseconds optional) + # (default: TRIM_HORIZON) + start_timestamp = "2020-01-01 10:00:00" + + # Number of events to process concurrently (default: 50) + concurrent_writes = 15 + } +} diff --git a/config/examples/sources/kinesis-simple.hcl b/config/examples/sources/kinesis-simple.hcl new file mode 100644 index 00000000..dde9c6a2 --- /dev/null +++ b/config/examples/sources/kinesis-simple.hcl @@ -0,0 +1,14 @@ +# Simple configuration for Kinesis as a source (only required options) + +source { + use "kinesis" { + # Kinesis stream name to read from + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + + # App name for Stream Replicator + app_name = "StreamReplicatorProd1" + } +} diff --git a/config/examples/sources/pubsub-extended.hcl b/config/examples/sources/pubsub-extended.hcl new file mode 100644 index 00000000..6805478a --- /dev/null +++ b/config/examples/sources/pubsub-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration for PubSub as a source (all options) + +source { + use "pubsub" { + # GCP Project ID + project_id = "project-id" + + # subscription ID for the pubsub subscription + subscription_id = "subscription-id" + + # Number of events to process concurrently (default: 50) + concurrent_writes = 20 + } +} diff --git a/config/examples/sources/pubsub-simple.hcl b/config/examples/sources/pubsub-simple.hcl new file mode 100644 index 00000000..04827e3a --- /dev/null +++ b/config/examples/sources/pubsub-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for PubSub as a source (only required options) + +source { + use "pubsub" { + # GCP Project ID + project_id = "project-id" + + # subscription ID for the pubsub subscription + subscription_id = "subscription-id" + } +} diff --git a/config/examples/sources/sqs-extended.hcl b/config/examples/sources/sqs-extended.hcl new file mode 100644 index 00000000..82f3544e --- /dev/null +++ b/config/examples/sources/sqs-extended.hcl @@ -0,0 +1,17 @@ +# 
Extended configuration for SQS as a source (all options) + +source { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + + # Role ARN to use on source queue + role_arn = "arn:aws:iam::123456789012:role/myrole" + + # Number of events to process concurrently (default: 50) + concurrent_writes = 20 + } +} diff --git a/config/examples/sources/sqs-simple.hcl b/config/examples/sources/sqs-simple.hcl new file mode 100644 index 00000000..06b71413 --- /dev/null +++ b/config/examples/sources/sqs-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for SQS as a source (only required options) + +source { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + } +} diff --git a/config/examples/sources/stdin-extended.hcl b/config/examples/sources/stdin-extended.hcl new file mode 100644 index 00000000..294a725c --- /dev/null +++ b/config/examples/sources/stdin-extended.hcl @@ -0,0 +1,8 @@ +# Extended configuration for Stdin as a source (all options) + +source { + use "stdin" { + # Number of events to process concurrently (default: 50) + concurrent_writes = 20 + } +} diff --git a/config/examples/sources/stdin-simple.hcl b/config/examples/sources/stdin-simple.hcl new file mode 100644 index 00000000..3723a16e --- /dev/null +++ b/config/examples/sources/stdin-simple.hcl @@ -0,0 +1,7 @@ +# Simple configuration for Stdin as a source +# Stdin has no required configuration options. +# Since it is the default source, the source block can also be omitted. 
+ +source { + use "stdin" {} +} diff --git a/config/examples/stats-receivers/stats-receiver-statsd-extended.hcl b/config/examples/stats-receivers/stats-receiver-statsd-extended.hcl new file mode 100644 index 00000000..a516d12d --- /dev/null +++ b/config/examples/stats-receivers/stats-receiver-statsd-extended.hcl @@ -0,0 +1,20 @@ +# Extended configuration for StatsD stats receiver (all options) + +stats_receiver { + use "statsd" { + # StatsD server address + address = "127.0.0.1:8125" + + # StatsD metric prefix (default: "snowplow.stream-replicator") + prefix = "snowplow.stream-replicator" + + # Escaped JSON string with tags to send to StatsD (default: "{}") + tags = "{\"aKey\": \"aValue\"}" + } + + # Time (seconds) the observer waits for new results (default: 1) + timeout_sec = 2 + + # Aggregation time window (seconds) for metrics being collected (default: 15) + buffer_sec = 20 +} diff --git a/config/examples/stats-receivers/stats-receiver-statsd-simple.hcl b/config/examples/stats-receivers/stats-receiver-statsd-simple.hcl new file mode 100644 index 00000000..19cc8110 --- /dev/null +++ b/config/examples/stats-receivers/stats-receiver-statsd-simple.hcl @@ -0,0 +1,5 @@ +# Simple configuration for StatsD stats receiver + +stats_receiver { + use "statsd" {} +} diff --git a/config/examples/targets/eventhub-extended.hcl b/config/examples/targets/eventhub-extended.hcl new file mode 100644 index 00000000..042e1f4e --- /dev/null +++ b/config/examples/targets/eventhub-extended.hcl @@ -0,0 +1,30 @@ +# Extended configuration for Eventhub as a target (all options) + +target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + + # Number of retries handled automatically by the EventHubs library. + # All retries should be completed before context timeout (default: 1). 
+ max_auto_retries = 2 + + # Default presumes paid tier byte limit is 1MB (default: 1048576) + message_byte_limit = 1048576 + + # Chunk byte limit (default: 1048576) + chunk_byte_limit = 1048576 + + # Chunk message limit (default: 500) + chunk_message_limit = 500 + + # The time (seconds) before context timeout (default: 20) + context_timeout_in_seconds = 20 + + # Default batch size of 1MB is the limit for Eventhub's high tier + batch_byte_limit = 1048576 + } +} diff --git a/config/examples/targets/eventhub-simple.hcl b/config/examples/targets/eventhub-simple.hcl new file mode 100644 index 00000000..144b4683 --- /dev/null +++ b/config/examples/targets/eventhub-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Eventhub as a target (only required options) + +target { + use "eventhub" { + # Namespace housing Eventhub + namespace = "testNamespace" + + # Name of Eventhub + name = "testName" + } +} diff --git a/config/examples/targets/http-extended.hcl b/config/examples/targets/http-extended.hcl new file mode 100644 index 00000000..a70ca33e --- /dev/null +++ b/config/examples/targets/http-extended.hcl @@ -0,0 +1,42 @@ +# Extended configuration for HTTP target (all options) + +target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + + # Byte limit for requests (default: 1048576) + byte_limit = 1048576 + + # Request timeout in seconds (default: 5) + request_timeout_in_seconds = 5 + + # Content type for POST request (default: "application/json") + content_type = "application/json" + + # Optional headers to add to the request. + # It is provided as a JSON string of key-value pairs (default: ""). + headers = "{\"Accept-Language\":\"en-US\"}" + + # Optional basicauth username + basic_auth_username = "myUsername" + + # Optional basicauth password + # Even though you could just reference the password directly as a string, + # you could also reference an environment variable. 
+ basic_auth_password = env.MY_AUTH_PASSWORD + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + # If cert_file and key_file are not provided, this setting is not applied. + skip_verify_tls = true + } +} diff --git a/config/examples/targets/http-simple.hcl b/config/examples/targets/http-simple.hcl new file mode 100644 index 00000000..20079f88 --- /dev/null +++ b/config/examples/targets/http-simple.hcl @@ -0,0 +1,8 @@ +# Simple configuration for HTTP target (only required options) + +target { + use "http" { + # URL endpoint + url = "https://acme.com/x" + } +} diff --git a/config/examples/targets/kafka-extended.hcl b/config/examples/targets/kafka-extended.hcl new file mode 100644 index 00000000..16bdcc9c --- /dev/null +++ b/config/examples/targets/kafka-extended.hcl @@ -0,0 +1,69 @@ +# Extended configuration for Kafka as a target (all options) + +target { + use "kafka" { + # Kafka broker connection string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + + # The Kafka version + target_version = "2.7.0" + + # Max retries (default: 10) + max_retries = 10 + + # Kafka default byte limit is 1MB (default: 1048576) + byte_limit = 1048576 + + # Whether to compress data (default: false). + # Reduces network usage and increases latency. 
+ compress = true + + # Sets RequiredAcks = WaitForAll, which waits for min.insync.replicas + # to Ack (default: false) + wait_for_all = true + + # Exactly once writes - Also sets RequiredAcks = WaitForAll (default: false) + idempotent = true + + # Whether to enable SASL support (default: false) + enable_sasl = true + + # SASL AUTH + sasl_username = "mySaslUsername" + sasl_password = env.SASL_PASSWORD + + # The SASL Algorithm to use: "sha512" or "sha256" (default: "sha512") + sasl_algorithm = "sha256" + + # The optional certificate file for client authentication + cert_file = "myLocalhost.crt" + + # The optional key file for client authentication + key_file = "MyLocalhost.key" + + # The optional certificate authority file for TLS client authentication + ca_file = "myRootCA.crt" + + # Whether to skip verifying ssl certificates chain (default: false) + skip_verify_tls = true + + # Forces the use of the Sync Producer (default: false). + # Emits as fast as possible but may limit performance. + force_sync_producer = true + + # Milliseconds between flushes of events (default: 0) + # Setting to 0 means as fast as possible. + flush_frequency = 2 + + # Best effort for how many messages are sent in each batch (default: 0) + # Setting to 0 means as fast as possible. + flush_messages = 2 + + # Best effort for how many bytes will trigger a flush (default: 0) + # Setting to 0 means as fast as possible. 
+ flush_bytes = 2 + } +} diff --git a/config/examples/targets/kafka-simple.hcl b/config/examples/targets/kafka-simple.hcl new file mode 100644 index 00000000..b3fa6b18 --- /dev/null +++ b/config/examples/targets/kafka-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration for Kafka as a target (only required options) + +target { + use "kafka" { + # Kafka broker connectinon string + brokers = "my-kafka-connection-string" + + # Kafka topic name + topic_name = "snowplow-enriched-good" + } +} diff --git a/config/examples/targets/kinesis-extended.hcl b/config/examples/targets/kinesis-extended.hcl new file mode 100644 index 00000000..12fd8118 --- /dev/null +++ b/config/examples/targets/kinesis-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of Kinesis as a target (all options) + +target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + + # Optional ARN to use on the stream (default: "") + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/targets/kinesis-simple.hcl b/config/examples/targets/kinesis-simple.hcl new file mode 100644 index 00000000..17952616 --- /dev/null +++ b/config/examples/targets/kinesis-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of Kinesis as a target (only required options) + +target { + use "kinesis" { + # Kinesis stream name to send data to + stream_name = "my-stream" + + # AWS region of Kinesis stream + region = "us-west-1" + } +} diff --git a/config/examples/targets/pubsub.hcl b/config/examples/targets/pubsub.hcl new file mode 100644 index 00000000..40e281e6 --- /dev/null +++ b/config/examples/targets/pubsub.hcl @@ -0,0 +1,11 @@ +# Configuration of PubSub as a target. 
+ +target { + use "pubsub" { + # ID of the GCP Project + project_id = "acme-project" + + # Name of the topic to send data into + topic_name = "some-acme-topic" + } +} diff --git a/config/examples/targets/sqs-extended.hcl b/config/examples/targets/sqs-extended.hcl new file mode 100644 index 00000000..fefaacec --- /dev/null +++ b/config/examples/targets/sqs-extended.hcl @@ -0,0 +1,14 @@ +# Extended configuration of SQS as a target (all options) + +target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + + # Role ARN to use on SQS queue + role_arn = "arn:aws:iam::123456789012:role/myrole" + } +} diff --git a/config/examples/targets/sqs-simple.hcl b/config/examples/targets/sqs-simple.hcl new file mode 100644 index 00000000..dc0ae07c --- /dev/null +++ b/config/examples/targets/sqs-simple.hcl @@ -0,0 +1,11 @@ +# Simple configuration of SQS as a target (only required options) + +target { + use "sqs" { + # SQS queue name + queue_name = "mySqsQueue" + + # AWS region of SQS queue + region = "us-west-1" + } +} diff --git a/config/examples/targets/stdout.hcl b/config/examples/targets/stdout.hcl new file mode 100644 index 00000000..e5580753 --- /dev/null +++ b/config/examples/targets/stdout.hcl @@ -0,0 +1,6 @@ +# Stdout has no required configuration options as a target. +# Since it is the default target, the target block can also be omitted. 
+ +target { + use "stdout" {} +} diff --git a/config/test-fixtures/empty.hcl b/config/test-fixtures/empty.hcl new file mode 100644 index 00000000..46a47ea7 --- /dev/null +++ b/config/test-fixtures/empty.hcl @@ -0,0 +1 @@ +# empty HCL file to test defaults diff --git a/config/test-fixtures/invalids.hcl b/config/test-fixtures/invalids.hcl new file mode 100644 index 00000000..7075e475 --- /dev/null +++ b/config/test-fixtures/invalids.hcl @@ -0,0 +1,11 @@ +# configuration with various invalid options + +message_transformation = "fakeHCL" + +target { + use "fakeHCL" {} +} + +failure_target { + use "fakeHCL" {} +} diff --git a/config/test-fixtures/observer.hcl b/config/test-fixtures/observer.hcl new file mode 100644 index 00000000..1ada6421 --- /dev/null +++ b/config/test-fixtures/observer.hcl @@ -0,0 +1,11 @@ +# stats receiver extended configuration + +stats_receiver { + use "statsd" { + address = "test.localhost" + prefix = "snowplow.test" + tags = "{\"testKey\": \"testValue\"}" + } + timeout_sec = 2 + buffer_sec = 20 +} diff --git a/config/test-fixtures/sentry.hcl b/config/test-fixtures/sentry.hcl new file mode 100644 index 00000000..55e0e117 --- /dev/null +++ b/config/test-fixtures/sentry.hcl @@ -0,0 +1,7 @@ +# just sentry configuration + +sentry { + dsn = "testDsn" + debug = true + tags = "{\"testKey\":\"testValue\"}" +} diff --git a/config/test-fixtures/source-kinesis-extended.hcl b/config/test-fixtures/source-kinesis-extended.hcl new file mode 100644 index 00000000..cbb85ada --- /dev/null +++ b/config/test-fixtures/source-kinesis-extended.hcl @@ -0,0 +1,12 @@ +# kinesis source extended configuration + +source { + use "kinesis" { + stream_name = "testStream" + region = "us-test-1" + role_arn = "xxx-test-role-arn" + app_name = "testApp" + start_timestamp = "2022-03-15 07:52:53" + concurrent_writes = 51 + } +} diff --git a/config/test-fixtures/source-kinesis-simple.hcl b/config/test-fixtures/source-kinesis-simple.hcl new file mode 100644 index 00000000..0030e252 --- 
/dev/null +++ b/config/test-fixtures/source-kinesis-simple.hcl @@ -0,0 +1,9 @@ +# kinesis source required configuration + +source { + use "kinesis" { + stream_name = "testStream" + region = "us-test-1" + app_name = "testApp" + } +} diff --git a/config/test-fixtures/source-sqs.hcl b/config/test-fixtures/source-sqs.hcl new file mode 100644 index 00000000..ca226ba7 --- /dev/null +++ b/config/test-fixtures/source-sqs.hcl @@ -0,0 +1,9 @@ +# sqs source configuration + +source { + use "sqs" { + queue_name = "testQueue" + region = "us-test-1" + role_arn = "xxx-test-role-arn" + } +} diff --git a/config/test-fixtures/target-eventhub-extended.hcl b/config/test-fixtures/target-eventhub-extended.hcl new file mode 100644 index 00000000..7dd4fd1c --- /dev/null +++ b/config/test-fixtures/target-eventhub-extended.hcl @@ -0,0 +1,14 @@ +# eventhub target extended config + +target { + use "eventhub" { + namespace = "testNamespace" + name = "testName" + max_auto_retries = 2 + message_byte_limit = 1000000 + chunk_byte_limit = 1000000 + chunk_message_limit = 501 + context_timeout_in_seconds = 21 + batch_byte_limit = 1000000 + } +} diff --git a/config/test-fixtures/target-eventhub-simple.hcl b/config/test-fixtures/target-eventhub-simple.hcl new file mode 100644 index 00000000..205ac45a --- /dev/null +++ b/config/test-fixtures/target-eventhub-simple.hcl @@ -0,0 +1,8 @@ +# eventhub target config + +target { + use "eventhub" { + namespace = "testNamespace" + name = "testName" + } +} diff --git a/config/test-fixtures/target-http-extended.hcl b/config/test-fixtures/target-http-extended.hcl new file mode 100644 index 00000000..7dfadbf4 --- /dev/null +++ b/config/test-fixtures/target-http-extended.hcl @@ -0,0 +1,17 @@ +# http target extended configuration + +target { + use "http" { + url = "testUrl" + byte_limit = 1000000 + request_timeout_in_seconds = 2 + content_type = "test/test" + headers = "{\"Accept-Language\":\"en-US\"}" + basic_auth_username = "testUsername" + basic_auth_password = 
"testPass" + cert_file = "test.cert" + key_file = "test.key" + ca_file = "test.ca" + skip_verify_tls = true + } +} diff --git a/config/test-fixtures/target-http-simple.hcl b/config/test-fixtures/target-http-simple.hcl new file mode 100644 index 00000000..f7617dd9 --- /dev/null +++ b/config/test-fixtures/target-http-simple.hcl @@ -0,0 +1,7 @@ +# http target required configuration + +target { + use "http" { + url = "testUrl" + } +} diff --git a/config/test-fixtures/target-kafka-extended.hcl b/config/test-fixtures/target-kafka-extended.hcl new file mode 100644 index 00000000..d0faa9f8 --- /dev/null +++ b/config/test-fixtures/target-kafka-extended.hcl @@ -0,0 +1,26 @@ +# kafka target extended configuration + +target { + use "kafka" { + brokers = "testBrokers" + topic_name = "testTopic" + target_version = "1.2.3" + max_retries = 11 + byte_limit = 1000000 + compress = true + wait_for_all = true + idempotent = true + enable_sasl = true + sasl_username = "testUsername" + sasl_password = "testPass" + sasl_algorithm = "sha256" + cert_file = "test.cert" + key_file = "test.key" + ca_file = "test.ca" + skip_verify_tls = true + force_sync_producer = true + flush_frequency = 2 + flush_messages = 2 + flush_bytes = 2 + } +} diff --git a/config/test-fixtures/target-kafka-simple.hcl b/config/test-fixtures/target-kafka-simple.hcl new file mode 100644 index 00000000..29fa4780 --- /dev/null +++ b/config/test-fixtures/target-kafka-simple.hcl @@ -0,0 +1,8 @@ +# kafka target required configuration + +target { + use "kafka" { + brokers = "testBrokers" + topic_name = "testTopic" + } +} diff --git a/config/test-fixtures/target-kinesis.hcl b/config/test-fixtures/target-kinesis.hcl new file mode 100644 index 00000000..763ca44b --- /dev/null +++ b/config/test-fixtures/target-kinesis.hcl @@ -0,0 +1,9 @@ +# kinesis target configuration + +target { + use "kinesis" { + stream_name = "testStream" + region = "eu-test-1" + role_arn = "xxx-test-role-arn" + } +} diff --git 
a/config/test-fixtures/target-pubsub.hcl b/config/test-fixtures/target-pubsub.hcl new file mode 100644 index 00000000..e71b2a30 --- /dev/null +++ b/config/test-fixtures/target-pubsub.hcl @@ -0,0 +1,8 @@ +# pubsub target configuration + +target { + use "pubsub" { + project_id = "testId" + topic_name = "testTopic" + } +} diff --git a/config/test-fixtures/target-sqs.hcl b/config/test-fixtures/target-sqs.hcl new file mode 100644 index 00000000..81dceb89 --- /dev/null +++ b/config/test-fixtures/target-sqs.hcl @@ -0,0 +1,9 @@ +# config for sqs target + +target { + use "sqs" { + queue_name = "testQueue" + region = "eu-test-1" + role_arn = "xxx-test-role-arn" + } +} diff --git a/go.mod b/go.mod index 16376d89..28ee2387 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/Shopify/sarama v1.34.0 github.com/aws/aws-lambda-go v1.26.0 github.com/aws/aws-sdk-go v1.40.22 - github.com/caarlos0/env/v6 v6.6.2 + github.com/caarlos0/env/v6 v6.9.1 github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect github.com/getsentry/sentry-go v0.11.0 @@ -51,6 +51,12 @@ require ( gopkg.in/stretchr/testify.v1 v1.2.2 // indirect ) +require ( + github.com/davecgh/go-spew v1.1.1 + github.com/hashicorp/hcl/v2 v2.11.1 + github.com/zclconf/go-cty v1.10.0 +) + require ( github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect @@ -58,7 +64,8 @@ require ( github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect + github.com/agext/levenshtein v1.2.1 // indirect + github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/devigned/tab v0.1.1 // indirect github.com/eapache/go-resiliency v1.2.0 // indirect github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect 
@@ -75,6 +82,7 @@ require ( github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/json-iterator/go v1.1.11 // indirect github.com/jstemmer/go-junit-report v0.9.1 // indirect + github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.1 // indirect github.com/pierrec/lz4/v4 v4.1.14 // indirect diff --git a/go.sum b/go.sum index f46adff2..23ae8f5f 100644 --- a/go.sum +++ b/go.sum @@ -115,8 +115,15 @@ github.com/Shopify/sarama v1.34.0/go.mod h1:V2ceE9UupUf4/oP1Z38SI49fAnD0/MtkqDDH github.com/Shopify/toxiproxy v2.1.4+incompatible h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= github.com/Shopify/toxiproxy/v2 v2.3.0/go.mod h1:KvQTtB6RjCJY4zqNJn7C7JDFgsG5uoHYDirfUfpIm0c= +github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= +github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= +github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= +github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/aws/aws-lambda-go v1.26.0 h1:6ujqBpYF7tdZcBvPIccs98SpeGfrt/UOVEiexfNIdHA= 
github.com/aws/aws-lambda-go v1.26.0/go.mod h1:jJmlefzPfGnckuHdXX7/80O3BvUUi12XOkbv4w9SGLU= @@ -126,6 +133,8 @@ github.com/aws/aws-sdk-go v1.40.22/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= github.com/caarlos0/env/v6 v6.6.2 h1:BypLXDWQTA32rS4UM7pBz+/0BOuvs6C7LSeQAxMwyvI= github.com/caarlos0/env/v6 v6.6.2/go.mod h1:P0BVSgU9zfkxfSpFUs6KsO3uWR4k3Ac0P66ibAGTybM= +github.com/caarlos0/env/v6 v6.9.1 h1:zOkkjM0F6ltnQ5eBX6IPI41UP/KDGEK7rRPwGCNos8k= +github.com/caarlos0/env/v6 v6.9.1/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Zu0ddvwc= github.com/cactus/go-statsd-client/statsd v0.0.0-20190922113730-52b467de415c/go.mod h1:D4RDtP0MffJ3+R36OkGul0LwJLIN8nRb0Ac6jZmJCmo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= @@ -199,6 +208,7 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= @@ -217,6 +227,7 @@ github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.4/go.mod 
h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -300,7 +311,10 @@ github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/hcl/v2 v2.11.1 h1:yTyWcXcm9XB0TEkyU/JCRU6rYy4K+mgLtzn2wlrJbcc= +github.com/hashicorp/hcl/v2 v2.11.1/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -365,6 +379,7 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= 
+github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= @@ -379,6 +394,8 @@ github.com/mediocregopher/radix/v3 v3.4.2/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= @@ -447,6 +464,7 @@ github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/stretchr/objx v0.1.0/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -476,6 +494,9 @@ github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= +github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= +github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/xdg/scram v1.0.3 h1:nTadYh2Fs4BK2xdldEa2g5bbaZp0/+1nJMMPtPxS/to= github.com/xdg/scram v1.0.3/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v1.0.3 h1:cmL5Enob4W83ti/ZHuZLuKD/xqJfus4fVPwE+/BDm+4= @@ -493,6 +514,11 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= +github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0= +github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod 
h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -505,6 +531,7 @@ go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqe golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -559,6 +586,7 @@ golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -641,6 +669,7 @@ golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/pkg/source/kinesis/kinesis_source.go b/pkg/source/kinesis/kinesis_source.go index 586acc1d..70a76490 100644 --- a/pkg/source/kinesis/kinesis_source.go +++ b/pkg/source/kinesis/kinesis_source.go @@ -21,13 +21,22 @@ import ( "github.com/twinj/uuid" "github.com/twitchscience/kinsumer" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + StreamName string `hcl:"stream_name" env:"SOURCE_KINESIS_STREAM_NAME"` + Region string `hcl:"region" env:"SOURCE_KINESIS_REGION"` + AppName string `hcl:"app_name" env:"SOURCE_KINESIS_APP_NAME"` + RoleARN string `hcl:"role_arn,optional" env:"SOURCE_KINESIS_ROLE_ARN"` + StartTimestamp string `hcl:"start_timestamp,optional" env:"SOURCE_KINESIS_START_TIMESTAMP"` // Timestamp for the kinesis shard iterator to begin processing. 
Format YYYY-MM-DD HH:MM:SS.MS (miliseconds optional) + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // --- Kinesis source // kinesisSource holds a new client for reading messages from kinesis @@ -45,14 +54,14 @@ type kinesisSource struct { // configFunctionGeneratorWithInterfaces generates the kinesis Source Config function, allowing you // to provide a Kinesis + DynamoDB client directly to allow for mocking and localstack usage -func configFunctionGeneratorWithInterfaces(kinesisClient kinesisiface.KinesisAPI, dynamodbClient dynamodbiface.DynamoDBAPI, awsAccountID string) func(c *config.Config) (sourceiface.Source, error) { +func configFunctionGeneratorWithInterfaces(kinesisClient kinesisiface.KinesisAPI, dynamodbClient dynamodbiface.DynamoDBAPI, awsAccountID string) func(c *configuration) (sourceiface.Source, error) { // Return a function which returns the source - return func(c *config.Config) (sourceiface.Source, error) { + return func(c *configuration) (sourceiface.Source, error) { // Handle iteratorTstamp if provided var iteratorTstamp time.Time var tstampParseErr error - if c.Sources.Kinesis.StartTimestamp != "" { - iteratorTstamp, tstampParseErr = time.Parse("2006-01-02 15:04:05.999", c.Sources.Kinesis.StartTimestamp) + if c.StartTimestamp != "" { + iteratorTstamp, tstampParseErr = time.Parse("2006-01-02 15:04:05.999", c.StartTimestamp) if tstampParseErr != nil { return nil, errors.Wrap(tstampParseErr, fmt.Sprintf("Failed to parse provided value for SOURCE_KINESIS_START_TIMESTAMP: %v", iteratorTstamp)) } @@ -62,17 +71,17 @@ func configFunctionGeneratorWithInterfaces(kinesisClient kinesisiface.KinesisAPI kinesisClient, dynamodbClient, awsAccountID, - c.Sources.ConcurrentWrites, - c.Sources.Kinesis.Region, - c.Sources.Kinesis.StreamName, - c.Sources.Kinesis.AppName, + c.ConcurrentWrites, + c.Region, + c.StreamName, + c.AppName, &iteratorTstamp) } } // configFunction returns a kinesis source from a config -func 
configFunction(c *config.Config) (sourceiface.Source, error) { - awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(c.Sources.Kinesis.Region, c.Sources.Kinesis.RoleARN) +func configFunction(c *configuration) (sourceiface.Source, error) { + awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(c.Region, c.RoleARN) if err != nil { return nil, err } @@ -87,8 +96,42 @@ func configFunction(c *config.Config) (sourceiface.Source, error) { return sourceConfigFunction(c) } +// The adapter type is an adapter for functions to be used as +// pluggable components for Kinesis Source. Implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns a Kinesis Source adapter. +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected configuration for kinesis source") + } + + return f(cfg) + } +} + // ConfigPair is passed to configuration to determine when to build a Kinesis source. 
-var ConfigPair = sourceconfig.ConfigPair{SourceName: "kinesis", SourceConfigFunc: configFunction} +var ConfigPair = sourceconfig.ConfigPair{ + Name: "kinesis", + Handle: adapterGenerator(configFunction), +} // --- Kinsumer overrides diff --git a/pkg/source/kinesis/kinesis_source_test.go b/pkg/source/kinesis/kinesis_source_test.go index 4ae312e8..340ab930 100644 --- a/pkg/source/kinesis/kinesis_source_test.go +++ b/pkg/source/kinesis/kinesis_source_test.go @@ -7,14 +7,18 @@ package kinesissource import ( + "errors" "fmt" "os" + "path/filepath" + "reflect" "testing" "time" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/kinesis" "github.com/aws/aws-sdk-go/service/kinesis/kinesisiface" + "github.com/davecgh/go-spew/spew" "github.com/stretchr/testify/assert" config "github.com/snowplow-devops/stream-replicator/config" @@ -173,9 +177,12 @@ func TestGetSource_WithKinesisSource(t *testing.T) { defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) - defer os.Unsetenv("SOURCE") + defer os.Unsetenv("SOURCE_NAME") + defer os.Unsetenv("SOURCE_KINESIS_STREAM_NAME") + defer os.Unsetenv("SOURCE_KINESIS_REGION") + defer os.Unsetenv("SOURCE_KINESIS_APP_NAME") - os.Setenv("SOURCE", "kinesis") + os.Setenv("SOURCE_NAME", "kinesis") os.Setenv("SOURCE_KINESIS_STREAM_NAME", streamName) os.Setenv("SOURCE_KINESIS_REGION", testutil.AWSLocalstackRegion) @@ -187,8 +194,9 @@ func TestGetSource_WithKinesisSource(t *testing.T) { // Use our function generator to interact with localstack kinesisSourceConfigFunctionWithLocalstack := configFunctionGeneratorWithInterfaces(kinesisClient, dynamodbClient, "00000000000") + adaptedHandle := adapterGenerator(kinesisSourceConfigFunctionWithLocalstack) - kinesisSourceConfigPairWithLocalstack := sourceconfig.ConfigPair{SourceName: "kinesis", SourceConfigFunc: kinesisSourceConfigFunctionWithLocalstack} + kinesisSourceConfigPairWithLocalstack := sourceconfig.ConfigPair{Name: "kinesis", Handle: adaptedHandle} 
supportedSources := []sourceconfig.ConfigPair{kinesisSourceConfigPairWithLocalstack} source, err := sourceconfig.GetSource(c, supportedSources) @@ -197,3 +205,83 @@ func TestGetSource_WithKinesisSource(t *testing.T) { assert.IsType(&kinesisSource{}, source) } + +func TestKinesisSourceHCL(t *testing.T) { + testFixPath := "../../../config/test-fixtures" + testCases := []struct { + File string + Plug config.Pluggable + Expected interface{} + }{ + { + File: "source-kinesis-simple.hcl", + Plug: testKinesisSourceAdapter(testKinesisSourceFunc), + Expected: &configuration{ + StreamName: "testStream", + Region: "us-test-1", + AppName: "testApp", + RoleARN: "", + StartTimestamp: "", + ConcurrentWrites: 50, + }, + }, + { + File: "source-kinesis-extended.hcl", + Plug: testKinesisSourceAdapter(testKinesisSourceFunc), + Expected: &configuration{ + StreamName: "testStream", + Region: "us-test-1", + AppName: "testApp", + RoleARN: "xxx-test-role-arn", + StartTimestamp: "2022-03-15 07:52:53", + ConcurrentWrites: 51, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + assert.Nil(err) + + use := c.Data.Source.Use + decoderOpts := &config.DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +// Helpers +func testKinesisSourceAdapter(f func(c *configuration) (*configuration, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected KinesisSourceConfig") + } + + return f(cfg) + } + +} + +func testKinesisSourceFunc(c *configuration) 
(*configuration, error) { + + return c, nil +} diff --git a/pkg/source/pubsub/pubsub_source.go b/pkg/source/pubsub/pubsub_source.go index fa27bf7e..acb10626 100644 --- a/pkg/source/pubsub/pubsub_source.go +++ b/pkg/source/pubsub/pubsub_source.go @@ -16,12 +16,18 @@ import ( log "github.com/sirupsen/logrus" "github.com/twinj/uuid" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + ProjectID string `hcl:"project_id" env:"SOURCE_PUBSUB_PROJECT_ID"` + SubscriptionID string `hcl:"subscription_id" env:"SOURCE_PUBSUB_SUBSCRIPTION_ID"` + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // pubSubSource holds a new client for reading messages from PubSub type pubSubSource struct { projectID string @@ -36,16 +42,50 @@ type pubSubSource struct { } // configFunction returns a pubsub source from a config -func configFunction(c *config.Config) (sourceiface.Source, error) { +func configFunction(c *configuration) (sourceiface.Source, error) { return newPubSubSource( - c.Sources.ConcurrentWrites, - c.Sources.PubSub.ProjectID, - c.Sources.PubSub.SubscriptionID, + c.ConcurrentWrites, + c.ProjectID, + c.SubscriptionID, ) } +// The adapter type is an adapter for functions to be used as +// pluggable components for PubSub Source. It implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. 
+func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns a PubSub Source adapter. +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected PubSubSourceConfig") + } + + return f(cfg) + } +} + // ConfigPair is passed to configuration to determine when to build a Pubsub source. -var ConfigPair = sourceconfig.ConfigPair{SourceName: "pubsub", SourceConfigFunc: configFunction} +var ConfigPair = sourceconfig.ConfigPair{ + Name: "pubsub", + Handle: adapterGenerator(configFunction), +} // newPubSubSource creates a new client for reading messages from PubSub func newPubSubSource(concurrentWrites int, projectID string, subscriptionID string) (*pubSubSource, error) { diff --git a/pkg/source/sourceconfig/source_config.go b/pkg/source/sourceconfig/source_config.go index 59b9d288..3eb752ab 100644 --- a/pkg/source/sourceconfig/source_config.go +++ b/pkg/source/sourceconfig/source_config.go @@ -14,23 +14,36 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) -// configFunction is a function which returns a source. -type configFunction func(*config.Config) (sourceiface.Source, error) - -// ConfigPair contains the name of a source and its ConfigFunction. +// ConfigPair contains the name of a source and its handle that satisfies the +// Pluggable interface. type ConfigPair struct { - SourceName string - SourceConfigFunc configFunction + Name string + Handle config.Pluggable } -// GetSource iterates the list of supported sources, matches the provided config for source, and returns a source. 
+// GetSource creates and returns the source that is configured. func GetSource(c *config.Config, supportedSources []ConfigPair) (sourceiface.Source, error) { + useSource := c.Data.Source.Use + decoderOpts := &config.DecoderOptions{ + Input: useSource.Body, + } + sourceList := make([]string, 0) - for _, configPair := range supportedSources { - if configPair.SourceName == c.Source { - return configPair.SourceConfigFunc(c) + for _, pair := range supportedSources { + if pair.Name == useSource.Name { + plug := pair.Handle + component, err := c.CreateComponent(plug, decoderOpts) + if err != nil { + return nil, err + } + + if s, ok := component.(sourceiface.Source); ok { + return s, nil + } + + return nil, fmt.Errorf("could not interpret source configuration for %q", useSource.Name) } - sourceList = append(sourceList, configPair.SourceName) + sourceList = append(sourceList, pair.Name) } - return nil, fmt.Errorf("Invalid source found: %s. Supported sources in this build: %s", c.Source, strings.Join(sourceList, ", ")) + return nil, fmt.Errorf("Invalid source found: %s. 
Supported sources in this build: %s", useSource.Name, strings.Join(sourceList, ", ")) } diff --git a/pkg/source/sourceconfig/source_config_test.go b/pkg/source/sourceconfig/source_config_test.go index 40a2cb2e..234d91ac 100644 --- a/pkg/source/sourceconfig/source_config_test.go +++ b/pkg/source/sourceconfig/source_config_test.go @@ -17,9 +17,9 @@ import ( func TestNewConfig_InvalidSource(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SOURCE") + defer os.Unsetenv("SOURCE_NAME") - os.Setenv("SOURCE", "fake") + os.Setenv("SOURCE_NAME", "fake") c, err := config.NewConfig() assert.NotNil(c) diff --git a/pkg/source/sqs/sqs_source.go b/pkg/source/sqs/sqs_source.go index 4499985f..28039e41 100644 --- a/pkg/source/sqs/sqs_source.go +++ b/pkg/source/sqs/sqs_source.go @@ -19,13 +19,20 @@ import ( log "github.com/sirupsen/logrus" "github.com/twinj/uuid" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + QueueName string `hcl:"queue_name" env:"SOURCE_SQS_QUEUE_NAME"` + Region string `hcl:"region" env:"SOURCE_SQS_REGION"` + RoleARN string `hcl:"role_arn,optional" env:"SOURCE_SQS_ROLE_ARN"` + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // sqsSource holds a new client for reading messages from SQS type sqsSource struct { client sqsiface.SQSAPI @@ -47,15 +54,15 @@ type sqsSource struct { // configFunctionGeneratorWithInterfaces generates the SQS Source Config function, allowing you // to provide an SQS client directly to allow for mocking and localstack usage -func configFunctionGeneratorWithInterfaces(client sqsiface.SQSAPI, awsAccountID string) func(c 
*config.Config) (sourceiface.Source, error) { - return func(c *config.Config) (sourceiface.Source, error) { - return newSQSSourceWithInterfaces(client, awsAccountID, c.Sources.ConcurrentWrites, c.Sources.SQS.Region, c.Sources.SQS.QueueName) +func configFunctionGeneratorWithInterfaces(client sqsiface.SQSAPI, awsAccountID string) func(c *configuration) (sourceiface.Source, error) { + return func(c *configuration) (sourceiface.Source, error) { + return newSQSSourceWithInterfaces(client, awsAccountID, c.ConcurrentWrites, c.Region, c.QueueName) } } -// configFunction returns an SQS source from a config -func configFunction(c *config.Config) (sourceiface.Source, error) { - awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(c.Sources.SQS.Region, c.Sources.SQS.RoleARN) +// configFunction returns an SQS source from a config. +func configFunction(c *configuration) (sourceiface.Source, error) { + awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(c.Region, c.RoleARN) if err != nil { return nil, err } @@ -67,8 +74,43 @@ func configFunction(c *config.Config) (sourceiface.Source, error) { return sourceConfigFunc(c) } -// ConfigPair is passed to configuration to determine when to build an SQS source. -var ConfigPair = sourceconfig.ConfigPair{SourceName: "sqs", SourceConfigFunc: configFunction} +// The adapter type is an adapter for functions to be used as +// pluggable components for SQS Source. It implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns an SQS Source adapter. 
+func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected SQSSourceConfig") + } + + return f(cfg) + } +} + +// ConfigPair is passed to configuration to determine when and how to build +// an SQS source. +var ConfigPair = sourceconfig.ConfigPair{ + Name: "sqs", + Handle: adapterGenerator(configFunction), +} // newSQSSourceWithInterfaces allows you to provide an SQS client directly to allow // for mocking and localstack usage diff --git a/pkg/source/sqs/sqs_source_test.go b/pkg/source/sqs/sqs_source_test.go index 7462fc98..4a55cf3e 100644 --- a/pkg/source/sqs/sqs_source_test.go +++ b/pkg/source/sqs/sqs_source_test.go @@ -7,10 +7,14 @@ package sqssource import ( + "errors" "os" + "path/filepath" + "reflect" "testing" "time" + "github.com/davecgh/go-spew/spew" "github.com/stretchr/testify/assert" config "github.com/snowplow-devops/stream-replicator/config" @@ -108,9 +112,9 @@ func TestGetSource_WithSQSSource(t *testing.T) { defer testutil.DeleteAWSLocalstackSQSQueue(sqsClient, &queueName) - defer os.Unsetenv("SOURCE") + defer os.Unsetenv("SOURCE_NAME") - os.Setenv("SOURCE", "sqs") + os.Setenv("SOURCE_NAME", "sqs") os.Setenv("SOURCE_SQS_QUEUE_NAME", queueName) c, err := config.NewConfig() @@ -118,7 +122,9 @@ func TestGetSource_WithSQSSource(t *testing.T) { assert.Nil(err) sqsSourceConfigFunctionWithLocalStack := configFunctionGeneratorWithInterfaces(sqsClient, "00000000000") - sqsSourceConfigPairWithInterfaces := sourceconfig.ConfigPair{SourceName: "sqs", SourceConfigFunc: sqsSourceConfigFunctionWithLocalStack} + adaptedHandle := adapterGenerator(sqsSourceConfigFunctionWithLocalStack) + + sqsSourceConfigPairWithInterfaces := sourceconfig.ConfigPair{Name: "sqs", Handle: adaptedHandle} supportedSources := []sourceconfig.ConfigPair{sqsSourceConfigPairWithInterfaces} source, err := 
sourceconfig.GetSource(c, supportedSources) @@ -127,3 +133,69 @@ func TestGetSource_WithSQSSource(t *testing.T) { assert.IsType(&sqsSource{}, source) } + +func TestSQSSourceHCL(t *testing.T) { + testFixPath := "../../../config/test-fixtures" + testCases := []struct { + File string + Plug config.Pluggable + Expected interface{} + }{ + { + File: "source-sqs.hcl", + Plug: testSQSSourceAdapter(testSQSSourceFunc), + Expected: &configuration{ + QueueName: "testQueue", + Region: "us-test-1", + RoleARN: "xxx-test-role-arn", + ConcurrentWrites: 50, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + assert.Nil(err) + + use := c.Data.Source.Use + decoderOpts := &config.DecoderOptions{ + Input: use.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +// Helpers +func testSQSSourceAdapter(f func(c *configuration) (*configuration, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected SQSSourceConfig") + } + + return f(cfg) + } + +} + +func testSQSSourceFunc(c *configuration) (*configuration, error) { + + return c, nil +} diff --git a/pkg/source/stdin/stdin_source.go b/pkg/source/stdin/stdin_source.go index 79386c42..8148760d 100644 --- a/pkg/source/stdin/stdin_source.go +++ b/pkg/source/stdin/stdin_source.go @@ -16,12 +16,16 @@ import ( log "github.com/sirupsen/logrus" "github.com/twinj/uuid" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/models"
"github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// configuration configures the source for records pulled +type configuration struct { + ConcurrentWrites int `hcl:"concurrent_writes,optional" env:"SOURCE_CONCURRENT_WRITES"` +} + // stdinSource holds a new client for reading messages from stdin type stdinSource struct { concurrentWrites int @@ -29,15 +33,49 @@ type stdinSource struct { log *log.Entry } -// configfunction returns an stdin source from a config -func configfunction(c *config.Config) (sourceiface.Source, error) { +// configfunction returns an stdin source from a config +func configfunction(c *configuration) (sourceiface.Source, error) { return newStdinSource( - c.Sources.ConcurrentWrites, + c.ConcurrentWrites, ) } +// The adapter type is an adapter for functions to be used as +// pluggable components for Stdin Source. It implements the Pluggable interface. +type adapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{ + ConcurrentWrites: 50, + } + + return cfg, nil +} + +// adapterGenerator returns a StdinSource adapter. +func adapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*configuration) + if !ok { + return nil, errors.New("invalid input, expected StdinSourceConfig") + } + + return f(cfg) + } +} + // ConfigPair is passed to configuration to determine when to build an stdin source.
-var ConfigPair = sourceconfig.ConfigPair{SourceName: "stdin", SourceConfigFunc: configfunction} +var ConfigPair = sourceconfig.ConfigPair{ + Name: "stdin", + Handle: adapterGenerator(configfunction), +} // newStdinSource creates a new client for reading messages from stdin func newStdinSource(concurrentWrites int) (*stdinSource, error) { diff --git a/pkg/source/stdin/stdin_source_test.go b/pkg/source/stdin/stdin_source_test.go index 8bca5c57..e9f01729 100644 --- a/pkg/source/stdin/stdin_source_test.go +++ b/pkg/source/stdin/stdin_source_test.go @@ -64,15 +64,15 @@ func TestGetSource_WithStdinSource(t *testing.T) { supportedSources := []sourceconfig.ConfigPair{ConfigPair} - defer os.Unsetenv("SOURCE") + defer os.Unsetenv("SOURCE_NAME") - os.Setenv("SOURCE", "stdin") + os.Setenv("SOURCE_NAME", "stdin") - stdinConfig, err := config.NewConfig() - assert.NotNil(stdinConfig) + c, err := config.NewConfig() + assert.NotNil(c) assert.Nil(err) - stdinSource, err := sourceconfig.GetSource(stdinConfig, supportedSources) + stdinSource, err := sourceconfig.GetSource(c, supportedSources) assert.NotNil(stdinSource) assert.Nil(err) diff --git a/pkg/statsreceiver/statsd.go b/pkg/statsreceiver/statsd.go index 48babe95..49864739 100644 --- a/pkg/statsreceiver/statsd.go +++ b/pkg/statsreceiver/statsd.go @@ -17,6 +17,13 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/models" ) +// StatsDStatsReceiverConfig configures the stats metrics receiver +type StatsDStatsReceiverConfig struct { + Address string `hcl:"address,optional" env:"STATS_RECEIVER_STATSD_ADDRESS"` + Prefix string `hcl:"prefix,optional" env:"STATS_RECEIVER_STATSD_PREFIX"` + Tags string `hcl:"tags,optional" env:"STATS_RECEIVER_STATSD_TAGS"` +} + // StatsDStatsReceiver holds a new client for writing statistics to a StatsD server type StatsDStatsReceiver struct { client *statsd.Client @@ -51,6 +58,53 @@ func NewStatsDStatsReceiver(address string, prefix string, tagsRaw string, tagsM }, nil } +// 
NewStatsDReceiverWithTags closes over a given tags map and returns a function +// that creates a StatsDStatsReceiver given a StatsDStatsReceiverConfig. +func NewStatsDReceiverWithTags(tags map[string]string) func(c *StatsDStatsReceiverConfig) (*StatsDStatsReceiver, error) { + return func(c *StatsDStatsReceiverConfig) (*StatsDStatsReceiver, error) { + return NewStatsDStatsReceiver( + c.Address, + c.Prefix, + c.Tags, + tags, + ) + } +} + +// The StatsDStatsReceiverAdapter type is an adapter for functions to be used as +// pluggable components for StatsD Stats Receiver. +// It implements the Pluggable interface. +type StatsDStatsReceiverAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f StatsDStatsReceiverAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f StatsDStatsReceiverAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &StatsDStatsReceiverConfig{ + Prefix: "snowplow.stream-replicator", + Tags: "{}", + } + + return cfg, nil +} + +// AdaptStatsDStatsReceiverFunc returns a StatsDStatsReceiverAdapter. 
+func AdaptStatsDStatsReceiverFunc(f func(c *StatsDStatsReceiverConfig) (*StatsDStatsReceiver, error)) StatsDStatsReceiverAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*StatsDStatsReceiverConfig) + if !ok { + return nil, errors.New("invalid input, expected StatsDStatsReceiverConfig") + } + + return f(cfg) + } +} + // Send emits the bufferred metrics to the receiver func (s *StatsDStatsReceiver) Send(b *models.ObserverBuffer) { s.client.Incr("message_sent", b.MsgSent) diff --git a/pkg/target/eventhub.go b/pkg/target/eventhub.go index 478bf44b..b199bd93 100644 --- a/pkg/target/eventhub.go +++ b/pkg/target/eventhub.go @@ -21,14 +21,14 @@ import ( // EventHubConfig holds a config object for Azure EventHub type EventHubConfig struct { - EventHubNamespace string - EventHubName string - MaxAutoRetries int - MessageByteLimit int - ChunkByteLimit int - ChunkMessageLimit int - ContextTimeoutInSeconds int - BatchByteLimit int + EventHubNamespace string `hcl:"namespace" env:"TARGET_EVENTHUB_NAMESPACE"` + EventHubName string `hcl:"name" env:"TARGET_EVENTHUB_NAME"` + MaxAutoRetries int `hcl:"max_auto_retries,optional" env:"TARGET_EVENTHUB_MAX_AUTO_RETRY"` + MessageByteLimit int `hcl:"message_byte_limit,optional" env:"TARGET_EVENTHUB_MESSAGE_BYTE_LIMIT"` + ChunkByteLimit int `hcl:"chunk_byte_limit,optional" env:"TARGET_EVENTHUB_CHUNK_BYTE_LIMIT"` + ChunkMessageLimit int `hcl:"chunk_message_limit,optional" env:"TARGET_EVENTHUB_CHUNK_MESSAGE_LIMIT"` + ContextTimeoutInSeconds int `hcl:"context_timeout_in_seconds,optional" env:"TARGET_EVENTHUB_CONTEXT_TIMEOUT_SECONDS"` + BatchByteLimit int `hcl:"batch_byte_limit,optional" env:"TARGET_EVENTHUB_BATCH_BYTE_LIMIT"` } // EventHubTarget holds a new client for writing messages to Azure EventHub @@ -84,6 +84,43 @@ func NewEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { }, err } +// The EventHubTargetAdapter type is an adapter for functions to be used as +// pluggable components for EventHub 
target. Implements the Pluggable interface. +type EventHubTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f EventHubTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f EventHubTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &EventHubConfig{ + MaxAutoRetries: 1, + MessageByteLimit: 1048576, + ChunkByteLimit: 1048576, + ChunkMessageLimit: 500, + ContextTimeoutInSeconds: 20, + BatchByteLimit: 1048576, + } + + return cfg, nil +} + +// AdaptEventHubTargetFunc returns an EventHubTargetAdapter. +func AdaptEventHubTargetFunc(f func(c *EventHubConfig) (*EventHubTarget, error)) EventHubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*EventHubConfig) + if !ok { + return nil, errors.New("invalid input, expected EventHubConfig") + } + + return f(cfg) + } +} + func (eht *EventHubTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { eht.log.Debugf("Writing %d messages to stream ...", len(messages)) diff --git a/pkg/target/http.go b/pkg/target/http.go index 976af5ed..98c33937 100644 --- a/pkg/target/http.go +++ b/pkg/target/http.go @@ -21,6 +21,21 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/models" ) +// HTTPTargetConfig configures the destination for records consumed +type HTTPTargetConfig struct { + HTTPURL string `hcl:"url" env:"TARGET_HTTP_URL"` + ByteLimit int `hcl:"byte_limit,optional" env:"TARGET_HTTP_BYTE_LIMIT"` // default: 1048576 + RequestTimeoutInSeconds int `hcl:"request_timeout_in_seconds,optional" env:"TARGET_HTTP_TIMEOUT_IN_SECONDS"` // default: 5 (request timeout in seconds)
+ ContentType string `hcl:"content_type,optional" env:"TARGET_HTTP_CONTENT_TYPE"` // default: "application/json" + Headers string `hcl:"headers,optional" env:"TARGET_HTTP_HEADERS" ` + BasicAuthUsername string `hcl:"basic_auth_username,optional" env:"TARGET_HTTP_BASICAUTH_USERNAME"` + BasicAuthPassword string `hcl:"basic_auth_password,optional" env:"TARGET_HTTP_BASICAUTH_PASSWORD"` + CertFile string `hcl:"cert_file,optional" env:"TARGET_HTTP_TLS_CERT_FILE"` + KeyFile string `hcl:"key_file,optional" env:"TARGET_HTTP_TLS_KEY_FILE"` + CaFile string `hcl:"ca_file,optional" env:"TARGET_HTTP_TLS_CA_FILE"` + SkipVerifyTLS bool `hcl:"skip_verify_tls,optional" env:"TARGET_HTTP_TLS_SKIP_VERIFY_TLS"` // default: false +} + // HTTPTarget holds a new client for writing messages to HTTP endpoints type HTTPTarget struct { client *http.Client @@ -101,6 +116,57 @@ func NewHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentTyp }, nil } +// HTTPTargetConfigFunction creates HTTPTarget from HTTPTargetConfig +func HTTPTargetConfigFunction(c *HTTPTargetConfig) (*HTTPTarget, error) { + return NewHTTPTarget( + c.HTTPURL, + c.RequestTimeoutInSeconds, + c.ByteLimit, + c.ContentType, + c.Headers, + c.BasicAuthUsername, + c.BasicAuthPassword, + c.CertFile, + c.KeyFile, + c.CaFile, + c.SkipVerifyTLS, + ) +} + +// The HTTPTargetAdapter type is an adapter for functions to be used as +// pluggable components for HTTP Target. It implements the Pluggable interface. +type HTTPTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f HTTPTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f HTTPTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value.
+ cfg := &HTTPTargetConfig{ + ByteLimit: 1048576, + RequestTimeoutInSeconds: 5, + ContentType: "application/json", + } + + return cfg, nil +} + +// AdaptHTTPTargetFunc returns an HTTPTargetAdapter. +func AdaptHTTPTargetFunc(f func(c *HTTPTargetConfig) (*HTTPTarget, error)) HTTPTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*HTTPTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected HTTPTargetConfig") + } + + return f(cfg) + } +} + func (ht *HTTPTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { ht.log.Debugf("Writing %d messages to topic ...", len(messages)) diff --git a/pkg/target/kafka.go b/pkg/target/kafka.go index 1f5f4c09..61247151 100644 --- a/pkg/target/kafka.go +++ b/pkg/target/kafka.go @@ -24,26 +24,26 @@ import ( // KafkaConfig contains configurable options for the kafka target type KafkaConfig struct { - Brokers string - TopicName string - TargetVersion string - MaxRetries int - ByteLimit int - Compress bool - WaitForAll bool - Idempotent bool - EnableSASL bool - SASLUsername string - SASLPassword string - SASLAlgorithm string - CertFile string - KeyFile string - CaFile string - SkipVerifyTLS bool - ForceSync bool - FlushFrequency int - FlushMessages int - FlushBytes int + Brokers string `hcl:"brokers" env:"TARGET_KAFKA_BROKERS"` + TopicName string `hcl:"topic_name" env:"TARGET_KAFKA_TOPIC_NAME"` + TargetVersion string `hcl:"target_version,optional" env:"TARGET_KAFKA_TARGET_VERSION"` + MaxRetries int `hcl:"max_retries,optional" env:"TARGET_KAFKA_MAX_RETRIES"` + ByteLimit int `hcl:"byte_limit,optional" env:"TARGET_KAFKA_BYTE_LIMIT"` + Compress bool `hcl:"compress,optional" env:"TARGET_KAFKA_COMPRESS"` + WaitForAll bool `hcl:"wait_for_all,optional" env:"TARGET_KAFKA_WAIT_FOR_ALL"` + Idempotent bool `hcl:"idempotent,optional" env:"TARGET_KAFKA_IDEMPOTENT"` + EnableSASL bool `hcl:"enable_sasl,optional" env:"TARGET_KAFKA_ENABLE_SASL"` + SASLUsername string 
`hcl:"sasl_username,optional" env:"TARGET_KAFKA_SASL_USERNAME" ` + SASLPassword string `hcl:"sasl_password,optional" env:"TARGET_KAFKA_SASL_PASSWORD"` + SASLAlgorithm string `hcl:"sasl_algorithm,optional" env:"TARGET_KAFKA_SASL_ALGORITHM"` + CertFile string `hcl:"cert_file,optional" env:"TARGET_KAFKA_TLS_CERT_FILE"` + KeyFile string `hcl:"key_file,optional" env:"TARGET_KAFKA_TLS_KEY_FILE"` + CaFile string `hcl:"ca_file,optional" env:"TARGET_KAFKA_TLS_CA_FILE"` + SkipVerifyTLS bool `hcl:"skip_verify_tls,optional" env:"TARGET_KAFKA_TLS_SKIP_VERIFY_TLS"` + ForceSync bool `hcl:"force_sync_producer,optional" env:"TARGET_KAFKA_FORCE_SYNC_PRODUCER"` + FlushFrequency int `hcl:"flush_frequency,optional" env:"TARGET_KAFKA_FLUSH_FREQUENCY"` + FlushMessages int `hcl:"flush_messages,optional" env:"TARGET_KAFKA_FLUSH_MESSAGES"` + FlushBytes int `hcl:"flush_bytes,optional" env:"TARGET_KAFKA_FLUSH_BYTES"` } // KafkaTarget holds a new client for writing messages to Apache Kafka @@ -175,6 +175,40 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { }, producerError } +// The KafkaTargetAdapter type is an adapter for functions to be used as +// pluggable components for Kafka target. It implements the Pluggable interface. +type KafkaTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f KafkaTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f KafkaTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &KafkaConfig{ + MaxRetries: 10, + ByteLimit: 1048576, + SASLAlgorithm: "sha512", + } + + return cfg, nil +} + +// AdaptKafkaTargetFunc returns a KafkaTargetAdapter. 
+func AdaptKafkaTargetFunc(f func(c *KafkaConfig) (*KafkaTarget, error)) KafkaTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*KafkaConfig) + if !ok { + return nil, errors.New("invalid input, expected KafkaConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target func (kt *KafkaTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { kt.log.Debugf("Writing %d messages to topic ...", len(messages)) diff --git a/pkg/target/kinesis.go b/pkg/target/kinesis.go index 7dcc2c1f..29f1dd24 100644 --- a/pkg/target/kinesis.go +++ b/pkg/target/kinesis.go @@ -31,6 +31,13 @@ const ( kinesisPutRecordsRequestByteLimit = kinesisPutRecordsMessageByteLimit * 5 ) +// KinesisTargetConfig configures the destination for records consumed +type KinesisTargetConfig struct { + StreamName string `hcl:"stream_name" env:"TARGET_KINESIS_STREAM_NAME"` + Region string `hcl:"region" env:"TARGET_KINESIS_REGION"` + RoleARN string `hcl:"role_arn,optional" env:"TARGET_KINESIS_ROLE_ARN"` +} + // KinesisTarget holds a new client for writing messages to kinesis type KinesisTarget struct { client kinesisiface.KinesisAPI @@ -64,6 +71,40 @@ func NewKinesisTargetWithInterfaces(client kinesisiface.KinesisAPI, awsAccountID }, nil } +// KinesisTargetConfigFunction creates KinesisTarget from KinesisTargetConfig. +func KinesisTargetConfigFunction(c *KinesisTargetConfig) (*KinesisTarget, error) { + return NewKinesisTarget(c.Region, c.StreamName, c.RoleARN) +} + +// The KinesisTargetAdapter type is an adapter for functions to be used as +// pluggable components for Kinesis Target. Implements the Pluggable interface. +type KinesisTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f KinesisTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. 
+func (f KinesisTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults if any + cfg := &KinesisTargetConfig{} + + return cfg, nil +} + +// AdaptKinesisTargetFunc returns a KinesisTargetAdapter. +func AdaptKinesisTargetFunc(f func(c *KinesisTargetConfig) (*KinesisTarget, error)) KinesisTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*KinesisTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected KinesisTargetConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target // TODO: Should each put be in its own goroutine? func (kt *KinesisTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { diff --git a/pkg/target/pubsub.go b/pkg/target/pubsub.go index 6d72eddd..c25398ed 100644 --- a/pkg/target/pubsub.go +++ b/pkg/target/pubsub.go @@ -25,6 +25,12 @@ const ( pubSubPublishMessageByteLimit = 10485760 ) +// PubSubTargetConfig configures the destination for records consumed +type PubSubTargetConfig struct { + ProjectID string `hcl:"project_id" env:"TARGET_PUBSUB_PROJECT_ID"` + TopicName string `hcl:"topic_name" env:"TARGET_PUBSUB_TOPIC_NAME"` +} + // PubSubTarget holds a new client for writing messages to Google PubSub type PubSubTarget struct { projectID string @@ -59,6 +65,40 @@ func NewPubSubTarget(projectID string, topicName string) (*PubSubTarget, error) }, nil } +// PubSubTargetConfigFunction creates PubSubTarget from PubSubTargetConfig +func PubSubTargetConfigFunction(c *PubSubTargetConfig) (*PubSubTarget, error) { + return NewPubSubTarget(c.ProjectID, c.TopicName) +} + +// The PubSubTargetAdapter type is an adapter for functions to be used as +// pluggable components for PubSub Target. It implements the Pluggable interface. +type PubSubTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. 
+func (f PubSubTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f PubSubTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults if any + cfg := &PubSubTargetConfig{} + + return cfg, nil +} + +// AdaptPubSubTargetFunc returns a PubSubTargetAdapter. +func AdaptPubSubTargetFunc(f func(c *PubSubTargetConfig) (*PubSubTarget, error)) PubSubTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*PubSubTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected PubSubTargetConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target func (ps *PubSubTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { ps.log.Debugf("Writing %d messages to topic ...", len(messages)) diff --git a/pkg/target/sqs.go b/pkg/target/sqs.go index 7e23ecc6..f612b6b1 100644 --- a/pkg/target/sqs.go +++ b/pkg/target/sqs.go @@ -32,6 +32,13 @@ const ( sqsSendMessageBatchByteLimit = 262144 ) +// SQSTargetConfig configures the destination for records consumed +type SQSTargetConfig struct { + QueueName string `hcl:"queue_name" env:"TARGET_SQS_QUEUE_NAME"` + Region string `hcl:"region" env:"TARGET_SQS_REGION"` + RoleARN string `hcl:"role_arn,optional" env:"TARGET_SQS_ROLE_ARN"` +} + // SQSTarget holds a new client for writing messages to sqs type SQSTarget struct { client sqsiface.SQSAPI @@ -66,6 +73,40 @@ func NewSQSTargetWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, reg }, nil } +// SQSTargetConfigFunction creates an SQSTarget from an SQSTargetConfig +func SQSTargetConfigFunction(c *SQSTargetConfig) (*SQSTarget, error) { + return NewSQSTarget(c.Region, c.QueueName, c.RoleARN) +} + +// The SQSTargetAdapter type is an adapter for functions to be used as +// pluggable components for SQS Target. It implements the Pluggable interface. 
+type SQSTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f SQSTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f SQSTargetAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults if any + cfg := &SQSTargetConfig{} + + return cfg, nil +} + +// AdaptSQSTargetFunc returns a SQSTargetAdapter. +func AdaptSQSTargetFunc(f func(c *SQSTargetConfig) (*SQSTarget, error)) SQSTargetAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*SQSTargetConfig) + if !ok { + return nil, errors.New("invalid input, expected SQSTargetConfig") + } + + return f(cfg) + } +} + // Write pushes all messages to the required target // TODO: Should each put be in its own goroutine? func (st *SQSTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { diff --git a/pkg/target/stdout.go b/pkg/target/stdout.go index 74b6fca7..4ecc0781 100644 --- a/pkg/target/stdout.go +++ b/pkg/target/stdout.go @@ -7,6 +7,7 @@ package target import ( + "errors" "fmt" log "github.com/sirupsen/logrus" @@ -26,6 +27,31 @@ func NewStdoutTarget() (*StdoutTarget, error) { }, nil } +// The StdoutTargetAdapter type is an adapter for functions to be used as +// pluggable components for Stdout Target. It implements the Pluggable interface. +type StdoutTargetAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f StdoutTargetAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f StdoutTargetAdapter) ProvideDefault() (interface{}, error) { + return nil, nil +} + +// AdaptStdoutTargetFunc returns StdoutTargetAdapter. 
+func AdaptStdoutTargetFunc(f func() (*StdoutTarget, error)) StdoutTargetAdapter { + return func(i interface{}) (interface{}, error) { + if i != nil { + return nil, errors.New("unexpected configuration input for Stdout target") + } + + return f() + } +} + // Write pushes all messages to the required target func (st *StdoutTarget) Write(messages []*models.Message) (*models.TargetWriteResult, error) { st.log.Debugf("Writing %d messages to stdout ...", len(messages)) From 1a410fbffb7a978d01b9b0f9d5fdde8a489aeccc Mon Sep 17 00:00:00 2001 From: TiganeteaRobert Date: Fri, 1 Jul 2022 16:51:23 +0100 Subject: [PATCH 02/25] =?UTF-8?q?Clean=20up=20tls=20configuration=C2=A0(cl?= =?UTF-8?q?oses=20#177)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gitignore | 3 + Makefile | 3 - cmd/cli/cli.go | 11 +++ config/component_test.go | 31 ++++--- config/config_test.go | 1 + .../failure-targets/http-extended.hcl | 2 +- config/examples/targets/http-extended.hcl | 2 +- config/examples/targets/kafka-extended.hcl | 6 +- config/test-fixtures/target-http-extended.hcl | 6 +- .../test-fixtures/target-kafka-extended.hcl | 6 +- integration/http/localhost.key | 2 +- integration/http/rootCA.crt | 2 +- pkg/common/helpers.go | 89 ++++++++++++++++--- pkg/common/helpers_test.go | 21 ++++- pkg/target/http.go | 16 +++- pkg/target/http_test.go | 19 ++-- pkg/target/kafka.go | 11 ++- pkg/target/targetutil.go | 42 --------- 18 files changed, 180 insertions(+), 93 deletions(-) delete mode 100644 pkg/target/targetutil.go diff --git a/.gitignore b/.gitignore index 21bf590a..c414211c 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,6 @@ vendor/ build/ dist/ .localstack/ + +#temporary directory created by tests +tmp_replicator/ \ No newline at end of file diff --git a/Makefile b/Makefile index 6c132d60..6bf6cc67 100644 --- a/Makefile +++ b/Makefile @@ -11,8 +11,6 @@ go_dirs = `go list ./... 
| grep -v /build/ | grep -v /vendor/` build_dir = build vendor_dir = vendor integration_dir = integration -cert_dir = $(integration_dir)/http -abs_cert_dir = $$(pwd)/$(cert_dir) ngrok_path = ${NGROK_DIR}ngrok # Set NGROK_DIR to `/path/to/directory/` for local setup coverage_dir = $(build_dir)/coverage @@ -144,7 +142,6 @@ test: test-setup GO111MODULE=on go tool cover -func=$(coverage_out) integration-test: test-setup - export CERT_DIR=$(abs_cert_dir); \ GO111MODULE=on go test $(go_dirs) -v -covermode=count -coverprofile=$(coverage_out) GO111MODULE=on go tool cover -html=$(coverage_out) -o $(coverage_html) GO111MODULE=on go tool cover -func=$(coverage_out) diff --git a/cmd/cli/cli.go b/cmd/cli/cli.go index 5608195f..290e8448 100644 --- a/cmd/cli/cli.go +++ b/cmd/cli/cli.go @@ -22,6 +22,7 @@ import ( _ "net/http/pprof" "github.com/snowplow-devops/stream-replicator/cmd" + "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/failure/failureiface" "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/observer" @@ -121,6 +122,11 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { select { case <-stop: log.Debug("source.Stop() finished successfully!") + + err := common.DeleteTemporaryDir() + if err != nil { + log.Debugf(`error deleting tmp directory: %v`, err) + } case <-time.After(5 * time.Second): log.Error("source.Stop() took more than 5 seconds, forcing shutdown ...") @@ -128,6 +134,11 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { ft.Close() o.Stop() + err := common.DeleteTemporaryDir() + if err != nil { + log.Debugf(`error deleting tmp directory: %v`, err) + } + os.Exit(1) } }() diff --git a/config/component_test.go b/config/component_test.go index f9515d12..1292ab74 100644 --- a/config/component_test.go +++ b/config/component_test.go @@ -8,6 +8,7 @@ package config import ( "errors" + "os" "path/filepath" "reflect" 
"testing" @@ -19,6 +20,10 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/target" ) +func init() { + os.Clearenv() +} + func TestCreateTargetComponentHCL(t *testing.T) { testCases := []struct { File string @@ -90,9 +95,9 @@ func TestCreateTargetComponentHCL(t *testing.T) { Headers: "{\"Accept-Language\":\"en-US\"}", BasicAuthUsername: "testUsername", BasicAuthPassword: "testPass", - CertFile: "test.cert", - KeyFile: "test.key", - CaFile: "test.ca", + CertFile: "myLocalhost.crt", + KeyFile: "MyLocalhost.key", + CaFile: "myRootCA.crt", SkipVerifyTLS: true, }, }, @@ -138,9 +143,9 @@ func TestCreateTargetComponentHCL(t *testing.T) { SASLUsername: "testUsername", SASLPassword: "testPass", SASLAlgorithm: "sha256", - CertFile: "test.cert", - KeyFile: "test.key", - CaFile: "test.ca", + CertFile: "myLocalhost.crt", + KeyFile: "MyLocalhost.key", + CaFile: "myRootCA.crt", SkipVerifyTLS: true, ForceSync: true, FlushFrequency: 2, @@ -217,9 +222,9 @@ func TestCreateFailureTargetComponentENV(t *testing.T) { SASLUsername: "testUsername", SASLPassword: "testPass", SASLAlgorithm: "sha256", - CertFile: "test.cert", - KeyFile: "test.key", - CaFile: "test.ca", + CertFile: "test/certfile.crt", + KeyFile: "test/keyfile.key", + CaFile: "test/cafile.crt", SkipVerifyTLS: true, ForceSync: true, FlushFrequency: 2, @@ -230,7 +235,7 @@ func TestCreateFailureTargetComponentENV(t *testing.T) { t.Run(testCase.Name, func(t *testing.T) { assert := assert.New(t) - + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") t.Setenv("FAILURE_TARGET_NAME", "kafka") t.Setenv("FAILURE_TARGET_KAFKA_BROKERS", "testBrokers") t.Setenv("FAILURE_TARGET_KAFKA_TOPIC_NAME", "testTopic") @@ -244,9 +249,9 @@ func TestCreateFailureTargetComponentENV(t *testing.T) { t.Setenv("FAILURE_TARGET_KAFKA_SASL_USERNAME", "testUsername") t.Setenv("FAILURE_TARGET_KAFKA_SASL_PASSWORD", "testPass") t.Setenv("FAILURE_TARGET_KAFKA_SASL_ALGORITHM", "sha256") - t.Setenv("FAILURE_TARGET_KAFKA_TLS_CERT_FILE", "test.cert") - 
t.Setenv("FAILURE_TARGET_KAFKA_TLS_KEY_FILE", "test.key") - t.Setenv("FAILURE_TARGET_KAFKA_TLS_CA_FILE", "test.ca") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_CERT_FILE", "test/certfile.crt") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_KEY_FILE", "test/keyfile.key") + t.Setenv("FAILURE_TARGET_KAFKA_TLS_CA_FILE", "test/cafile.crt") t.Setenv("FAILURE_TARGET_KAFKA_TLS_SKIP_VERIFY_TLS", "true") t.Setenv("FAILURE_TARGET_KAFKA_FORCE_SYNC_PRODUCER", "true") t.Setenv("FAILURE_TARGET_KAFKA_FLUSH_FREQUENCY", "2") diff --git a/config/config_test.go b/config/config_test.go index c42d2f32..ec242f8e 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -44,6 +44,7 @@ func TestNewConfig(t *testing.T) { observer, err := c.GetObserver(map[string]string{}) assert.NotNil(observer) assert.Nil(err) + os.RemoveAll(`tmp_replicator`) } func TestNewConfig_FromEnv(t *testing.T) { diff --git a/config/examples/failure-targets/http-extended.hcl b/config/examples/failure-targets/http-extended.hcl index 68f319f4..ce3b5c95 100644 --- a/config/examples/failure-targets/http-extended.hcl +++ b/config/examples/failure-targets/http-extended.hcl @@ -36,7 +36,7 @@ failure_target { ca_file = "myRootCA.crt" # Whether to skip verifying ssl certificates chain (default: false) - # If cert_file and key_file are not provided, this setting is not applied. + # If tls_cert and tls_key are not provided, this setting is not applied. skip_verify_tls = true } } diff --git a/config/examples/targets/http-extended.hcl b/config/examples/targets/http-extended.hcl index a70ca33e..d1223e14 100644 --- a/config/examples/targets/http-extended.hcl +++ b/config/examples/targets/http-extended.hcl @@ -36,7 +36,7 @@ target { ca_file = "myRootCA.crt" # Whether to skip verifying ssl certificates chain (default: false) - # If cert_file and key_file are not provided, this setting is not applied. + # If tls_cert and tls_key are not provided, this setting is not applied. 
skip_verify_tls = true } } diff --git a/config/examples/targets/kafka-extended.hcl b/config/examples/targets/kafka-extended.hcl index 16bdcc9c..19e28ca8 100644 --- a/config/examples/targets/kafka-extended.hcl +++ b/config/examples/targets/kafka-extended.hcl @@ -39,13 +39,13 @@ target { sasl_algorithm = "sha256" # The optional certificate file for client authentication - cert_file = "myLocalhost.crt" + cert_file = "myLocalhost.crt" # The optional key file for client authentication - key_file = "MyLocalhost.key" + key_file = "MyLocalhost.key" # The optional certificate authority file for TLS client authentication - ca_file = "myRootCA.crt" + ca_file = "myRootCA.crt" # Whether to skip verifying ssl certificates chain (default: false) skip_verify_tls = true diff --git a/config/test-fixtures/target-http-extended.hcl b/config/test-fixtures/target-http-extended.hcl index 7dfadbf4..d29f4d8f 100644 --- a/config/test-fixtures/target-http-extended.hcl +++ b/config/test-fixtures/target-http-extended.hcl @@ -9,9 +9,9 @@ target { headers = "{\"Accept-Language\":\"en-US\"}" basic_auth_username = "testUsername" basic_auth_password = "testPass" - cert_file = "test.cert" - key_file = "test.key" - ca_file = "test.ca" + cert_file = "myLocalhost.crt" + key_file = "MyLocalhost.key" + ca_file = "myRootCA.crt" skip_verify_tls = true } } diff --git a/config/test-fixtures/target-kafka-extended.hcl b/config/test-fixtures/target-kafka-extended.hcl index d0faa9f8..b325fcd7 100644 --- a/config/test-fixtures/target-kafka-extended.hcl +++ b/config/test-fixtures/target-kafka-extended.hcl @@ -14,9 +14,9 @@ target { sasl_username = "testUsername" sasl_password = "testPass" sasl_algorithm = "sha256" - cert_file = "test.cert" - key_file = "test.key" - ca_file = "test.ca" + cert_file = "myLocalhost.crt" + key_file = "MyLocalhost.key" + ca_file = "myRootCA.crt" skip_verify_tls = true force_sync_producer = true flush_frequency = 2 diff --git a/integration/http/localhost.key 
b/integration/http/localhost.key index 93424361..11dec0e6 100644 --- a/integration/http/localhost.key +++ b/integration/http/localhost.key @@ -24,4 +24,4 @@ itxdXvoomlhwDKZv0Y+vPm4V9SBx/36ubf6bM6vKoZTSuv2+ktA/uInFW+y/1mLH KD1JlQKBgQDNZry00fN3iJ9stUNEYVaAtXQ1a0/LY/r2NuC04IwemwOyFUvzY7G9 sNXeIxTYjQ9OCp9+EE1n6Q3yg63MmTrNuD51f0h2tftokYBaoYBny34HuQf0N7qF laOI6yiORZ4eGdYrpCq+q+J0fAkRca0M4Nq/lDEw4bric38WpPxV3Q== ------END RSA PRIVATE KEY----- +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/integration/http/rootCA.crt b/integration/http/rootCA.crt index 79bbfc13..df8b1a27 100644 --- a/integration/http/rootCA.crt +++ b/integration/http/rootCA.crt @@ -27,4 +27,4 @@ Ggg1Qo5z0+XT2l+2KhOC02ydgHV1/tT6cVVX3ZkBvvb/WPHmVp9bT8zqeJzrMQkM 9DaKEyZKw+LYy7sZp4p4giE/JAzBLidsfIdznhYguPjKgboPMfiJvapzyZPEJsDu ShYb5uIlytHwAVlGiUgjx+z/YXBQN1vWsCm5pVL4RGdXdcq5HZzZRaJxAUBrfmiU uCJPEnUJ1emIqakgSy3yA+9WtQ== ------END CERTIFICATE----- +-----END CERTIFICATE----- \ No newline at end of file diff --git a/pkg/common/helpers.go b/pkg/common/helpers.go index 0c44df1a..edea93fe 100644 --- a/pkg/common/helpers.go +++ b/pkg/common/helpers.go @@ -7,8 +7,11 @@ package common import ( + "crypto/tls" + "crypto/x509" "encoding/base64" "fmt" + "io/ioutil" "os" "time" @@ -26,25 +29,47 @@ import ( // and attempt to create a JSON file on disk within the /tmp directory // for later use in creating GCP clients. 
func GetGCPServiceAccountFromBase64(serviceAccountB64 string) (string, error) { - sDec, err := base64.StdEncoding.DecodeString(serviceAccountB64) + targetFile := fmt.Sprintf(`tmp_replicator/stream-replicator-service-account-%s.json`, uuid.NewV4().String()) + err := DecodeB64ToFile(serviceAccountB64, targetFile) if err != nil { - return "", errors.Wrap(err, "Failed to Base64 decode service account") + return ``, err } + return targetFile, nil +} + +// DeleteTemporaryDir deletes the temp directory we created to store credentials +func DeleteTemporaryDir() error { + err := os.RemoveAll(`tmp_replicator`) + return err +} - targetFile := fmt.Sprintf("/tmp/stream-replicator-service-account-%s.json", uuid.NewV4().String()) +// DecodeB64ToFile takes a B64-encoded credential, decodes it, and writes it to a file +func DecodeB64ToFile(b64String, filename string) error { + tls, decodeErr := base64.StdEncoding.DecodeString(b64String) + if decodeErr != nil { + return errors.Wrap(decodeErr, "Failed to Base64 decode for creating file "+filename) + } - f, err := os.Create(targetFile) + err := createTempDir(`tmp_replicator`) if err != nil { - return "", errors.Wrap(err, fmt.Sprintf("Failed to create target file '%s' for service account", targetFile)) + return err } - defer f.Close() - _, err2 := f.WriteString(string(sDec)) - if err2 != nil { - return "", errors.Wrap(err, fmt.Sprintf("Failed to write decoded service account to target file '%s'", targetFile)) + f, createErr := os.Create(filename) + if createErr != nil { + return errors.Wrap(createErr, fmt.Sprintf("Failed to create file '%s'", filename)) } - return targetFile, nil + _, writeErr := f.WriteString(string(tls)) + if writeErr != nil { + return errors.Wrap(decodeErr, fmt.Sprintf("Failed to write decoded base64 string to target file '%s'", filename)) + } + err = f.Close() + if err != nil { + return err + } + + return nil } // GetAWSSession is a general tool to handle generating an AWS session @@ -87,3 +112,47 @@ func 
GetAverageFromDuration(sum time.Duration, total int64) time.Duration { } return time.Duration(0) } + +func createTempDir(dirName string) error { + dir, statErr := os.Stat(dirName) + if statErr != nil && !errors.Is(statErr, os.ErrNotExist) { + return errors.Wrap(statErr, fmt.Sprintf("Failed checking for existence of %s dir", dirName)) + } + + if dir == nil { + dirErr := os.Mkdir(dirName, 0700) + if dirErr != nil && !errors.Is(dirErr, os.ErrExist) { + return errors.Wrap(dirErr, fmt.Sprintf("Failed to create %s directory", dirName)) + } + } + return nil +} + +// CreateTLSConfiguration creates a TLS configuration for use in a target +func CreateTLSConfiguration(certFile string, keyFile string, caFile string, skipVerify bool) (*tls.Config, error) { + if certFile == "" || keyFile == "" { + return nil, nil + } + + cert, err := tls.LoadX509KeyPair(certFile, keyFile) + if err != nil { + return nil, err + } + + caCert, err := ioutil.ReadFile(caFile) + if err != nil { + return nil, err + } + + caCertPool, err := x509.SystemCertPool() + if err != nil { + return nil, err + } + caCertPool.AppendCertsFromPEM(caCert) + + return &tls.Config{ + Certificates: []tls.Certificate{cert}, + RootCAs: caCertPool, + InsecureSkipVerify: skipVerify, + }, nil +} diff --git a/pkg/common/helpers_test.go b/pkg/common/helpers_test.go index ee06b640..15fd47ae 100644 --- a/pkg/common/helpers_test.go +++ b/pkg/common/helpers_test.go @@ -7,6 +7,8 @@ package common import ( + "crypto/tls" + "os" "strings" "testing" "time" @@ -14,16 +16,21 @@ import ( "github.com/stretchr/testify/assert" ) +func init() { + os.Clearenv() +} + // --- Cloud Helpers func TestGetGCPServiceAccountFromBase64(t *testing.T) { assert := assert.New(t) + defer DeleteTemporaryDir() path, err := GetGCPServiceAccountFromBase64("ewogICJoZWxsbyI6IndvcmxkIgp9") assert.NotEqual(path, "") assert.Nil(err) - assert.True(strings.HasPrefix(path, "/tmp/stream-replicator-service-account-")) + assert.True(strings.HasPrefix(path, 
"tmp_replicator/stream-replicator-service-account-")) assert.True(strings.HasSuffix(path, ".json")) } @@ -34,12 +41,13 @@ func TestGetGCPServiceAccountFromBase64_NotBase64(t *testing.T) { assert.Equal(path, "") assert.NotNil(err) - assert.True(strings.HasPrefix(err.Error(), "Failed to Base64 decode service account: ")) + assert.True(strings.HasPrefix(err.Error(), "Failed to Base64 decode")) } func TestGetAWSSession(t *testing.T) { assert := assert.New(t) + t.Setenv("AWS_SHARED_CREDENTIALS_FILE", "") sess, cfg, accID, err := GetAWSSession("us-east-1", "") assert.NotNil(sess) assert.Nil(cfg) @@ -64,3 +72,12 @@ func TestGetAverageFromDuration(t *testing.T) { duration2 := GetAverageFromDuration(time.Duration(10)*time.Second, 2) assert.Equal(time.Duration(5)*time.Second, duration2) } + +func TestCreateTLSConfiguration(t *testing.T) { + assert := assert.New(t) + + conf, err := CreateTLSConfiguration(`../../integration/http/localhost.crt`, `../../integration/http/localhost.key`, `../../integration/http/rootCA.crt`, false) + + assert.Nil(err) + assert.IsType(tls.Config{}, *conf) +} diff --git a/pkg/target/http.go b/pkg/target/http.go index 98c33937..919b9766 100644 --- a/pkg/target/http.go +++ b/pkg/target/http.go @@ -15,12 +15,19 @@ import ( "net/url" "time" + "github.com/snowplow-devops/stream-replicator/pkg/common" + "github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/snowplow-devops/stream-replicator/pkg/models" ) +const ( + httpTarget = `http_target` +) + // HTTPTargetConfig configures the destination for records consumed type HTTPTargetConfig struct { HTTPURL string `hcl:"url" env:"TARGET_HTTP_URL"` @@ -96,11 +103,16 @@ func NewHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentTyp if err1 != nil { return nil, err1 } - tlsConfig, err2 := CreateTLSConfiguration(certFile, keyFile, caFile, skipVerifyTLS) + transport := &http.Transport{} + + tlsConfig, err2 := 
common.CreateTLSConfiguration(certFile, keyFile, caFile, skipVerifyTLS) if err2 != nil { return nil, err2 } - transport := &http.Transport{TLSClientConfig: tlsConfig} + if tlsConfig != nil { + transport.TLSClientConfig = tlsConfig + } + return &HTTPTarget{ client: &http.Client{ Transport: transport, diff --git a/pkg/target/http_test.go b/pkg/target/http_test.go index 6a5ce820..fbd1f79a 100644 --- a/pkg/target/http_test.go +++ b/pkg/target/http_test.go @@ -295,9 +295,9 @@ func TestHttpWrite_TLS(t *testing.T) { "", "", "", - os.Getenv("CERT_DIR")+"/localhost.crt", - os.Getenv("CERT_DIR")+"/localhost.key", - os.Getenv("CERT_DIR")+"/rootCA.crt", + string(`../../integration/http/localhost.crt`), + string(`../../integration/http/localhost.key`), + string(`../../integration/http/rootCA.crt`), false) if err != nil { panic(err) @@ -319,6 +319,8 @@ func TestHttpWrite_TLS(t *testing.T) { ngrokAddress := getNgrokAddress() + "/hello" + os.RemoveAll(`tmp_replicator`) + // Test that https requests work for different endpoints when different certs are provided manually target2, err2 := NewHTTPTarget(ngrokAddress, 5, @@ -327,11 +329,12 @@ func TestHttpWrite_TLS(t *testing.T) { "", "", "", - os.Getenv("CERT_DIR")+"/localhost.crt", - os.Getenv("CERT_DIR")+"/localhost.key", - os.Getenv("CERT_DIR")+"/rootCA.crt", + string(`../../integration/http/localhost.crt`), + string(`../../integration/http/localhost.key`), + string(`../../integration/http/rootCA.crt`), false) if err2 != nil { + os.RemoveAll(`tmp_replicator`) panic(err2) } @@ -342,6 +345,8 @@ func TestHttpWrite_TLS(t *testing.T) { assert.Equal(int64(20), ackOps) + os.RemoveAll(`tmp_replicator`) + // Test that https works when certs aren't manually provided // Test that https requests work for different endpoints when different certs are provided manually @@ -357,6 +362,7 @@ func TestHttpWrite_TLS(t *testing.T) { "", false) if err4 != nil { + os.RemoveAll(`tmp_replicator`) panic(err4) } @@ -366,6 +372,7 @@ func TestHttpWrite_TLS(t 
*testing.T) { assert.Equal(10, len(writeResult3.Sent)) assert.Equal(int64(30), ackOps) + os.RemoveAll(`tmp_replicator`) } type ngrokAPIObject struct { diff --git a/pkg/target/kafka.go b/pkg/target/kafka.go index 61247151..4e2808f8 100644 --- a/pkg/target/kafka.go +++ b/pkg/target/kafka.go @@ -14,12 +14,19 @@ import ( "strings" "time" + "github.com/snowplow-devops/stream-replicator/pkg/common" + "github.com/Shopify/sarama" "github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/xdg/scram" + + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +const ( + kafkaTarget = `kafka_target` ) // KafkaConfig contains configurable options for the kafka target @@ -116,7 +123,7 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { } } - tlsConfig, err := CreateTLSConfiguration(cfg.CertFile, cfg.KeyFile, cfg.CaFile, cfg.SkipVerifyTLS) + tlsConfig, err := common.CreateTLSConfiguration(cfg.CertFile, cfg.KeyFile, cfg.CaFile, cfg.SkipVerifyTLS) if err != nil { return nil, err } diff --git a/pkg/target/targetutil.go b/pkg/target/targetutil.go deleted file mode 100644 index 7b920ae1..00000000 --- a/pkg/target/targetutil.go +++ /dev/null @@ -1,42 +0,0 @@ -// PROPRIETARY AND CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
- -package target - -import ( - "crypto/tls" - "crypto/x509" - "io/ioutil" -) - -// CreateTLSConfiguration creates a TLS configuration for use in a target -func CreateTLSConfiguration(certFile string, keyFile string, caFile string, skipVerify bool) (*tls.Config, error) { - if certFile == "" || keyFile == "" { - return nil, nil - } - - cert, err := tls.LoadX509KeyPair(certFile, keyFile) - if err != nil { - return nil, err - } - - caCert, err := ioutil.ReadFile(caFile) - if err != nil { - return nil, err - } - - caCertPool, err := x509.SystemCertPool() - if err != nil { - return nil, err - } - caCertPool.AppendCertsFromPEM(caCert) - - return &tls.Config{ - Certificates: []tls.Certificate{cert}, - RootCAs: caCertPool, - InsecureSkipVerify: skipVerify, - }, nil -} From 7ca6be1f285fcc4db9daa3f60b3d168960aad905 Mon Sep 17 00:00:00 2001 From: TiganeteaRobert Date: Wed, 11 May 2022 14:06:46 +0300 Subject: [PATCH 03/25] Use Snyk test to block release if there are vulnerabilities (closes #119) --- .github/workflows/cd.yml | 8 ++++++++ .github/workflows/ci.yml | 10 +++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index 0620dcb1..cb7e20d6 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -49,6 +49,14 @@ jobs: echo "VERSION file ${{steps.version.outputs.VERSION_FILE}} does not match tagged version ${{ github.ref }}" exit 1 + - name: Snyk Setup + uses: snyk/actions/setup@master + + - name: Run Snyk to check for vulnerabilities + run: snyk test --project-name=stream-replicator --severity-threshold=high + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + - name: Compile run: make all diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ee02cc62..654ecdc5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,6 +27,14 @@ jobs: - name: Checkout code uses: actions/checkout@v2 + - name: Snyk Setup + uses: snyk/actions/setup@master + + - name: Run Snyk 
to check for vulnerabilities + run: snyk test --project-name=stream-replicator --severity-threshold=high + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + - name: Cache go modules uses: actions/cache@v2 with: @@ -60,4 +68,4 @@ jobs: run: make lint - name: Compile all targets - run: make all + run: make all \ No newline at end of file From 5e8a8bfd9409b856d55efda26745469fc9cb3499 Mon Sep 17 00:00:00 2001 From: TiganeteaRobert Date: Thu, 28 Apr 2022 14:28:36 +0300 Subject: [PATCH 04/25] =?UTF-8?q?Extend=20filtering=20to=20use=20custom=20?= =?UTF-8?q?data=C2=A0(closes=20#176)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- config/config.go | 12 ++ config/config_test.go | 2 +- go.mod | 2 +- go.sum | 11 +- pkg/transform/snowplow_enriched_filter.go | 143 +++++++++++++++--- .../snowplow_enriched_filter_test.go | 75 ++++++++- pkg/transform/snowplow_enriched_util.go | 16 ++ pkg/transform/transform_test_variables.go | 6 +- 8 files changed, 230 insertions(+), 37 deletions(-) diff --git a/config/config.go b/config/config.go index ff82e5ed..0b524773 100644 --- a/config/config.go +++ b/config/config.go @@ -320,6 +320,18 @@ func (c *Config) GetTransformations() (transform.TransformationApplyFunction, er return nil, err } funcs = append(funcs, filterFunc) + case "spEnrichedFilterContext": + filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(funcOpts[1]) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + case "spEnrichedFilterUnstructEvent": + filterFunc, err := transform.NewSpEnrichedFilterFunctionUnstructEvent(funcOpts[1]) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) case "none": default: return nil, errors.New(fmt.Sprintf("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got '%s'", c.Data.Transformation)) diff --git a/config/config_test.go b/config/config_test.go index ec242f8e..ce03c7d8 
100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -110,7 +110,7 @@ func TestNewConfig_FilterFailure(t *testing.T) { transformation, err := c.GetTransformations() assert.Nil(transformation) assert.NotNil(err) - assert.Equal(`Invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]`, err.Error()) + assert.Equal(`invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]`, err.Error()) } func TestNewConfig_InvalidTarget(t *testing.T) { diff --git a/go.mod b/go.mod index 28ee2387..5cd3b36e 100644 --- a/go.mod +++ b/go.mod @@ -33,7 +33,6 @@ require ( github.com/smira/go-statsd v1.3.2 github.com/snowplow-devops/go-retry v0.0.0-20210106090855-8989bbdbae1c github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a - github.com/snowplow/snowplow-golang-analytics-sdk v0.1.0 github.com/stretchr/testify v1.7.0 github.com/twinj/uuid v1.0.0 github.com/twitchscience/kinsumer v0.0.0-20210611163023-da24975e2c91 @@ -54,6 +53,7 @@ require ( require ( github.com/davecgh/go-spew v1.1.1 github.com/hashicorp/hcl/v2 v2.11.1 + github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2 github.com/zclconf/go-cty v1.10.0 ) diff --git a/go.sum b/go.sum index 23ae8f5f..78450e5e 100644 --- a/go.sum +++ b/go.sum @@ -131,8 +131,6 @@ github.com/aws/aws-sdk-go v1.25.19/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpi github.com/aws/aws-sdk-go v1.40.22 h1:iit4tJ1hjL2GlNCrbE4aJza6jTmvEE2pDTnShct/yyY= github.com/aws/aws-sdk-go v1.40.22/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= -github.com/caarlos0/env/v6 v6.6.2 h1:BypLXDWQTA32rS4UM7pBz+/0BOuvs6C7LSeQAxMwyvI= -github.com/caarlos0/env/v6 v6.6.2/go.mod h1:P0BVSgU9zfkxfSpFUs6KsO3uWR4k3Ac0P66ibAGTybM= github.com/caarlos0/env/v6 v6.9.1 
h1:zOkkjM0F6ltnQ5eBX6IPI41UP/KDGEK7rRPwGCNos8k= github.com/caarlos0/env/v6 v6.9.1/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Zu0ddvwc= github.com/cactus/go-statsd-client/statsd v0.0.0-20190922113730-52b467de415c/go.mod h1:D4RDtP0MffJ3+R36OkGul0LwJLIN8nRb0Ac6jZmJCmo= @@ -208,6 +206,7 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= +github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= @@ -379,12 +378,11 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/matryer/is v1.4.0 
h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= -github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= @@ -441,6 +439,7 @@ github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g= +github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= @@ -457,8 +456,8 @@ github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a h1:9 github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a/go.mod h1:7/jMxl0yrvgiUlv5L37fw6pql71aNh55sKQc4kBFj5s= github.com/snowplow-devops/kinsumer v1.3.0 h1:uN8PPG8EffKjcfTcDqsHWnnsTFvYGMU39XlDPULIQcA= github.com/snowplow-devops/kinsumer v1.3.0/go.mod h1:SebvcasLweQnOygk9SOFkM/JjBtXFviUxoAq19CwrHQ= -github.com/snowplow/snowplow-golang-analytics-sdk v0.1.0 h1:FA8xHSHzoshF3fJDK9tqUDnuBmyqTiGPRLvIaRQMk2I= -github.com/snowplow/snowplow-golang-analytics-sdk v0.1.0/go.mod h1:Z8ZW805JGCYhnq1wnHe2PIiamUnvoNtAtXPWNyS0mV8= +github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2 h1:ehPNYJ4tOq+n4Lj8jtentKS4UzzvRv5iQ8vlESQj8qw= +github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2/go.mod 
h1:Z8ZW805JGCYhnq1wnHe2PIiamUnvoNtAtXPWNyS0mV8= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= diff --git a/pkg/transform/snowplow_enriched_filter.go b/pkg/transform/snowplow_enriched_filter.go index 8a807bb6..b52a5bd5 100644 --- a/pkg/transform/snowplow_enriched_filter.go +++ b/pkg/transform/snowplow_enriched_filter.go @@ -13,21 +13,67 @@ import ( "strings" "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" ) -// NewSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. +func findSpEnrichedFilterValue(queriedField, parsedEventName, eventVer, field string, parsedMessage analytics.ParsedEvent, path []interface{}) ([]interface{}, error) { + var vf interface{} + var valueFound []interface{} + var err error + + switch { + case strings.HasPrefix(queriedField, `contexts_`): + vf, err = parsedMessage.GetContextValue(queriedField, path...) + valueFound = append(valueFound, vf.([]interface{})...) + case strings.HasPrefix(queriedField, `unstruct_event`): + eventNameFull := `unstruct_event_` + parsedEventName + if queriedField == eventNameFull || queriedField == eventNameFull+`_`+eventVer { + vf, err = parsedMessage.GetUnstructEventValue(path...) + valueFound = append(valueFound, vf) + } + default: + vf, err = parsedMessage.GetValue(field) + valueFound = append(valueFound, vf) + } + if err != nil { + // GetValue returns an error if the field requested is empty. 
Check for that particular error before returning error + if err.Error() == analytics.EmptyFieldErr { + return nil, nil + } + return nil, err + } + return valueFound, nil +} + +func evaluateSpEnrichedFilter(valuesToMatch string, valuesFound []interface{}, isNegationFilter, shouldKeepMessage *bool) { + for _, valueToMatch := range strings.Split(valuesToMatch, "|") { + for _, v := range valuesFound { + if fmt.Sprintf("%v", v) == valueToMatch { + // Once config value is matched once, change shouldKeepMessage, and stop looking for matches + if *isNegationFilter { + *shouldKeepMessage = false + } else { + *shouldKeepMessage = true + } + return + + } + } + } +} + +// createSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. // The filterconfig should describe the conditions for including a message. // For example "aid=abc|def" includes all events with app IDs of abc or def, and filters out the rest. // aid!=abc|def includes all events whose app IDs do not match abc or def, and filters out the rest. -func NewSpEnrichedFilterFunction(filterConfig string) (TransformationFunction, error) { - +func createSpEnrichedFilterFunction(filterConfig string, isUnstructEvent bool, isContext bool) (TransformationFunction, error) { // This regex prevents whitespace characters in the value provided regex := `\S+(!=|==)[^\s\|]+((?:\|[^\s|]+)*)$` re := regexp.MustCompile(regex) if !(re.MatchString(filterConfig)) { // If invalid, return an error which will be returned by the main function - return nil, errors.New("Invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]") + return nil, errors.New("invalid filter function config, must be of the format {field name}=={value}[|{value}|...] 
or {field name}!={value}[|{value}|...]") } // Check for a negation condition first @@ -48,43 +94,90 @@ func NewSpEnrichedFilterFunction(filterConfig string) (TransformationFunction, e // Start by resetting shouldKeepMessage to isNegationFilter shouldKeepMessage := isNegationFilter - // Evalute intermediateState to parsedEvent + // Evaluate intermediateState to parsedEvent parsedMessage, parseErr := intermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { message.SetError(parseErr) return nil, nil, message, nil } - valueFound, err := parsedMessage.GetValue(keyValues[0]) + // This regex retrieves the path fields + // (e.g. field1.field2[0].field3 -> [field1, field2, 0, field3]) + regex = `\w+` + re = regexp.MustCompile(regex) - // GetValue returns an error if the field requested is empty. Check for that particular error before failing the message. - if err != nil && err.Error() == fmt.Sprintf("Field %s is empty", keyValues[0]) { - valueFound = nil - } else if err != nil { - message.SetError(err) - return nil, nil, message, nil + // separate the path string into words using regex + path := re.FindAllString(keyValues[0], -1) + separatedPath := make([]string, len(path)-1) + for idx, pathField := range path[1:] { + separatedPath[idx] = pathField } - evaluation: - for _, valueToMatch := range strings.Split(keyValues[1], "|") { - if valueToMatch == fmt.Sprintf("%v", valueFound) { // coerce to string as valueFound may be any type found in a Snowplow event - if isNegationFilter { - shouldKeepMessage = false - } else { - shouldKeepMessage = true - } - break evaluation - // Once config value is matched once, change shouldKeepMessage, and stop looking for matches + var parsedEventName string + var eventMajorVer string + var err error + + // only call SDK functions if an unstruct_event is being filtered + if isUnstructEvent { + // get event name + eventName, err := parsedMessage.GetValue(`event_name`) + if err != nil { + message.SetError(err) + return nil, 
nil, message, nil + } + parsedEventName = eventName.(string) + // get event version + fullEventVer, err := parsedMessage.GetValue(`event_version`) + if err != nil { + message.SetError(err) + return nil, nil, message, nil + } + // get the major event version + eventMajorVer = strings.Split(fullEventVer.(string), `-`)[0] + if eventMajorVer == `` { + message.SetError(fmt.Errorf(`invalid schema version format: %s`, fullEventVer)) + return nil, nil, message, nil } } - // If message is not to be kept, return it as a filtered message to be acked in the main function - if !shouldKeepMessage { + // find the value in the event + valueFound, err := findSpEnrichedFilterValue( + path[0], + parsedEventName, + eventMajorVer, + keyValues[0], + parsedMessage, + convertPathToInterfaces(separatedPath), + ) + if err != nil { + message.SetError(err) + return nil, nil, message, nil + } + + // evaluate whether the found value passes the filter, determining if the message should be kept + evaluateSpEnrichedFilter(keyValues[1], valueFound, &isNegationFilter, &shouldKeepMessage) + // if message is not to be kept, return it as a filtered message to be acked in the main function + if !shouldKeepMessage { return nil, message, nil, nil } - // Otherwise, return the message and intermediateState for further processing. + // otherwise, return the message and intermediateState for further processing. return message, nil, nil, parsedMessage }, nil } + +// NewSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. 
+func NewSpEnrichedFilterFunction(filterConfig string) (TransformationFunction, error) { + return createSpEnrichedFilterFunction(filterConfig, false, false) +} + +// NewSpEnrichedFilterFunctionContext returns a TransformationFunction for filtering a context +func NewSpEnrichedFilterFunctionContext(filterConfig string) (TransformationFunction, error) { + return createSpEnrichedFilterFunction(filterConfig, false, true) +} + +// NewSpEnrichedFilterFunctionUnstructEvent returns a TransformationFunction for filtering an unstruct_event +func NewSpEnrichedFilterFunctionUnstructEvent(filterConfig string) (TransformationFunction, error) { + return createSpEnrichedFilterFunction(filterConfig, true, false) +} diff --git a/pkg/transform/snowplow_enriched_filter_test.go b/pkg/transform/snowplow_enriched_filter_test.go index 32da7ac2..e8c0f825 100644 --- a/pkg/transform/snowplow_enriched_filter_test.go +++ b/pkg/transform/snowplow_enriched_filter_test.go @@ -21,6 +21,16 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { PartitionKey: "some-key", } + var messageGoodInt = models.Message{ + Data: snowplowTsv4, + PartitionKey: "some-key", + } + + var messageWithUnstructEvent = models.Message{ + Data: snowplowTsv1, + PartitionKey: "some-key", + } + // Single value cases aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id==test-data3") @@ -116,11 +126,74 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Equal(snowplowTsv3, nilNegationIn.Data) assert.Nil(nilNegationOut) assert.Nil(fail8) + + // context filter success + contextFuncKeep, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3==testValue") + + contextKeepIn, contextKeepOut, fail9, _ := contextFuncKeep(&messageGood, nil) + + assert.Equal(snowplowTsv3, contextKeepIn.Data) + assert.Nil(contextKeepOut) + assert.Nil(fail9) + + // context filter success (integer value) + contextFuncKeep, _ = 
NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3==1") + + contextKeepIn, contextKeepOut, fail9, _ = contextFuncKeep(&messageGoodInt, nil) + + assert.Equal(snowplowTsv4, contextKeepIn.Data) + assert.Nil(contextKeepOut) + assert.Nil(fail9) + + // context filter failure + contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_2.test1.test2[0].test3==testValue") + + contextKeepIn, contextKeepOut, fail9, _ = contextFuncKeep(&messageGood, nil) + + assert.Nil(contextKeepIn) + assert.Equal(snowplowTsv3, contextKeepOut.Data) + assert.Nil(fail9) + + // event filter success, filtered event name + eventFilterFunCkeep, _ := NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart_1.sku==item41") + + eventKeepIn, eventKeepOut, fail10, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) + + assert.Equal(snowplowTsv1, eventKeepIn.Data) + assert.Nil(eventKeepOut) + assert.Nil(fail10) + + // event filter success, filtered event name, no event ver + eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.sku==item41") + + eventKeepIn, eventKeepOut, fail10, _ = eventFilterFunCkeep(&messageWithUnstructEvent, nil) + + assert.Equal(snowplowTsv1, eventKeepIn.Data) + assert.Nil(eventKeepOut) + assert.Nil(fail10) + + // event filter failure, wrong event name + eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_wrong_name.sku==item41") + + eventKeepIn, eventKeepOut, fail11, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) + + assert.Nil(eventKeepIn) + assert.Equal(snowplowTsv1, eventKeepOut.Data) + assert.Nil(fail11) + + // event filter failure, field not found + eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.ska==item41") + + eventNoFieldIn, eventNoFieldOut, fail12, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) + + assert.Nil(eventNoFieldIn) + 
assert.Nil(eventNoFieldOut) + assert.NotNil(fail12) } func TestNewSpEnrichedFilterFunction_Error(t *testing.T) { assert := assert.New(t) - error := `Invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]` + error := `invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]` filterFunc, err1 := NewSpEnrichedFilterFunction("") diff --git a/pkg/transform/snowplow_enriched_util.go b/pkg/transform/snowplow_enriched_util.go index 3f374d4d..d0d5cc6e 100644 --- a/pkg/transform/snowplow_enriched_util.go +++ b/pkg/transform/snowplow_enriched_util.go @@ -9,6 +9,7 @@ package transform import ( "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" + "strconv" ) func intermediateAsSpEnrichedParsed(intermediateState interface{}, message *models.Message) (analytics.ParsedEvent, error) { @@ -23,3 +24,18 @@ func intermediateAsSpEnrichedParsed(intermediateState interface{}, message *mode } return parsedMessage, nil } + +// convertPathToInterfaces converts a slice of strings representing a path to a slice of interfaces to be used +// by the SDK Get() function +func convertPathToInterfaces(path []string) []interface{} { + var output []interface{} + for _, pathField := range path { + pathFieldInt, err := strconv.Atoi(pathField) + if err != nil { + output = append(output, pathField) + } else { + output = append(output, pathFieldInt) + } + } + return output +} diff --git a/pkg/transform/transform_test_variables.go b/pkg/transform/transform_test_variables.go index 57f67336..9302f063 100644 --- a/pkg/transform/transform_test_variables.go +++ b/pkg/transform/transform_test_variables.go @@ -14,14 +14,14 @@ import ( var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 
ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) var spTsv1Parsed, _ = analytics.ParseEvent(string(snowplowTsv1)) var snowplowJSON1 = []byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) - var snowplowTsv2 = []byte(`test-data2 pc 2019-05-10 14:40:32.392 2019-05-10 14:40:31.105 2019-05-10 14:40:30.218 transaction_item 5071169f-3050-473f-b03f-9748319b1ef2 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 68220ade-307b-4898-8e25-c4c8ac92f1d7 transaction item58 35.87 1 python-requests/2.21.0 2019-05-10 14:40:30.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:31.105 com.snowplowanalytics.snowplow transaction_item jsonschema 1-0-0 `) var spTsv2Parsed, _ = analytics.ParseEvent(string(snowplowTsv2)) var snowplowJSON2 = []byte(`{"app_id":"test-data2","collector_tstamp":"2019-05-10T14:40:31.105Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:31.105Z","dvce_created_tstamp":"2019-05-10T14:40:30.218Z","dvce_sent_tstamp":"2019-05-10T14:40:30Z","etl_tstamp":"2019-05-10T14:40:32.392Z","event":"transaction_item","event_format":"jsonschema","event_id":"5071169f-3050-473f-b03f-9748319b1ef2","event_name":"transaction_item","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"68220ade-307b-4898-8e25-c4c8ac92f1d7","platform":"pc","ti_orderid":"transaction\u003cbuilt-in function input\u003e","ti_price":35.87,"ti_quantity":1,"ti_sku":"item58","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) -var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view 
e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) +var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 
2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) var spTsv3Parsed, _ = analytics.ParseEvent(string(snowplowTsv3)) -var snowplowJSON3 = []byte(`{"app_id":"test-data3","collector_tstamp":"2019-05-10T14:40:29.576Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:29.576Z","dvce_created_tstamp":"2019-05-10T14:40:29.204Z","dvce_sent_tstamp":"2019-05-10T14:40:29Z","etl_tstamp":"2019-05-10T14:40:30.836Z","event":"page_view","event_format":"jsonschema","event_id":"e8aef68d-8533-45c6-a672-26a0f01be9bd","event_name":"page_view","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"b66c4a12-8584-4c7a-9a5d-7c96f59e2556","page_title":"landing-page","page_url":"www.demo-site.com/campaign-landing-page","page_urlpath":"www.demo-site.com/campaign-landing-page","page_urlport":80,"platform":"pc","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowJSON3 = []byte(`{"app_id":"test-data3","collector_tstamp":"2019-05-10T14:40:29.576Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}],"derived_tstamp":"2019-05-10T14:40:29.576Z","dvce_created_tstamp":"2019-05-10T14:40:29.204Z","dvce_sent_tstamp":"2019-05-10T14:40:29Z","etl_tstamp":"2019-05-10T14:40:30.836Z","event":"page_view","event_format":"jsonschema","event_id":"e8aef68d-8533-45c6-a672-26a0f01be9bd","event_name":"page_view","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"b66c4a12-8584-4c7a-9a5d-7c96f59e2556","page_title":"landing-page","page_url":"www.demo-site.com/campaign-landing-page","page_urlpath":"www.demo-site.com/campaign-landing-page","page_urlport":80,"platform":"pc","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv4 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":1}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) var nonSnowplowString = []byte(`not a snowplow event`) From 1df4085c84d7919115a84acc4943cf4bc84475f3 Mon Sep 17 00:00:00 2001 From: TiganeteaRobert Date: Fri, 20 May 2022 14:47:31 +0300 Subject: [PATCH 05/25] Add telemetry (closes #124) --- cmd/cli/cli.go | 7 +- config/config.go | 7 +- config/examples/README.md | 5 ++ go.mod | 8 ++- go.sum | 41 +++++------- pkg/telemetry/const.go | 17 +++++ pkg/telemetry/telemetry.go | 132 +++++++++++++++++++++++++++++++++++++ 7 files changed, 189 insertions(+), 28 deletions(-) create mode 100644 pkg/telemetry/const.go create mode 100644 pkg/telemetry/telemetry.go diff --git a/cmd/cli/cli.go b/cmd/cli/cli.go index 290e8448..32bb9e37 100644 --- a/cmd/cli/cli.go +++ b/cmd/cli/cli.go @@ -29,6 +29,7 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" "github.com/snowplow-devops/stream-replicator/pkg/target/targetiface" + "github.com/snowplow-devops/stream-replicator/pkg/telemetry" "github.com/snowplow-devops/stream-replicator/pkg/transform" ) @@ -106,6 +107,8 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { } o.Start() + stopTelemetry := telemetry.InitTelemetryWithCollector(cfg) + // Handle SIGTERM sig := make(chan os.Signal) signal.Notify(sig, os.Interrupt, syscall.SIGTERM, os.Kill) @@ -122,7 +125,8 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { select { case <-stop: log.Debug("source.Stop() finished successfully!") - + + stopTelemetry() err := common.DeleteTemporaryDir() if err != nil { 
log.Debugf(`error deleting tmp directory: %v`, err) @@ -133,6 +137,7 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { t.Close() ft.Close() o.Stop() + stopTelemetry() err := common.DeleteTemporaryDir() if err != nil { diff --git a/config/config.go b/config/config.go index 0b524773..89014142 100644 --- a/config/config.go +++ b/config/config.go @@ -44,6 +44,8 @@ type ConfigurationData struct { Transformation string `hcl:"message_transformation,optional" env:"MESSAGE_TRANSFORMATION"` LogLevel string `hcl:"log_level,optional" env:"LOG_LEVEL"` GoogleServiceAccountB64 string `hcl:"google_application_credentials_b64,optional" env:"GOOGLE_APPLICATION_CREDENTIALS_B64"` + UserProvidedID string `hcl:"user_provided_id,optional" env:"USER_PROVIDED_ID"` + DisableTelemetry bool `hcl:"disable_telemetry,optional" env:"DISABLE_TELEMETRY"` } // Component is a type to abstract over configuration blocks. @@ -97,8 +99,9 @@ func defaultConfigData() *ConfigurationData { TimeoutSec: 1, BufferSec: 15, }, - Transformation: "none", - LogLevel: "info", + Transformations: nil, + LogLevel: "info", + DisableTelemetry: false, } } diff --git a/config/examples/README.md b/config/examples/README.md index 5c2ae43f..3ce0646b 100644 --- a/config/examples/README.md +++ b/config/examples/README.md @@ -57,6 +57,9 @@ log_level = "info" // Ability to provide a GCP service account (b64) to the application directly google_application_credentials = "" + +// Optional parameter that helps us categorise telemetry events +user_provided_id = "" ``` So, a complete example could be: @@ -97,6 +100,8 @@ sentry { } log_level = "debug" + +user_provided_id = "my-example-id" ``` In the example files in this directory, there is a simple and extended version for configuring each: diff --git a/go.mod b/go.mod index 5cd3b36e..3df09baa 100644 --- a/go.mod +++ b/go.mod @@ -27,12 +27,12 @@ require ( github.com/klauspost/compress v1.15.0 // indirect github.com/mitchellh/mapstructure v1.4.1 // indirect 
github.com/myesui/uuid v1.0.0 // indirect - github.com/pierrec/lz4 v2.6.1+incompatible // indirect github.com/pkg/errors v0.9.1 github.com/sirupsen/logrus v1.8.1 github.com/smira/go-statsd v1.3.2 github.com/snowplow-devops/go-retry v0.0.0-20210106090855-8989bbdbae1c github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a + github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2 github.com/stretchr/testify v1.7.0 github.com/twinj/uuid v1.0.0 github.com/twitchscience/kinsumer v0.0.0-20210611163023-da24975e2c91 @@ -53,7 +53,7 @@ require ( require ( github.com/davecgh/go-spew v1.1.1 github.com/hashicorp/hcl/v2 v2.11.1 - github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2 + github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1 github.com/zclconf/go-cty v1.10.0 ) @@ -73,7 +73,10 @@ require ( github.com/golang/protobuf v1.5.2 // indirect github.com/google/go-cmp v0.5.6 // indirect github.com/googleapis/gax-go/v2 v2.0.5 // indirect + github.com/hashicorp/go-immutable-radix v1.1.0 // indirect + github.com/hashicorp/go-memdb v1.0.4 // indirect github.com/hashicorp/go-uuid v1.0.2 // indirect + github.com/hashicorp/golang-lru v0.5.1 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect github.com/jcmturner/gofork v1.0.0 // indirect @@ -82,6 +85,7 @@ require ( github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/json-iterator/go v1.1.11 // indirect github.com/jstemmer/go-junit-report v0.9.1 // indirect + github.com/mattn/go-sqlite3 v2.0.2+incompatible // indirect github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.1 // indirect diff --git a/go.sum b/go.sum index 78450e5e..fda3c53b 100644 --- a/go.sum +++ b/go.sum @@ -108,12 +108,9 @@ github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMd github.com/Joker/hpp 
v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= -github.com/Shopify/sarama v1.29.1 h1:wBAacXbYVLmWieEA/0X/JagDdCZ8NVFOfS6l6+2u5S0= -github.com/Shopify/sarama v1.29.1/go.mod h1:mdtqvCSg8JOxk8PmpTNGyo6wzd4BMm4QXSfDnTXmgkE= github.com/Shopify/sarama v1.34.0 h1:j4zTaFHFnfvuV2fdLZyXqIg0Tu4Mzl9f064Z5/H+o4o= github.com/Shopify/sarama v1.34.0/go.mod h1:V2ceE9UupUf4/oP1Z38SI49fAnD0/MtkqDDHvolIeeQ= -github.com/Shopify/toxiproxy v2.1.4+incompatible h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc= -github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/Shopify/toxiproxy/v2 v2.3.0 h1:62YkpiP4bzdhKMH+6uC5E95y608k3zDwdzuBMsnn3uQ= github.com/Shopify/toxiproxy/v2 v2.3.0/go.mod h1:KvQTtB6RjCJY4zqNJn7C7JDFgsG5uoHYDirfUfpIm0c= github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= @@ -189,8 +186,6 @@ github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9 github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= -github.com/frankban/quicktest v1.11.3 h1:8sXhOn0uLys67V8EsXLc6eszDs8VXWxL3iRvebPhedY= -github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= github.com/getsentry/sentry-go v0.9.0/go.mod 
h1:kELm/9iCblqUYh+ZRML7PNdCvEuw24wBvJPYyi86cws= @@ -303,12 +298,18 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-immutable-radix v1.1.0 h1:vN9wG1D6KG6YHRTWr8512cxGOVgTMEfgEdSj/hr8MPc= +github.com/hashicorp/go-immutable-radix v1.1.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-memdb v1.0.4 h1:sIdJHAEtV3//iXcUb4LumSQeorYos5V0ptvqvQvFgDA= +github.com/hashicorp/go-memdb v1.0.4/go.mod h1:LWQ8R70vPrS4OEY9k28D2z8/Zzyu34NVzeRibGAzHO0= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= @@ -324,6 +325,8 @@ github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/ github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk= 
github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g= github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= +github.com/jarcoal/httpmock v1.0.4 h1:jp+dy/+nonJE4g4xbVtl9QdrUNbn6/3hDT5R4nDIZnA= +github.com/jarcoal/httpmock v1.0.4/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8= github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs= github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo= @@ -363,16 +366,13 @@ github.com/kataras/sitemap v0.0.5/go.mod h1:KY2eugMKiPwsJgx7+U103YZehfvNGOXURubc github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.12.2/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.13.4 h1:0zhec2I8zGnjWcKyLl6i3gPqKANCCn5e9xmviEEeX6s= -github.com/klauspost/compress v1.13.4/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.15.0 h1:xqfchp4whNFxn5A4XFyyYtitiWI8Hy5EW59jEwcyL6U= github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= github.com/kr/pretty 
v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= @@ -387,6 +387,8 @@ github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVc github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= +github.com/mattn/go-sqlite3 v2.0.2+incompatible h1:qzw9c2GNT8UFrgWNDhCTqRqYUSmu/Dav/9Z58LGpk7U= +github.com/mattn/go-sqlite3 v2.0.2+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= github.com/mediocregopher/radix/v3 v3.4.2/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8= github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= @@ -414,9 +416,6 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM= -github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4/v4 v4.1.14 h1:+fL8AQEZtz/ijeNnpduH0bROTu0O3NZAlPjQxGn8LwE= github.com/pierrec/lz4/v4 v4.1.14/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= @@ -431,6 +430,7 
@@ github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5X github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= @@ -458,6 +458,8 @@ github.com/snowplow-devops/kinsumer v1.3.0 h1:uN8PPG8EffKjcfTcDqsHWnnsTFvYGMU39X github.com/snowplow-devops/kinsumer v1.3.0/go.mod h1:SebvcasLweQnOygk9SOFkM/JjBtXFviUxoAq19CwrHQ= github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2 h1:ehPNYJ4tOq+n4Lj8jtentKS4UzzvRv5iQ8vlESQj8qw= github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2/go.mod h1:Z8ZW805JGCYhnq1wnHe2PIiamUnvoNtAtXPWNyS0mV8= +github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1 h1:bp1MynC4BkywqTfpt4wddqZxtN4U7d3UUqxjalcGR1s= +github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1/go.mod h1:/74pOlgs8xon7CAWihi1peUflolbKSSy2Fu/UDF4PgI= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= @@ -490,12 +492,12 @@ github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyC github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/valyala/tcplisten 
v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= -github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= -github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= -github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= +github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= +github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= +github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= github.com/xdg/scram v1.0.3 h1:nTadYh2Fs4BK2xdldEa2g5bbaZp0/+1nJMMPtPxS/to= github.com/xdg/scram v1.0.3/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v1.0.3 h1:cmL5Enob4W83ti/ZHuZLuKD/xqJfus4fVPwE+/BDm+4= @@ -541,9 +543,6 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210812204632-0ba0e8f03122 h1:AOT7vJYHE32m61R8d1WlcqhOO1AocesDsKpcMq+UOaA= -golang.org/x/crypto v0.0.0-20210812204632-0ba0e8f03122/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220214200702-86341886e292 
h1:f+lwQ+GtmgoY+A2YaQxlSOnDjXcQ7ZRLWOHbC6HtRqE= golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -626,8 +625,6 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLd golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d h1:LO7XpTYMwTqxjLcGWPijK3vRXg1aWdlNOVOHRq45d7c= -golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= @@ -711,8 +708,6 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e h1:XMgFehsDnnLGtjvjOfqWSUzt0alpTR1RSEuznObga2c= -golang.org/x/sys v0.0.0-20210831042530-f4d43177bf5e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= diff --git a/pkg/telemetry/const.go b/pkg/telemetry/const.go new file mode 100644 index 00000000..528d1486 --- /dev/null +++ b/pkg/telemetry/const.go @@ -0,0 +1,17 @@ +package telemetry + +import ( + "time" + + "github.com/snowplow-devops/stream-replicator/cmd" +) + +var ( + interval = time.Hour + method = "POST" + protocol = "https" + url = "telemetry-g.snowplowanalytics.com" + port = "443" + applicationName = "stream-replicator" + applicationVersion = cmd.AppVersion +) diff --git a/pkg/telemetry/telemetry.go b/pkg/telemetry/telemetry.go new file mode 100644 index 00000000..c26ff6bc --- /dev/null +++ b/pkg/telemetry/telemetry.go @@ -0,0 +1,132 @@ +package telemetry + +import ( + "fmt" + "net/http" + "time" + + log "github.com/sirupsen/logrus" + conf "github.com/snowplow-devops/stream-replicator/config" + gt "github.com/snowplow/snowplow-golang-tracker/v2/tracker" + "github.com/twinj/uuid" +) + +// config holds the configuration for telemetry +type config struct { + disable bool + interval time.Duration + method string + url string + protocol string + port string + userProvidedID string + applicationName string + applicationVersion string + appGeneratedID string +} + +func newTelemetryWithConfig(cfg *conf.Config) *config { + return &config{ + disable: cfg.Data.DisableTelemetry, + interval: interval, + method: method, + protocol: protocol, + url: url, + port: port, + userProvidedID: cfg.Data.UserProvidedID, + applicationName: applicationName, + applicationVersion: applicationVersion, + appGeneratedID: uuid.NewV4().String(), + } +} + +func initTelemetry(telemetry *config) func() { + storage := gt.InitStorageMemory() + emitter := gt.InitEmitter( + gt.RequireCollectorUri(fmt.Sprintf(`%s:%s`, telemetry.url, telemetry.port)), + gt.OptionRequestType(telemetry.method), + gt.OptionProtocol(telemetry.protocol), + 
gt.OptionCallback(func(goodResults []gt.CallbackResult, badResults []gt.CallbackResult) { + for _, goodResult := range goodResults { + if goodResult.Status != http.StatusOK { + log.WithFields(log.Fields{ + "error_code": goodResult.Status, + }).Debugf("Error sending telemetry event") + return + } + } + for _, badResult := range badResults { + if badResult.Status != http.StatusOK { + log.WithFields(log.Fields{ + "error_code": badResult.Status, + }).Debugf("Error sending telemetry event") + return + } + } + log.Info(`Telemetry event sent successfully`) + }), + gt.OptionStorage(storage), + ) + + tracker := gt.InitTracker( + gt.RequireEmitter(emitter), + gt.OptionNamespace("telemetry"), + gt.OptionAppId(telemetry.applicationName), + ) + + ticker := time.NewTicker(telemetry.interval) + + stop := make(chan struct{}) + + go func() { + makeAndTrackHeartbeat(telemetry, tracker) + for { + select { + case <-ticker.C: + makeAndTrackHeartbeat(telemetry, tracker) + case <-stop: + return + } + + } + }() + + return func() { + close(stop) + } +} + +func makeAndTrackHeartbeat(telemetry *config, tracker *gt.Tracker) { + event := makeHeartbeatEvent(*telemetry) + + tracker.TrackSelfDescribingEvent(gt.SelfDescribingEvent{ + Event: event, + Timestamp: nil, + EventId: nil, + TrueTimestamp: nil, + Contexts: nil, + Subject: nil, + }) +} + +// InitTelemetryWithCollector initialises telemetry +func InitTelemetryWithCollector(cfg *conf.Config) func() { + telemetry := newTelemetryWithConfig(cfg) + if telemetry.disable { + return func() {} + } + return initTelemetry(telemetry) +} + +func makeHeartbeatEvent(service config) *gt.SelfDescribingJson { + payload := gt.InitPayload() + + payload.Add(`userProvidedId`, &service.userProvidedID) + payload.Add(`applicationName`, &service.applicationName) + payload.Add(`applicationVersion`, &service.applicationVersion) + payload.Add(`appGeneratedId`, &service.appGeneratedID) + + selfDescJSON := gt.InitSelfDescribingJson( + 
`iglu:com.snowplowanalytics.oss/oss_context/jsonschema/1-0-1`, payload.Get()) + return selfDescJSON +} From ecb7dbc7c7e2ca0346992b8530750a421823732d Mon Sep 17 00:00:00 2001 From: TiganeteaRobert Date: Fri, 10 Jun 2022 16:47:00 +0300 Subject: [PATCH 06/25] Remove AWS Lambda and GCP Cloudfunctions builds (closes #140) --- Makefile | 49 +--------------- README.md | 2 +- cmd/aws/lambda/main.go | 35 ------------ cmd/gcp/cloudfunctions/function.go | 33 ----------- cmd/serverless.go | 91 ------------------------------ go.mod | 1 - go.sum | 5 -- 7 files changed, 3 insertions(+), 213 deletions(-) delete mode 100644 cmd/aws/lambda/main.go delete mode 100644 cmd/gcp/cloudfunctions/function.go delete mode 100644 cmd/serverless.go diff --git a/Makefile b/Makefile index 6bf6cc67..2543cf47 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: all gox aws-lambda gcp-cloudfunctions cli cli-linux cli-darwin cli-windows container format lint tidy test-setup test integration-reset integration-up integration-down integration-test container-release clean +.PHONY: all gox cli cli-linux cli-darwin cli-windows container format lint tidy test-setup test integration-reset integration-up integration-down integration-test container-release clean # ----------------------------------------------------------------------------- # CONSTANTS @@ -32,57 +32,12 @@ gcp_container_name = snowplow/stream-replicator-gcp # BUILDING # ----------------------------------------------------------------------------- -all: aws-lambda gcp-cloudfunctions cli container +all: cli container gox: GO111MODULE=on go install github.com/mitchellh/gox@latest mkdir -p $(compiled_dir) -aws-lambda: gox - # WARNING: Binary must be called 'main' to work in Lambda - GO111MODULE=on CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/aws/lambda/main ./cmd/aws/lambda/ - - # Create ZIP file for upload to Lambda - (cd $(linux_out_dir)/aws/lambda/ && zip -r staging.zip main) - mv 
$(linux_out_dir)/aws/lambda/staging.zip $(compiled_dir)/aws_lambda_stream_replicator_$(version)_linux_amd64.zip - -gcp-cloudfunctions: gox - mkdir -p $(staging_dir)/gcp/cloudfunctions - - # Copy dependencies into staging area - cp ./cmd/gcp/cloudfunctions/function.go $(staging_dir)/gcp/cloudfunctions/function.go - - # Get module dependencies in a vendor directory - GO111MODULE=on go mod vendor - cp -R ./$(vendor_dir)/ $(staging_dir)/gcp/cloudfunctions/vendor/ - - # Copy local packages into staging area - mkdir -p $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/ - cp ./cmd/constants.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/constants.go - cp ./cmd/init.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/init.go - cp ./cmd/serverless.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/cmd/serverless.go - - mkdir -p $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/config/ - cp ./config/config.go $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/config/config.go - - mkdir -p $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/ - cp -R ./pkg/ $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/pkg/ - - mkdir -p $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/third_party/snowplow/ - cp -R ./third_party/snowplow/badrows/ $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/third_party/snowplow/badrows - cp -R ./third_party/snowplow/iglu/ $(staging_dir)/gcp/cloudfunctions/vendor/github.com/snowplow-devops/stream-replicator/third_party/snowplow/iglu - - echo "# github.com/snowplow-devops/stream-replicator v$(version)" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo 
"github.com/snowplow-devops/stream-replicator/config" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/cmd" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/pkg" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/third_party/snowplow/badrows" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - echo "github.com/snowplow-devops/stream-replicator/third_party/snowplow/iglu" >> $(staging_dir)/gcp/cloudfunctions/vendor/modules.txt - - # Create ZIP file for upload to CloudFunctions - (cd $(staging_dir)/gcp/cloudfunctions/ && zip -r staging.zip .) - mv $(staging_dir)/gcp/cloudfunctions/staging.zip $(compiled_dir)/gcp_cloudfunctions_stream_replicator_$(version)_linux_amd64.zip - cli: gox cli-linux cli-darwin cli-windows (cd $(linux_out_dir)/aws/cli/ && zip -r staging.zip stream-replicator) mv $(linux_out_dir)/aws/cli/staging.zip $(compiled_dir)/aws_cli_stream_replicator_$(version)_linux_amd64.zip diff --git a/README.md b/README.md index 44f982f9..fcc32981 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ## Overview -Easily replicate data streams wherever you need them to be! This application is available in three different runtimes to facilitate different needs - AWS Lambda, GCP CloudFunctions and as a standalone application. +Easily replicate data streams wherever you need them to be! This application is available as a standalone application. See the [wiki documention](https://github.com/snowplow-devops/stream-replicator/wiki) for details on how to configure and run the application. diff --git a/cmd/aws/lambda/main.go b/cmd/aws/lambda/main.go deleted file mode 100644 index 640fa9c2..00000000 --- a/cmd/aws/lambda/main.go +++ /dev/null @@ -1,35 +0,0 @@ -// PROPRIETARY AND CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. 
-// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. - -package main - -import ( - "context" - - "github.com/aws/aws-lambda-go/events" - "github.com/aws/aws-lambda-go/lambda" - - "github.com/snowplow-devops/stream-replicator/cmd" - "github.com/snowplow-devops/stream-replicator/pkg/models" -) - -func main() { - lambda.Start(HandleRequest) -} - -// HandleRequest processes the Kinesis event and forwards it onto another stream -func HandleRequest(ctx context.Context, event events.KinesisEvent) error { - messages := make([]*models.Message, len(event.Records)) - for i := 0; i < len(messages); i++ { - record := event.Records[i] - messages[i] = &models.Message{ - Data: record.Kinesis.Data, - PartitionKey: record.Kinesis.PartitionKey, - } - } - - return cmd.ServerlessRequestHandler(messages) -} diff --git a/cmd/gcp/cloudfunctions/function.go b/cmd/gcp/cloudfunctions/function.go deleted file mode 100644 index e9795594..00000000 --- a/cmd/gcp/cloudfunctions/function.go +++ /dev/null @@ -1,33 +0,0 @@ -// PROPRIETARY AND CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
- -package cloudfunctions - -import ( - "context" - - "github.com/twinj/uuid" - - "github.com/snowplow-devops/stream-replicator/cmd" - "github.com/snowplow-devops/stream-replicator/pkg/models" -) - -// PubSubMessage is the payload of a Pub/Sub message -type PubSubMessage struct { - Data []byte `json:"data"` -} - -// HandleRequest consumes a Pub/Sub message -func HandleRequest(ctx context.Context, m PubSubMessage) error { - messages := []*models.Message{ - { - Data: m.Data, - PartitionKey: uuid.NewV4().String(), - }, - } - - return cmd.ServerlessRequestHandler(messages) -} diff --git a/cmd/serverless.go b/cmd/serverless.go deleted file mode 100644 index 06b19776..00000000 --- a/cmd/serverless.go +++ /dev/null @@ -1,91 +0,0 @@ -// PROPRIETARY AND CONFIDENTIAL -// -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. - -package cmd - -import ( - "time" - - "github.com/getsentry/sentry-go" - log "github.com/sirupsen/logrus" - - "github.com/snowplow-devops/stream-replicator/pkg/models" -) - -// ServerlessRequestHandler is a common function for all -// serverless implementations to leverage -func ServerlessRequestHandler(messages []*models.Message) error { - cfg, sentryEnabled, err := Init() - if err != nil { - return err - } - if sentryEnabled { - defer sentry.Flush(2 * time.Second) - } - - // --- Setup structs - - t, err := cfg.GetTarget() - if err != nil { - return err - } - t.Open() - - tr, err := cfg.GetTransformations() - if err != nil { - return err - } - - ft, err := cfg.GetFailureTarget(AppName, AppVersion) - if err != nil { - return err - } - ft.Open() - - // --- Process events - - transformed := tr(messages) - // no error as errors should be returned in the failures array of TransformationResult - - // Ack filtered messages with no further action - messagesToFilter := transformed.Filtered - for _, msg := range messagesToFilter { - if msg.AckFunc != nil { - 
msg.AckFunc() - } - } - - res, err := t.Write(transformed.Result) - if err != nil { - log.WithFields(log.Fields{"error": err}).Error(err) - } - - if len(res.Oversized) > 0 { - res2, err := ft.WriteOversized(t.MaximumAllowedMessageSizeBytes(), res.Oversized) - if len(res2.Oversized) != 0 || len(res2.Invalid) != 0 { - log.Fatal("Oversized message transformation resulted in new oversized / invalid messages") - } - if err != nil { - log.WithFields(log.Fields{"error": err}).Error(err) - } - } - - invalid := append(res.Invalid, transformed.Invalid...) - - if len(invalid) > 0 { - res3, err := ft.WriteInvalid(invalid) - if len(res3.Oversized) != 0 || len(res3.Invalid) != 0 { - log.Fatal("Invalid message transformation resulted in new invalid / oversized messages") - } - if err != nil { - log.WithFields(log.Fields{"error": err}).Error(err) - } - } - - t.Close() - ft.Close() - return err -} diff --git a/go.mod b/go.mod index 3df09baa..29b7739b 100644 --- a/go.mod +++ b/go.mod @@ -12,7 +12,6 @@ require ( github.com/Azure/go-autorest/autorest v0.11.19 // indirect github.com/Azure/go-autorest/autorest/adal v0.9.14 // indirect github.com/Shopify/sarama v1.34.0 - github.com/aws/aws-lambda-go v1.26.0 github.com/aws/aws-sdk-go v1.40.22 github.com/caarlos0/env/v6 v6.9.1 github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect diff --git a/go.sum b/go.sum index fda3c53b..7a4bcc94 100644 --- a/go.sum +++ b/go.sum @@ -122,8 +122,6 @@ github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/ github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/aws/aws-lambda-go v1.26.0 h1:6ujqBpYF7tdZcBvPIccs98SpeGfrt/UOVEiexfNIdHA= -github.com/aws/aws-lambda-go v1.26.0/go.mod 
h1:jJmlefzPfGnckuHdXX7/80O3BvUUi12XOkbv4w9SGLU= github.com/aws/aws-sdk-go v1.25.19/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.40.22 h1:iit4tJ1hjL2GlNCrbE4aJza6jTmvEE2pDTnShct/yyY= github.com/aws/aws-sdk-go v1.40.22/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= @@ -148,7 +146,6 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -485,7 +482,6 @@ github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljT github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU= github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli/v2 v2.2.0/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= @@ -938,7 +934,6 @@ gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 
v3.0.0-20191120175047-4206685974f2/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= From 9938cc64dcdc279835a1dba8448331b0402c42bf Mon Sep 17 00:00:00 2001 From: adatzer Date: Sat, 2 Apr 2022 10:35:14 +0300 Subject: [PATCH 07/25] Fail tests on NewConfig error (closes #145) --- config/component_test.go | 13 +++-- config/config_test.go | 52 ++++++++++++++----- pkg/source/kinesis/kinesis_source_test.go | 8 ++- pkg/source/sourceconfig/source_config_test.go | 4 +- pkg/source/sqs/sqs_source_test.go | 10 ++-- pkg/source/stdin/stdin_source_test.go | 4 +- 6 files changed, 67 insertions(+), 24 deletions(-) diff --git a/config/component_test.go b/config/component_test.go index 1292ab74..0e0b7d35 100644 --- a/config/component_test.go +++ b/config/component_test.go @@ -181,7 +181,9 @@ func TestCreateTargetComponentHCL(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } use := c.Data.Target.Use decoderOpts := &DecoderOptions{ @@ -260,7 +262,9 @@ func TestCreateFailureTargetComponentENV(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } assert.Equal(c.Data.FailureTarget.Target.Name, "kafka") decoderOpts := &DecoderOptions{ @@ -305,7 +309,10 @@ func TestCreateObserverComponentHCL(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - 
assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + assert.Equal(c.Data.StatsReceiver.TimeoutSec, 2) assert.Equal(c.Data.StatsReceiver.BufferSec, 20) diff --git a/config/config_test.go b/config/config_test.go index ce03c7d8..3cdec05a 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -20,11 +20,13 @@ func TestNewConfig(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } assert.Equal("info", c.Data.LogLevel) assert.Equal("stdout", c.Data.Target.Use.Name) - assert.Equal("none", c.Data.Transformation) + assert.Equal("none", c.Data.Transform.Transformation) assert.Equal("stdin", c.Data.Source.Use.Name) // Tests on sources moved to the source package. @@ -60,7 +62,9 @@ func TestNewConfig_FromEnv(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } assert.Equal("debug", c.Data.LogLevel) assert.Equal("kinesis", c.Data.Target.Use.Name) @@ -88,7 +92,9 @@ func TestNewConfig_InvalidTransformation(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } transformation, err := c.GetTransformations() assert.Nil(transformation) @@ -105,7 +111,9 @@ func TestNewConfig_FilterFailure(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } transformation, err := c.GetTransformations() assert.Nil(transformation) @@ -122,7 +130,9 @@ func TestNewConfig_InvalidTarget(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } source, err := c.GetTarget() assert.Nil(source) @@ -139,7 +149,9 @@ 
func TestNewConfig_InvalidFailureTarget(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } source, err := c.GetFailureTarget("testAppName", "0.0.0") assert.Nil(source) @@ -156,7 +168,9 @@ func TestNewConfig_InvalidFailureFormat(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } source, err := c.GetFailureTarget("testAppName", "0.0.0") assert.Nil(source) @@ -173,7 +187,9 @@ func TestNewConfig_InvalidStatsReceiver(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } source, err := c.GetObserver(map[string]string{}) assert.Nil(source) @@ -186,7 +202,9 @@ func TestNewConfig_GetTags(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } tags, err := c.GetTags() assert.NotNil(tags) @@ -208,7 +226,9 @@ func TestNewConfig_Hcl_invalids(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } t.Run("invalid_transformation", func(t *testing.T) { transformation, err := c.GetTransformations() @@ -241,7 +261,9 @@ func TestNewConfig_Hcl_defaults(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } assert.Equal(c.Data.Source.Use.Name, "stdin") assert.Equal(c.Data.Target.Use.Name, "stdout") @@ -251,7 +273,7 @@ func TestNewConfig_Hcl_defaults(t *testing.T) { assert.Equal(c.Data.Sentry.Debug, false) assert.Equal(c.Data.StatsReceiver.TimeoutSec, 1) assert.Equal(c.Data.StatsReceiver.BufferSec, 15) - assert.Equal(c.Data.Transformation, 
"none") + assert.Equal(c.Data.Transform.Transformation, "none") assert.Equal(c.Data.LogLevel, "info") } @@ -263,7 +285,9 @@ func TestNewConfig_Hcl_sentry(t *testing.T) { c, err := NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } assert.Equal(c.Data.Sentry.Debug, true) assert.Equal(c.Data.Sentry.Tags, "{\"testKey\":\"testValue\"}") diff --git a/pkg/source/kinesis/kinesis_source_test.go b/pkg/source/kinesis/kinesis_source_test.go index 340ab930..ea32c011 100644 --- a/pkg/source/kinesis/kinesis_source_test.go +++ b/pkg/source/kinesis/kinesis_source_test.go @@ -190,7 +190,9 @@ func TestGetSource_WithKinesisSource(t *testing.T) { c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } // Use our function generator to interact with localstack kinesisSourceConfigFunctionWithLocalstack := configFunctionGeneratorWithInterfaces(kinesisClient, dynamodbClient, "00000000000") @@ -248,7 +250,9 @@ func TestKinesisSourceHCL(t *testing.T) { c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } use := c.Data.Source.Use decoderOpts := &config.DecoderOptions{ diff --git a/pkg/source/sourceconfig/source_config_test.go b/pkg/source/sourceconfig/source_config_test.go index 234d91ac..3c69ad48 100644 --- a/pkg/source/sourceconfig/source_config_test.go +++ b/pkg/source/sourceconfig/source_config_test.go @@ -23,7 +23,9 @@ func TestNewConfig_InvalidSource(t *testing.T) { c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } supportedSources := []ConfigPair{} diff --git a/pkg/source/sqs/sqs_source_test.go b/pkg/source/sqs/sqs_source_test.go index 4a55cf3e..fba5297b 100644 --- a/pkg/source/sqs/sqs_source_test.go +++ 
b/pkg/source/sqs/sqs_source_test.go @@ -119,7 +119,9 @@ func TestGetSource_WithSQSSource(t *testing.T) { c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } sqsSourceConfigFunctionWithLocalStack := configFunctionGeneratorWithInterfaces(sqsClient, "00000000000") adaptedHandle := adapterGenerator(sqsSourceConfigFunctionWithLocalStack) @@ -134,7 +136,7 @@ func TestGetSource_WithSQSSource(t *testing.T) { assert.IsType(&sqsSource{}, source) } -func TestKinesisSourceHCL(t *testing.T) { +func TestSQSSourceHCL(t *testing.T) { testFixPath := "../../../config/test-fixtures" testCases := []struct { File string @@ -162,7 +164,9 @@ func TestKinesisSourceHCL(t *testing.T) { c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } use := c.Data.Source.Use decoderOpts := &config.DecoderOptions{ diff --git a/pkg/source/stdin/stdin_source_test.go b/pkg/source/stdin/stdin_source_test.go index e9f01729..eeb7506e 100644 --- a/pkg/source/stdin/stdin_source_test.go +++ b/pkg/source/stdin/stdin_source_test.go @@ -70,7 +70,9 @@ func TestGetSource_WithStdinSource(t *testing.T) { c, err := config.NewConfig() assert.NotNil(c) - assert.Nil(err) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } stdinSource, err := sourceconfig.GetSource(c, supportedSources) From dc3f25e11321aca54ebd9f84d6f9fb9e543c220f Mon Sep 17 00:00:00 2001 From: adatzer Date: Thu, 7 Apr 2022 11:24:45 +0300 Subject: [PATCH 08/25] Add custom transformation layer (closes #146) --- cmd/cli/cli.go | 3 +- config/config.go | 68 +- config/config_test.go | 117 +- config/examples/README.md | 18 +- config/test-fixtures/invalids.hcl | 4 +- .../transform-invalid-layer-js.hcl | 7 + .../transform-invalid-layer-lua.hcl | 7 + .../test-fixtures/transform-js-extended.hcl | 12 + 
config/test-fixtures/transform-js-simple.hcl | 9 + .../test-fixtures/transform-lua-extended.hcl | 11 + config/test-fixtures/transform-lua-simple.hcl | 9 + go.mod | 9 +- go.sum | 19 +- pkg/transform/engine.go | 35 + pkg/transform/engine_javascript.go | 277 +++ pkg/transform/engine_javascript_test.go | 1937 ++++++++++++++++ pkg/transform/engine_lua.go | 427 ++++ pkg/transform/engine_lua_test.go | 1958 +++++++++++++++++ .../snowplow_enriched_filter_test.go | 46 +- .../transformconfig/transform_config.go | 272 +++ .../transformconfig/transform_config_test.go | 491 +++++ 21 files changed, 5623 insertions(+), 113 deletions(-) create mode 100644 config/test-fixtures/transform-invalid-layer-js.hcl create mode 100644 config/test-fixtures/transform-invalid-layer-lua.hcl create mode 100644 config/test-fixtures/transform-js-extended.hcl create mode 100644 config/test-fixtures/transform-js-simple.hcl create mode 100644 config/test-fixtures/transform-lua-extended.hcl create mode 100644 config/test-fixtures/transform-lua-simple.hcl create mode 100644 pkg/transform/engine.go create mode 100644 pkg/transform/engine_javascript.go create mode 100644 pkg/transform/engine_javascript_test.go create mode 100644 pkg/transform/engine_lua.go create mode 100644 pkg/transform/engine_lua_test.go create mode 100644 pkg/transform/transformconfig/transform_config.go create mode 100644 pkg/transform/transformconfig/transform_config_test.go diff --git a/cmd/cli/cli.go b/cmd/cli/cli.go index 32bb9e37..a216c2f6 100644 --- a/cmd/cli/cli.go +++ b/cmd/cli/cli.go @@ -31,6 +31,7 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/target/targetiface" "github.com/snowplow-devops/stream-replicator/pkg/telemetry" "github.com/snowplow-devops/stream-replicator/pkg/transform" + "github.com/snowplow-devops/stream-replicator/pkg/transform/transformconfig" ) const ( @@ -80,7 +81,7 @@ func RunCli(supportedSourceConfigPairs []sourceconfig.ConfigPair) { return err } - tr, err := cfg.GetTransformations() + 
tr, err := transformconfig.GetTransformations(cfg) if err != nil { return err } diff --git a/config/config.go b/config/config.go index 89014142..17f5fd4a 100644 --- a/config/config.go +++ b/config/config.go @@ -25,7 +25,6 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/statsreceiver/statsreceiveriface" "github.com/snowplow-devops/stream-replicator/pkg/target" "github.com/snowplow-devops/stream-replicator/pkg/target/targetiface" - "github.com/snowplow-devops/stream-replicator/pkg/transform" ) // Config holds the configuration data along with the decoder to decode them @@ -81,6 +80,12 @@ type StatsConfig struct { BufferSec int `hcl:"buffer_sec,optional" env:"STATS_RECEIVER_BUFFER_SEC"` } +// TransformConfig holds configuration for tranformations. +type TransformConfig struct { + Message string `hcl:"message_transformation,optional" env:"MESSAGE_TRANSFORMATION"` + Layer *Use `hcl:"use,block" envPrefix:"TRANSFORMATION_LAYER_"` +} + // defaultConfigData returns the initial main configuration target. 
func defaultConfigData() *ConfigurationData { return &ConfigurationData{ @@ -301,48 +306,6 @@ func (c *Config) GetFailureTarget(AppName string, AppVersion string) (failureifa return nil, fmt.Errorf("could not interpret failure target configuration for %q", useFailureTarget.Name) } -// GetTransformations builds and returns transformationApplyFunction from the transformations configured -func (c *Config) GetTransformations() (transform.TransformationApplyFunction, error) { - funcs := make([]transform.TransformationFunction, 0, 0) - - // Parse list of transformations - transformations := strings.Split(c.Data.Transformation, ",") - - for _, transformation := range transformations { - // Parse function name-option sets - funcOpts := strings.Split(transformation, ":") - - switch funcOpts[0] { - case "spEnrichedToJson": - funcs = append(funcs, transform.SpEnrichedToJSON) - case "spEnrichedSetPk": - funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(funcOpts[1])) - case "spEnrichedFilter": - filterFunc, err := transform.NewSpEnrichedFilterFunction(funcOpts[1]) - if err != nil { - return nil, err - } - funcs = append(funcs, filterFunc) - case "spEnrichedFilterContext": - filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(funcOpts[1]) - if err != nil { - return nil, err - } - funcs = append(funcs, filterFunc) - case "spEnrichedFilterUnstructEvent": - filterFunc, err := transform.NewSpEnrichedFilterFunctionUnstructEvent(funcOpts[1]) - if err != nil { - return nil, err - } - funcs = append(funcs, filterFunc) - case "none": - default: - return nil, errors.New(fmt.Sprintf("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got '%s'", c.Data.Transformation)) - } - } - return transform.NewTransformation(funcs...), nil -} - // GetTags returns a list of tags to use in identifying this instance of stream-replicator with enough // entropy so as to avoid collisions as it should not be possible 
to have both the host and process_id be // the same. @@ -400,3 +363,22 @@ func (c *Config) GetStatsReceiver(tags map[string]string) (statsreceiveriface.St return nil, errors.New(fmt.Sprintf("Invalid stats receiver found; expected one of 'statsd' and got '%s'", useReceiver.Name)) } } + +// ProvideTransformMessage implements transformconfig.configProvider +func (c *Config) ProvideTransformMessage() string { + return c.Data.Transform.Message +} + +// ProvideTransformLayerName implements transformconfig.configProvider +func (c *Config) ProvideTransformLayerName() string { + return c.Data.Transform.Layer.Name +} + +// ProvideTransformComponent implements transformconfig.configProvider +func (c *Config) ProvideTransformComponent(p Pluggable) (interface{}, error) { + decoderOpts := &DecoderOptions{ + Input: c.Data.Transform.Layer.Body, + } + + return c.CreateComponent(p, decoderOpts) +} diff --git a/config/config_test.go b/config/config_test.go index 3cdec05a..3635bc2f 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -11,6 +11,7 @@ import ( "path/filepath" "testing" + "github.com/snowplow-devops/stream-replicator/pkg/transform" "github.com/stretchr/testify/assert" ) @@ -26,7 +27,7 @@ func TestNewConfig(t *testing.T) { assert.Equal("info", c.Data.LogLevel) assert.Equal("stdout", c.Data.Target.Use.Name) - assert.Equal("none", c.Data.Transform.Transformation) + assert.Equal("none", c.Data.Transform.Message) assert.Equal("stdin", c.Data.Source.Use.Name) // Tests on sources moved to the source package. 
@@ -35,10 +36,6 @@ func TestNewConfig(t *testing.T) { assert.NotNil(target) assert.Nil(err) - transformation, err := c.GetTransformations() - assert.NotNil(transformation) - assert.Nil(err) - failureTarget, err := c.GetFailureTarget("testAppName", "0.0.0") assert.NotNil(failureTarget) assert.Nil(err) @@ -83,44 +80,6 @@ func TestNewConfig_FromEnvInvalid(t *testing.T) { assert.NotNil(err) } -func TestNewConfig_InvalidTransformation(t *testing.T) { - assert := assert.New(t) - - defer os.Unsetenv("MESSAGE_TRANSFORMATION") - - os.Setenv("MESSAGE_TRANSFORMATION", "fake") - - c, err := NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - transformation, err := c.GetTransformations() - assert.Nil(transformation) - assert.NotNil(err) - assert.Equal("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got 'fake'", err.Error()) -} - -func TestNewConfig_FilterFailure(t *testing.T) { - assert := assert.New(t) - - defer os.Unsetenv("MESSAGE_TRANSFORMATION") - - os.Setenv("MESSAGE_TRANSFORMATION", "spEnrichedFilter:incompatibleArg") - - c, err := NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - transformation, err := c.GetTransformations() - assert.Nil(transformation) - assert.NotNil(err) - assert.Equal(`invalid filter function config, must be of the format {field name}=={value}[|{value}|...] 
or {field name}!={value}[|{value}|...]`, err.Error()) -} - func TestNewConfig_InvalidTarget(t *testing.T) { assert := assert.New(t) @@ -230,13 +189,6 @@ func TestNewConfig_Hcl_invalids(t *testing.T) { t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - t.Run("invalid_transformation", func(t *testing.T) { - transformation, err := c.GetTransformations() - assert.Nil(transformation) - assert.NotNil(err) - assert.Equal("Invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk:{option}', spEnrichedFilter:{option} and got 'fakeHCL'", err.Error()) - }) - t.Run("invalid_target", func(t *testing.T) { target, err := c.GetTarget() assert.Nil(target) @@ -273,7 +225,7 @@ func TestNewConfig_Hcl_defaults(t *testing.T) { assert.Equal(c.Data.Sentry.Debug, false) assert.Equal(c.Data.StatsReceiver.TimeoutSec, 1) assert.Equal(c.Data.StatsReceiver.BufferSec, 15) - assert.Equal(c.Data.Transform.Transformation, "none") + assert.Equal(c.Data.Transform.Message, "none") assert.Equal(c.Data.LogLevel, "info") } @@ -293,3 +245,66 @@ func TestNewConfig_Hcl_sentry(t *testing.T) { assert.Equal(c.Data.Sentry.Tags, "{\"testKey\":\"testValue\"}") assert.Equal(c.Data.Sentry.Dsn, "testDsn") } + +func TestDefaultTransformation(t *testing.T) { + assert := assert.New(t) + + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") + t.Setenv("MESSAGE_TRANSFORMATION", "") + + c, err := NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal("none", c.Data.Transform.Message) + assert.Equal("none", c.ProvideTransformMessage()) + assert.Equal("", c.ProvideTransformLayerName()) +} + +func TestTransformationProviderImplementation(t *testing.T) { + testFixPath := "./test-fixtures" + testCases := []struct { + File string + Plug Pluggable + Message string + LayerName string + }{ + { + File: "transform-lua-simple.hcl", + Plug: transform.LuaLayer().(Pluggable), + Message: "lua:fun", + LayerName: "lua", 
+ }, + { + File: "transform-js-simple.hcl", + Plug: transform.JSLayer().(Pluggable), + Message: "js:fun", + LayerName: "js", + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + configFile := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", configFile) + + c, err := NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal(tt.Message, c.ProvideTransformMessage()) + assert.Equal(tt.LayerName, c.ProvideTransformLayerName()) + + component, err := c.ProvideTransformComponent(tt.Plug) + assert.Nil(err) + assert.NotNil(component) + + }) + } +} diff --git a/config/examples/README.md b/config/examples/README.md index 3ce0646b..066e56b0 100644 --- a/config/examples/README.md +++ b/config/examples/README.md @@ -49,8 +49,11 @@ stats_receiver { // block for configuring sentry sentry {} -// string to configure message transformation (default: "none") -message_transformation = "none" +// block for configuring transformations +transform { + // string to configure message transformation (default: "none") + message_transformation = "none" +} // log level configuration (default: "info") log_level = "info" @@ -99,6 +102,17 @@ sentry { debug = true } +transform { + message_transformation = "spEnrichedFilter:app_id==myApp,js:customFunction" + + use "js" { + source_b64 = "CmZ1bmN0aW9uIGN1c3RvbUZ1bmN0aW9uKGlucHV0KSB7CiAgICByZXR1cm4gaW5wdXQ7Cn0K" + timeout_sec = 2 + disable_source_maps = false + snowplow_mode = true + } +} + log_level = "debug" user_provided_id = "my-example-id" diff --git a/config/test-fixtures/invalids.hcl b/config/test-fixtures/invalids.hcl index 7075e475..1944444e 100644 --- a/config/test-fixtures/invalids.hcl +++ b/config/test-fixtures/invalids.hcl @@ -1,6 +1,8 @@ # configuration with various invalid options -message_transformation = "fakeHCL" +transform { + message_transformation = "fakeHCL" +} 
target { use "fakeHCL" {} diff --git a/config/test-fixtures/transform-invalid-layer-js.hcl b/config/test-fixtures/transform-invalid-layer-js.hcl new file mode 100644 index 00000000..fb4a2191 --- /dev/null +++ b/config/test-fixtures/transform-invalid-layer-js.hcl @@ -0,0 +1,7 @@ +# transform configuration + +transform { + message_transformation = "js:fun" + + use "fake" {} +} diff --git a/config/test-fixtures/transform-invalid-layer-lua.hcl b/config/test-fixtures/transform-invalid-layer-lua.hcl new file mode 100644 index 00000000..f8c46865 --- /dev/null +++ b/config/test-fixtures/transform-invalid-layer-lua.hcl @@ -0,0 +1,7 @@ +# transform configuration + +transform { + message_transformation = "lua:fun" + + use "fake" {} +} diff --git a/config/test-fixtures/transform-js-extended.hcl b/config/test-fixtures/transform-js-extended.hcl new file mode 100644 index 00000000..6292e2fe --- /dev/null +++ b/config/test-fixtures/transform-js-extended.hcl @@ -0,0 +1,12 @@ +# transform configuration - js - extended + +transform { + message_transformation = "js:fun" + + use "js" { + source_b64 = "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ" + timeout_sec = 10 + disable_source_maps = false + snowplow_mode = true + } +} diff --git a/config/test-fixtures/transform-js-simple.hcl b/config/test-fixtures/transform-js-simple.hcl new file mode 100644 index 00000000..3e766b10 --- /dev/null +++ b/config/test-fixtures/transform-js-simple.hcl @@ -0,0 +1,9 @@ +# transform configuration - js - simple + +transform { + message_transformation = "js:fun" + + use "js" { + source_b64 = "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ" + } +} diff --git a/config/test-fixtures/transform-lua-extended.hcl b/config/test-fixtures/transform-lua-extended.hcl new file mode 100644 index 00000000..f728b995 --- /dev/null +++ b/config/test-fixtures/transform-lua-extended.hcl @@ -0,0 +1,11 @@ +# transform configuration - lua - extended + +transform { + message_transformation = "lua:fun" + + use "lua" 
{ + source_b64 = "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ" + timeout_sec = 10 + sandbox = false + } +} diff --git a/config/test-fixtures/transform-lua-simple.hcl b/config/test-fixtures/transform-lua-simple.hcl new file mode 100644 index 00000000..3f8c2ba0 --- /dev/null +++ b/config/test-fixtures/transform-lua-simple.hcl @@ -0,0 +1,9 @@ +# transform configuration - lua - simple + +transform { + message_transformation = "lua:fun" + + use "lua" { + source_b64 = "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ" + } +} diff --git a/go.mod b/go.mod index 29b7739b..45fd6602 100644 --- a/go.mod +++ b/go.mod @@ -24,7 +24,7 @@ require ( github.com/hashicorp/go-multierror v1.1.1 github.com/jpillora/backoff v1.0.0 // indirect github.com/klauspost/compress v1.15.0 // indirect - github.com/mitchellh/mapstructure v1.4.1 // indirect + github.com/mitchellh/mapstructure v1.4.1 github.com/myesui/uuid v1.0.0 // indirect github.com/pkg/errors v0.9.1 github.com/sirupsen/logrus v1.8.1 @@ -51,9 +51,14 @@ require ( require ( github.com/davecgh/go-spew v1.1.1 + github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf + github.com/goccy/go-json v0.9.7 github.com/hashicorp/hcl/v2 v2.11.1 github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1 + github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7 + github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 github.com/zclconf/go-cty v1.10.0 + layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf ) require ( @@ -66,9 +71,11 @@ require ( github.com/agext/levenshtein v1.2.1 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/devigned/tab v0.1.1 // indirect + github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 // indirect github.com/eapache/go-resiliency v1.2.0 // indirect github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect github.com/eapache/queue v1.1.0 // indirect + github.com/go-sourcemap/sourcemap 
v2.1.3+incompatible // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/google/go-cmp v0.5.6 // indirect github.com/googleapis/gax-go/v2 v2.0.5 // indirect diff --git a/go.sum b/go.sum index 7a4bcc94..cfea9f12 100644 --- a/go.sum +++ b/go.sum @@ -159,6 +159,11 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dimchansky/utfbom v1.1.0 h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4= github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= +github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 h1:Izz0+t1Z5nI16/II7vuEo/nHjodOg0p7+OiDpjX5t1E= +github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf h1:Yt+4K30SdjOkRoRRm3vYNQgR+/ZIy0RmeUDZo7Y8zeQ= +github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= +github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/eapache/go-resiliency v1.2.0 h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q= github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= @@ -198,11 +203,15 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= 
+github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= +github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/goccy/go-json v0.9.7 h1:IcB+Aqpx/iMHu5Yooh7jEzJk1JZ7Pjtmys2ukPr7EeM= +github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -506,11 +515,15 @@ github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmv github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= +github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7 h1:noHsffKZsNfU38DwcXWEPldrTjIZ8FPNKx8mYMGnqjs= +github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7/go.mod h1:bbMEM6aU1WDF1ErA5YJ0p91652pGv140gGw4Ww3RGp8= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= 
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 h1:k/gmLsJDWwWqbLCur2yWnJzwQEKRcAHXo6seXGuSwWw= +github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0= @@ -656,6 +669,7 @@ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -930,8 +944,9 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWD gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8 
h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20191120175047-4206685974f2/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= @@ -943,6 +958,8 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf h1:rRz0YsF7VXj9fXRF6yQgFI7DzST+hsI3TeFSGupntu0= +layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf/go.mod h1:ivKkcY8Zxw5ba0jldhZCYYQfGdb2K6u9tbYK1AwMIBc= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/pkg/transform/engine.go b/pkg/transform/engine.go new file mode 100644 index 00000000..4b4af505 --- /dev/null +++ b/pkg/transform/engine.go @@ -0,0 +1,35 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package transform + +// FunctionMaker is the interface that wraps the MakeFunction method +type FunctionMaker interface { + // MakeFunction returns a TransformationFunction that runs + // a given function in a runtime engine. + MakeFunction(funcName string) TransformationFunction +} + +// SmokeTester is the interface that wraps the SmokeTest method. +type SmokeTester interface { + // SmokeTest runs a test spin of the engine trying to get as close to + // running the given function as possible. + SmokeTest(funcName string) error +} + +// Engine is the interface that groups +// FunctionMaker and SmokeTester. +type Engine interface { + FunctionMaker + SmokeTester +} + +// EngineProtocol is the I/O type of an Engine. +type EngineProtocol struct { + FilterOut bool + PartitionKey string + Data interface{} +} diff --git a/pkg/transform/engine_javascript.go b/pkg/transform/engine_javascript.go new file mode 100644 index 00000000..5df77df1 --- /dev/null +++ b/pkg/transform/engine_javascript.go @@ -0,0 +1,277 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package transform + +import ( + "encoding/base64" + "fmt" + "time" + + goja "github.com/dop251/goja" + gojaparser "github.com/dop251/goja/parser" + gojson "github.com/goccy/go-json" + "github.com/mitchellh/mapstructure" + + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +// jsEngineConfig configures the JavaScript Engine. 
+type jsEngineConfig struct { + SourceB64 string `hcl:"source_b64" env:"TRANSFORMATION_JS_SOURCE_B64"` + RunTimeout int `hcl:"timeout_sec,optional" env:"TRANSFORMATION_JS_TIMEOUT_SEC"` + DisableSourceMaps bool `hcl:"disable_source_maps,optional" env:"TRANSFORMATION_JS_DISABLE_SOURCE_MAPS"` + SpMode bool `hcl:"snowplow_mode,optional" env:"TRANSFORMATION_JS_SNOWPLOW_MODE"` +} + +// jsEngine handles the provision of a JavaScript runtime to run transformations. +type jsEngine struct { + Code *goja.Program + RunTimeout time.Duration + SpMode bool +} + +// newJSEngine returns a JavaScript Engine from a jsEngineConfig. +func newJSEngine(c *jsEngineConfig) (*jsEngine, error) { + jsSrc, err := base64.StdEncoding.DecodeString(c.SourceB64) + if err != nil { + return nil, err + } + + compiledCode, err := compileJS(string(jsSrc), c.SourceB64, c.DisableSourceMaps) + if err != nil { + return nil, err + } + + eng := &jsEngine{ + Code: compiledCode, + RunTimeout: time.Duration(c.RunTimeout) * time.Second, + SpMode: c.SpMode, + } + + return eng, nil +} + +// The jsEngineAdapter type is an adapter for functions to be used as +// pluggable components for JavaScript Engine. Implements the Pluggable interface. +type jsEngineAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f jsEngineAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f jsEngineAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &jsEngineConfig{ + RunTimeout: 5, + DisableSourceMaps: true, + } + + return cfg, nil +} + +// adaptJSEngineFunc returns a jsEngineAdapter. 
+func adaptJSEngineFunc(f func(c *jsEngineConfig) (*jsEngine, error)) jsEngineAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*jsEngineConfig) + if !ok { + return nil, fmt.Errorf("invalid input, expected jsEngineConfig") + } + + return f(cfg) + } +} + +// JSLayer returns the Pluggable transformation layer implemented in JavaScript. +func JSLayer() interface{} { + return adaptJSEngineFunc(newJSEngine) +} + +// SmokeTest implements SmokeTester. +func (e *jsEngine) SmokeTest(funcName string) error { + _, _, err := initRuntime(e, funcName) + return err +} + +// MakeFunction implements FunctionMaker. +func (e *jsEngine) MakeFunction(funcName string) TransformationFunction { + + return func(message *models.Message, interState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { + // making input + input, err := mkJSEngineInput(e, message, interState) + if err != nil { + message.SetError(fmt.Errorf("failed making input for the JavaScript runtime: %q", err.Error())) + return nil, nil, message, nil + } + + // initializing + vm, fun, err := initRuntime(e, funcName) + if err != nil { + message.SetError(fmt.Errorf("failed initializing JavaScript runtime: %q", err.Error())) + return nil, nil, message, nil + } + + timer := time.AfterFunc(e.RunTimeout, func() { + vm.Interrupt("runtime deadline exceeded") + }) + defer timer.Stop() + + // running + res, err := fun(goja.Undefined(), vm.ToValue(input)) + + if err != nil { + // runtime error counts as failure + runErr := fmt.Errorf("error running JavaScript function %q: %q", funcName, err.Error()) + message.SetError(runErr) + return nil, nil, message, nil + } + + // validating output + protocol, err := validateJSEngineOut(res.Export()) + if err != nil { + message.SetError(err) + return nil, nil, message, nil + } + + // filtering - keeping same behaviour with spEnrichedFilter + if protocol.FilterOut == true { + return nil, message, nil, nil + } + + // handling data + switch 
protoData := protocol.Data.(type) { + case string: + message.Data = []byte(protoData) + case map[string]interface{}: + // encode + encoded, err := gojson.MarshalWithOption(protoData, gojson.DisableHTMLEscape()) + if err != nil { + message.SetError(fmt.Errorf("error encoding message data")) + return nil, nil, message, nil + } + message.Data = encoded + default: + message.SetError(fmt.Errorf("invalid return type from JavaScript transformation; expected string or object")) + return nil, nil, message, nil + } + + // setting pk if needed + pk := protocol.PartitionKey + if pk != "" && message.PartitionKey != pk { + message.PartitionKey = pk + } + + return message, nil, nil, protocol + } +} + +// compileJS compiles JavaScript code. +// Since goja.New is not goroutine-safe, we spin a new runtime for every +// transformation. The reason for this function is to allow us to at least share +// the compiled code and so run only once the parse and compile steps, +// which are implicitly run by the alternative RunString. +// see also: +// https://pkg.go.dev/github.com/dop251/goja#CompileAST +func compileJS(code, name string, disableSrcMaps bool) (*goja.Program, error) { + parserOpts := make([]gojaparser.Option, 0, 1) + + if disableSrcMaps == true { + parserOpts = append(parserOpts, gojaparser.WithDisableSourceMaps) + } + + ast, err := goja.Parse(name, code, parserOpts...) + if err != nil { + return nil, err + } + + // 'use strict' + prog, err := goja.CompileAST(ast, true) + if err != nil { + return nil, err + } + + return prog, nil +} + +// initRuntime initializes and returns an instance of a JavaScript runtime. 
+func initRuntime(e *jsEngine, funcName string) (*goja.Runtime, goja.Callable, error) { + // goja.New returns *goja.Runtime + vm := goja.New() + timer := time.AfterFunc(e.RunTimeout, func() { + vm.Interrupt("runtime deadline exceeded") + }) + defer timer.Stop() + + _, err := vm.RunProgram(e.Code) + if err != nil { + return nil, nil, fmt.Errorf("could not load JavaScript code: %q", err) + } + + if fun, ok := goja.AssertFunction(vm.Get(funcName)); ok { + return vm, fun, nil + } + + return nil, nil, fmt.Errorf("could not assert as function: %q", funcName) +} + +// mkJSEngineInput describes the logic for constructing the input to JS engine. +// No side effects. +func mkJSEngineInput(e *jsEngine, message *models.Message, interState interface{}) (*EngineProtocol, error) { + if interState != nil { + if i, ok := interState.(*EngineProtocol); ok { + return i, nil + } + } + + candidate := &EngineProtocol{ + Data: string(message.Data), + } + + if !e.SpMode { + return candidate, nil + } + + parsedMessage, err := intermediateAsSpEnrichedParsed(interState, message) + if err != nil { + // if spMode, error for non Snowplow enriched event data + return nil, err + } + + spMap, err := parsedMessage.ToMap() + if err != nil { + return nil, err + } + + candidate.Data = spMap + return candidate, nil +} + +// validateJSEngineOut validates the value returned by the js engine. 
+func validateJSEngineOut(output interface{}) (*EngineProtocol, error) { + if output == nil { + return nil, fmt.Errorf("invalid return type from JavaScript transformation; got null or undefined") + } + + if out, ok := output.(*EngineProtocol); ok { + return out, nil + } + + outMap, ok := output.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("invalid return type from JavaScript transformation") + } + + result := &EngineProtocol{} + err := mapstructure.Decode(outMap, result) + if err != nil { + return nil, fmt.Errorf("protocol violation in return value from JavaScript transformation") + } + + return result, nil +} diff --git a/pkg/transform/engine_javascript_test.go b/pkg/transform/engine_javascript_test.go new file mode 100644 index 00000000..4f434c00 --- /dev/null +++ b/pkg/transform/engine_javascript_test.go @@ -0,0 +1,1937 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package transform + +import ( + "encoding/base64" + "fmt" + "path/filepath" + "reflect" + "strings" + "testing" + "time" + + "github.com/davecgh/go-spew/spew" + "github.com/stretchr/testify/assert" + + config "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +func TestJSEngineConfig_ENV(t *testing.T) { + testCases := []struct { + Name string + Plug config.Pluggable + Expected interface{} + }{ + { + Name: "transform-js-from-env", + Plug: testJSEngineAdapter(testJSEngineFunc), + Expected: &jsEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", + RunTimeout: 10, + DisableSourceMaps: false, + SpMode: false, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") + + t.Setenv("MESSAGE_TRANSFORMATION", "js") + t.Setenv("TRANSFORMATION_LAYER_NAME", "js") + + t.Setenv("TRANSFORMATION_JS_SOURCE_B64", "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ") + t.Setenv("TRANSFORMATION_JS_TIMEOUT_SEC", "10") + t.Setenv("TRANSFORMATION_JS_DISABLE_SOURCE_MAPS", "false") + t.Setenv("TRANSFORMATION_JS_SNOWPLOW_MODE", "false") + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + engine := c.Data.Transform.Layer + decoderOpts := &config.DecoderOptions{ + Input: engine.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestJSEngineConfig_HCL(t *testing.T) { + testFixPath := "../../config/test-fixtures" + testCases := []struct { + File string + Plug config.Pluggable + Expected interface{} + }{ + { + File: "transform-js-simple.hcl", + Plug: 
testJSEngineAdapter(testJSEngineFunc), + Expected: &jsEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", + RunTimeout: 5, + DisableSourceMaps: true, + SpMode: false, + }, + }, + { + File: "transform-js-extended.hcl", + Plug: testJSEngineAdapter(testJSEngineFunc), + Expected: &jsEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", + RunTimeout: 10, + DisableSourceMaps: false, + SpMode: true, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + engine := c.Data.Transform.Layer + decoderOpts := &config.DecoderOptions{ + Input: engine.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestJSLayer(t *testing.T) { + layer := JSLayer() + if _, ok := layer.(config.Pluggable); !ok { + t.Errorf("invalid interface returned from JSLayer") + } +} + +func TestJSEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode bool = false + testCases := []struct { + Src string + FunName string + DisableSourceMaps bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": 
nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "asdf", + }, + Error: nil, + }, + { + Src: ` +function concatHello(x) { + let newVal = "Hello:" + x.Data; + x.Data = newVal; + return x; +} +`, + FunName: "concatHello", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("Hello:asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "Hello:asdf", + }, + Error: nil, + }, + { + Src: ` +function filterIn(x) { + x.FilterOut = false + return x; +} +`, + FunName: "filterIn", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "asdf", + }, + Error: nil, + }, + { + Src: ` +function filterOut(x) { + if (Object.prototype.toString.call(x.Data) === '[object String]') { + return { + FilterOut: true, + }; + } + + return { + FilterOut: false, + Data: x.Data + }; +} +`, + FunName: "filterOut", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +function jsonIdentity(x) { + var jsonObj = JSON.parse(x.Data); + var result = JSON.stringify(jsonObj); + + return { + Data: result + }; +} +`, + FunName: "jsonIdentity", + DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: 
"some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Src: ` +function jsonTransformFieldNameRegex(x) { + var jsonObj = JSON.parse(x.Data); + + if (jsonObj.hasOwnProperty("app_id")) { + x.Data = x.Data.replace(/app_id/, 'app_id_CHANGED'); + } + + return x; +} +`, + FunName: "jsonTransformFieldNameRegex", + DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSONChanged1, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSONChanged1), + }, + Error: nil, + }, + { + Src: ` +function jsonTransformFieldNameObj(x) { + + var jsonObj = JSON.parse(x.Data); + + var descriptor = Object.getOwnPropertyDescriptor(jsonObj, "app_id"); + Object.defineProperty(jsonObj, "app_id_CHANGED", descriptor); + delete jsonObj["app_id"]; + + return { + Data: JSON.stringify(jsonObj) + }; +} +`, + FunName: "jsonTransformFieldNameObj", + DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSONChanged2, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSONChanged2), + }, + Error: nil, + }, + { + Src: ` +function jsonFilterOut(x) { + var jsonObj = JSON.parse(x.Data); + + if (jsonObj.hasOwnProperty("app_id") && jsonObj["app_id"] === "filterMeOut") { + x.FilterOut = false; + } else { + x.FilterOut = true; + } + + return x; +} +`, + FunName: "jsonFilterOut", 
+ DisableSourceMaps: false, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +function returnWrongType(x) { + return 0; +} +`, + FunName: "returnWrongType", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation"), + }, + { + Src: ` +function returnUndefined(x) {} +`, + FunName: "returnUndefined", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation; got null or undefined"), + }, + { + Src: ` +function returnNull(x) { + return null; +} +`, + FunName: "returnNull", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation; got null or undefined"), + }, + { + Src: ` +function causeRuntimeError(x) { + return x.toExponential(2); +} +`, + FunName: "causeRuntimeError", + DisableSourceMaps: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: 
map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("error running JavaScript function \"causeRuntimeError\""), + }, + { + Src: ` +function callError(x) { + throw("Failed"); +} +`, + FunName: "callError", + DisableSourceMaps: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("error running JavaScript function \"callError\""), + }, + { + Src: ` +function sleepTenSecs(x) { + var now = new Date().getTime(); + while(new Date().getTime() < now + 10000) { + } +} +`, + FunName: "sleepTenSecs", + DisableSourceMaps: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("runtime deadline exceeded"), + }, + } + + for _, tt := range testCases { + t.Run(tt.FunName, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := newJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + 
t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode bool = true + testCases := []struct { + Scenario string + Src string + FunName string + DisableSourceMaps bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "identity", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "filtering", + Src: ` +function filterOut(input) { + // input is an object + var spData = input.Data; + if (spData["app_id"] === "myApp") { + return input; + } + return { + FilterOut: true + }; +} +`, + FunName: "filterOut", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: 
"filteringOut_ignoresData", + Src: ` +function filterOutIgnores(x) { + return { + FilterOut: true, + Data: "shouldNotAppear", + PartitionKey: "notThis" + }; +} +`, + FunName: "filterOutIgnores", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: "non_Snowplow_enriched_to_failed", + Src: ` +function willNotRun(x) { + return x; +} +`, + FunName: "willNotRun", + DisableSourceMaps: false, + Input: &models.Message{ + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "return_wrong_type", + Src: ` +function returnWrongType(x) { + return 0; +} +`, + FunName: "returnWrongType", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from JavaScript transformation"), + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := newJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest(tt.FunName); err 
!= nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { + testSpMode := false + testCases := []struct { + Scenario string + Src string + FunName string + DisableSourceMaps bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: 
"some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_spMode_true", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_spMode_false", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := newJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function newJSEngine failed with error: %q", 
err.Error()) + } + + if err := jsEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { + testSpMode := true + testCases := []struct { + Scenario string + Src string + FunName string + DisableSourceMaps bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: 
&models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testJsJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_notEngineProtocol_notSpEnriched", + Src: ` +function willNotRun(x) { + return x; +} +`, + FunName: "willNotRun", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "intermediateState_notEngineProtocol_SpEnriched", + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testJSMap, + }, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: testSpMode, + } + + jsEngine, err := newJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + if 
err := jsEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineMakeFunction_SetPK(t *testing.T) { + var testInterState interface{} = nil + testCases := []struct { + Scenario string + Src string + FunName string + DisableSourceMaps bool + SpMode bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "onlySetPk_spModeTrue", + Src: ` +function onlySetPk(x) { + x.PartitionKey = "newPk"; + return x; +} +`, + FunName: "onlySetPk", + DisableSourceMaps: true, + SpMode: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "oldPK", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testJsJSON, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "newPk", + Data: testJSMap, + }, + Error: nil, + }, + { + Scenario: "onlySetPk_spModeFalse", + Src: ` +function onlySetPk(x) { + x.PartitionKey = "newPk"; + return x; +} +`, + FunName: "onlySetPk", + DisableSourceMaps: true, + SpMode: false, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "oldPK", + }, + Expected: 
map[string]*models.Message{ + "success": { + Data: testJsTsv, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "newPk", + Data: string(testJsTsv), + }, + Error: nil, + }, + { + Scenario: "filterOutIgnores", + Src: ` +function filterOutIgnores(x) { + return { + FilterOut: true, + Data: "shouldNotAppear", + PartitionKey: "notThis" + }; +} +`, + FunName: "filterOutIgnores", + DisableSourceMaps: true, + SpMode: true, + Input: &models.Message{ + Data: testJsTsv, + PartitionKey: "oldPk", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testJsTsv, + PartitionKey: "oldPk", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + DisableSourceMaps: tt.DisableSourceMaps, + SpMode: tt.SpMode, + } + + jsEngine, err := newJSEngine(jsConfig) + assert.NotNil(jsEngine) + if err != nil { + t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + if err := jsEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := jsEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareJs(t, s, tt.Expected["success"]) + 
assertMessagesCompareJs(t, f, tt.Expected["filtered"]) + assertMessagesCompareJs(t, e, tt.Expected["failed"]) + }) + } +} + +func TestJSEngineSmokeTest(t *testing.T) { + testCases := []struct { + Src string + FunName string + DisableSourceMaps bool + CompileError error + SmokeError error + }{ + { + Src: ` +function identity(x) { + return x; +} +`, + FunName: "identity", + DisableSourceMaps: true, + CompileError: nil, + SmokeError: nil, + }, + { + Src: ` +function notThisOne(x) { + return x; +} +`, + FunName: "notExists", + DisableSourceMaps: true, + CompileError: nil, + SmokeError: fmt.Errorf("could not assert as function"), + }, + { + Src: ` +function syntaxError(x) { + loca y = 0; +} +`, + FunName: "syntaxError", + DisableSourceMaps: false, + CompileError: fmt.Errorf("SyntaxError"), + SmokeError: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.FunName, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + DisableSourceMaps: tt.DisableSourceMaps, + } + + jsEngine, compileErr := newJSEngine(jsConfig) + + if compileErr != nil { + if tt.CompileError == nil { + t.Fatalf("got unexpected error while creating newJSEngine: %s", compileErr.Error()) + } + + if !strings.Contains(compileErr.Error(), tt.CompileError.Error()) { + t.Errorf("newJSEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", + compileErr.Error(), + tt.CompileError.Error()) + } + } else { + assert.NotNil(jsEngine) + + smoke := jsEngine.SmokeTest(tt.FunName) + expErr := tt.SmokeError + if smoke != nil { + if expErr == nil { + t.Fatalf("got unexpected smoke-test error: %q", smoke.Error()) + } + + if !strings.Contains(smoke.Error(), expErr.Error()) { + t.Errorf("smoke error mismatch\nGOT_ERROR:\n%q\ndoes not contain\nEXPECTED_ERROR:\n%q", + smoke.Error(), + expErr.Error()) + } + } else { + assert.Nil(tt.SmokeError) + } + } + }) + } +} + +func 
TestJSEngineWithBuiltinsSpModeFalse(t *testing.T) { + srcCode := ` +function identity(x) { + return x; +} + +function setPk(x) { + x.PartitionKey = "testKey"; + return x; +} +` + // JS + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + SpMode: false, + } + + jsEngine, err := newJSEngine(jsConfig) + if err != nil { + t.Fatalf("newJSEngine failed with error: %q", err) + } + + if err := jsEngine.SmokeTest("identity"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + if err := jsEngine.SmokeTest("setPk"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + jsFuncID := jsEngine.MakeFunction("identity") + jsFuncPk := jsEngine.MakeFunction("setPk") + + // Builtins + setPkToAppID := NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation TransformationApplyFunction + Input []*models.Message + ExpectedGood []*models.Message + }{ + { + Name: "identity0", + Input: messages, + Transformation: NewTransformation( + jsFuncID, + setPkToAppID, + spEnrichedToJSON, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + }, + }, + { + Name: "identity2", + Input: messages, + Transformation: NewTransformation( + setPkToAppID, + spEnrichedToJSON, + jsFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + }, + }, + { + Name: "setPk1", + Input: messages, + Transformation: NewTransformation( + setPkToAppID, + jsFuncPk, + spEnrichedToJSON, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "testKey", + 
}, + { + Data: snowplowJSON2, + PartitionKey: "testKey", + }, + { + Data: snowplowJSON3, + PartitionKey: "testKey", + }, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + result := tt.Transformation(tt.Input) + assert.NotNil(result) + assert.Equal(len(tt.ExpectedGood), len(result.Result)) + for i, res := range result.Result { + if i < len(tt.ExpectedGood) { + exp := tt.ExpectedGood[i] + if !reflect.DeepEqual(res.Data, exp.Data) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(res.Data), + spew.Sdump(exp.Data)) + } + assert.Equal(res.PartitionKey, exp.PartitionKey) + } + } + }) + } +} + +func TestJSEngineWithBuiltinsSpModeTrue(t *testing.T) { + srcCode := ` +function identity(x) { + return x; +} + +function setPk(x) { + x.PartitionKey = "testKey"; + return x; +} +` + // JS + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 1, + SpMode: true, + } + + jsEngine, err := newJSEngine(jsConfig) + if err != nil { + t.Fatalf("newJSEngine failed with error: %q", err) + } + + if err := jsEngine.SmokeTest("identity"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + if err := jsEngine.SmokeTest("setPk"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + jsFuncID := jsEngine.MakeFunction("identity") + jsFuncPk := jsEngine.MakeFunction("setPk") + + // Builtins + setPkToAppID := NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation TransformationApplyFunction + Input []*models.Message + ExpectedGood []*models.Message + }{ + { + Name: "identity", + Input: []*models.Message{ + { + Data: testJsTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: NewTransformation( + setPkToAppID, + spEnrichedToJSON, + jsFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: testJsJSON, + PartitionKey: 
"test-data<>", + }, + }, + }, + { + Name: "setPk", + Input: []*models.Message{ + { + Data: testJsTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: NewTransformation( + setPkToAppID, + jsFuncPk, + ), + ExpectedGood: []*models.Message{ + { + Data: testJsJSON, + PartitionKey: "testKey", + }, + }, + }, + { + Name: "mix", + Input: []*models.Message{ + { + Data: testJsTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: NewTransformation( + setPkToAppID, + jsFuncID, + jsFuncPk, + jsFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: testJsJSON, + PartitionKey: "testKey", + }, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + result := tt.Transformation(tt.Input) + assert.NotNil(result) + assert.Equal(len(tt.ExpectedGood), len(result.Result)) + for i, res := range result.Result { + if i < len(tt.ExpectedGood) { + exp := tt.ExpectedGood[i] + if !reflect.DeepEqual(res.Data, exp.Data) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(res.Data), + spew.Sdump(exp.Data)) + } + assert.Equal(res.PartitionKey, exp.PartitionKey) + } + } + }) + } +} + +func Benchmark_JSEngine_Passthrough_DisabledSrcMaps(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function identity(x) { + return x; +} +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + } + + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: true, + } + + jsEngine, err := newJSEngine(jsConfig) + if err != nil { + b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_JSEngine_Passthrough(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function identity(x) { + return x; +} +` + src := 
base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + } + + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: false, + } + + jsEngine, err := newJSEngine(jsConfig) + if err != nil { + b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_JSEngine_PassthroughSpMode(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function identity(x) { + return x; +} +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := &models.Message{ + Data: testJsTsv, + PartitionKey: "some-test-key", + } + + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: false, + } + + jsEngine, err := newJSEngine(jsConfig) + if err != nil { + b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_JSEngine_Passthrough_JsJson(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function jsonIdentity(x) { + var jsonObj = JSON.parse(x.Data); + var result = JSON.stringify(jsonObj); + + return { + Data: result + }; +} +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + inputMsg := &models.Message{ + Data: testJsJSON, + PartitionKey: "some-test-key", + } + + jsConfig := &jsEngineConfig{ + SourceB64: src, + RunTimeout: 5, + DisableSourceMaps: false, + } + + jsEngine, err := newJSEngine(jsConfig) + if err != nil { + b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + } + + // not Smoke-Tested + transFunction := jsEngine.MakeFunction("jsonIdentity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +// Test helpers +func 
testJSEngineAdapter(f func(c *jsEngineConfig) (*jsEngineConfig, error)) jsEngineAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*jsEngineConfig) + if !ok { + return nil, fmt.Errorf("invalid input, expected jsEngineConfig") + } + + return f(cfg) + } + +} + +func testJSEngineFunc(c *jsEngineConfig) (*jsEngineConfig, error) { + return c, nil +} + +// Helper function to compare messages and avoid using reflect.DeepEqual +// on errors. Compares all but the error field of messages. +func assertMessagesCompareJs(t *testing.T, act, exp *models.Message) { + t.Helper() + + ok := false + switch { + case act == nil: + ok = exp == nil + case exp == nil: + default: + pkOk := act.PartitionKey == exp.PartitionKey + dataOk := reflect.DeepEqual(act.Data, exp.Data) + cTimeOk := reflect.DeepEqual(act.TimeCreated, exp.TimeCreated) + pTimeOk := reflect.DeepEqual(act.TimePulled, exp.TimePulled) + tTimeOk := reflect.DeepEqual(act.TimeTransformed, exp.TimeTransformed) + ackOk := reflect.DeepEqual(act.AckFunc, exp.AckFunc) + + if pkOk && dataOk && cTimeOk && pTimeOk && tTimeOk && ackOk { + ok = true + } + } + + if !ok { + t.Errorf("\nGOT:\n%s\nEXPECTED:\n%s\n", + spew.Sdump(act), + spew.Sdump(exp)) + } +} + +// helper variables +var testJsDvceCreatedTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35.551") +var testJsEtlTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:37.436") +var testJsDerivedTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35.972") +var testJsCollectorTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35.972") +var testJsDvceSentTstamp, _ = time.Parse("2006-01-02 15:04:05.999", "2019-05-10 14:40:35") +var testJSMap = map[string]interface{}{ + "event_version": "1-0-0", + "app_id": "test-data<>", + "dvce_created_tstamp": testJsDvceCreatedTstamp, + "event": "unstruct", + "v_collector": "ssc-0.15.0-googlepubsub", + "network_userid": "d26822f5-52cc-4292-8f77-14ef6b7a27e2", 
+ "event_name": "add_to_cart", + "event_vendor": "com.snowplowanalytics.snowplow", + "event_format": "jsonschema", + "platform": "pc", + "etl_tstamp": testJsEtlTstamp, + "collector_tstamp": testJsCollectorTstamp, + "user_id": "user", + "dvce_sent_tstamp": testJsDvceSentTstamp, + "derived_tstamp": testJsDerivedTstamp, + "event_id": "e9234345-f042-46ad-b1aa-424464066a33", + "v_tracker": "py-0.8.2", + "v_etl": "beam-enrich-0.2.0-common-0.36.0", + "user_ipaddress": "1.2.3.4", + "unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1": map[string]interface{}{ + "quantity": float64(2), + "unitPrice": 32.4, + "currency": "GBP", + "sku": "item41", + }, + "contexts_nl_basjes_yauaa_context_1": []interface{}{ + map[string]interface{}{ + "deviceName": "Unknown", + "layoutEngineVersionMajor": "??", + "operatingSystemName": "Unknown", + "deviceClass": "Unknown", + "agentVersion": "2.21.0", + "layoutEngineName": "Unknown", + "layoutEngineClass": "Unknown", + "agentName": "python-requests", + "agentNameVersion": "python-requests 2.21.0", + "operatingSystemVersion": "??", + "agentClass": "Special", + "deviceBrand": "Unknown", + "agentVersionMajor": "2", + "agentNameVersionMajor": "python-requests 2", + "operatingSystemClass": "Unknown", + "layoutEngineVersion": "??", + }, + }, + "useragent": "python-requests/2.21.0", +} + +var testJsTsv = []byte(`test-data<> pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 1.2.3.4 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 
{"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) + +// corresponding JSON to previous TSV +var testJsJSON = []byte(`{"app_id":"test-data<>","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +// json's changed and stringified inside JS +var testJsJSONChanged1 = []byte(`{"app_id_CHANGED":"test-data<>","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +var testJsJSONChanged2 = []byte(`{"collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2","app_id_CHANGED":"test-data<>"}`) diff --git a/pkg/transform/engine_lua.go b/pkg/transform/engine_lua.go new file mode 100644 index 00000000..bd1f5ba5 --- /dev/null +++ b/pkg/transform/engine_lua.go @@ -0,0 +1,427 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package transform + +import ( + "context" + "encoding/base64" + "fmt" + "strings" + "time" + + gojson "github.com/goccy/go-json" + "github.com/mitchellh/mapstructure" + "github.com/yuin/gluamapper" + lua "github.com/yuin/gopher-lua" + luaparse "github.com/yuin/gopher-lua/parse" + luajson "layeh.com/gopher-json" + + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +// luaEngineConfig configures the Lua Engine. 
+type luaEngineConfig struct { + SourceB64 string `hcl:"source_b64" env:"TRANSFORMATION_LUA_SOURCE_B64"` + RunTimeout int `hcl:"timeout_sec,optional" env:"TRANSFORMATION_LUA_TIMEOUT_SEC"` + Sandbox bool `hcl:"sandbox,optional" env:"TRANSFORMATION_LUA_SANDBOX"` + SpMode bool `hcl:"snowplow_mode,optional" env:"TRANSFORMATION_LUA_SNOWPLOW_MODE"` +} + +// luaEngine handles the provision of a Lua runtime to run transformations. +type luaEngine struct { + Code *lua.FunctionProto + RunTimeout time.Duration + Options *lua.Options + SpMode bool +} + +// newLuaEngine returns a Lua Engine from a luaEngineConfig. +func newLuaEngine(c *luaEngineConfig) (*luaEngine, error) { + luaSrc, err := base64.StdEncoding.DecodeString(c.SourceB64) + if err != nil { + return nil, err + } + + compiledCode, err := compileLuaCode(string(luaSrc), c.SourceB64) + if err != nil { + return nil, err + } + + eng := &luaEngine{ + Code: compiledCode, + RunTimeout: time.Duration(c.RunTimeout) * time.Second, + Options: &lua.Options{SkipOpenLibs: c.Sandbox}, + SpMode: c.SpMode, + } + + return eng, nil +} + +// The luaEngineAdapter type is an adapter for functions to be used as +// pluggable components for Lua Engine. It implements the Pluggable interface. +type luaEngineAdapter func(i interface{}) (interface{}, error) + +// Create implements the ComponentCreator interface. +func (f luaEngineAdapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +// ProvideDefault implements the ComponentConfigurable interface. +func (f luaEngineAdapter) ProvideDefault() (interface{}, error) { + // Provide defaults for the optional parameters + // whose default is not their zero value. + cfg := &luaEngineConfig{ + RunTimeout: 5, + Sandbox: true, + } + + return cfg, nil +} + +// adaptLuaEngineFunc returns a luaEngineAdapter. 
+func adaptLuaEngineFunc(f func(c *luaEngineConfig) (*luaEngine, error)) luaEngineAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*luaEngineConfig) + if !ok { + return nil, fmt.Errorf("invalid input, expected luaEngineConfig") + } + + return f(cfg) + } +} + +// LuaLayer returns the Pluggable transformation layer implemented in Lua. +func LuaLayer() interface{} { + return adaptLuaEngineFunc(newLuaEngine) +} + +// SmokeTest implements SmokeTester. +func (e *luaEngine) SmokeTest(funcName string) error { + // setup the Lua state + L := lua.NewState(*e.Options) // L is ptr + defer L.Close() + + d := time.Now().Add(e.RunTimeout) + ctx, cancel := context.WithDeadline(context.Background(), d) + defer cancel() + L.SetContext(ctx) + + return initVM(e, L, funcName) +} + +// MakeFunction implements FunctionMaker. +func (e *luaEngine) MakeFunction(funcName string) TransformationFunction { + + return func(message *models.Message, interState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { + // making input + input, err := mkLuaEngineInput(e, message, interState) + if err != nil { + message.SetError(fmt.Errorf("failed making input for the Lua runtime: %q", err.Error())) + return nil, nil, message, nil + } + + // setup the Lua state + L := lua.NewState(*e.Options) + defer L.Close() + + d := time.Now().Add(e.RunTimeout) + ctx, cancel := context.WithDeadline(context.Background(), d) + defer cancel() + L.SetContext(ctx) + + err = initVM(e, L, funcName) + if err != nil { + message.SetError(fmt.Errorf("failed initializing Lua runtime: %q", err.Error())) + return nil, nil, message, nil + } + + // running + err = L.CallByParam(lua.P{ + Fn: L.GetGlobal(funcName), // name of Lua function + NRet: 1, // num of return values + Protect: true, // don't panic + }, input) + if err != nil { + // runtime error counts as failure + runErr := fmt.Errorf("error running Lua function %q: %q", funcName, err.Error()) + message.SetError(runErr) + 
return nil, nil, message, nil + } + + // validating output + protocol, err := validateLuaEngineOut(L.Get(-1)) + if err != nil { + message.SetError(err) + return nil, nil, message, nil + } + + // filtering - keeping same behaviour with spEnrichedFilter + if protocol.FilterOut == true { + return nil, message, nil, nil + } + + // handling data + encode := false + switch protoData := protocol.Data.(type) { + case string: + message.Data = []byte(protoData) + case map[string]interface{}: + encode = true + case map[interface{}]interface{}: + encode = true + siData := toStringIfaceMap(protoData) + protocol.Data = siData + default: + message.SetError(fmt.Errorf("invalid return type from Lua transformation; expected string or table")) + return nil, nil, message, nil + } + + // encode + if encode { + encoded, err := gojson.MarshalWithOption(protocol.Data, gojson.DisableHTMLEscape()) + if err != nil { + message.SetError(fmt.Errorf("error encoding message data")) + return nil, nil, message, nil + } + message.Data = encoded + } + + // setting pk if needed + pk := protocol.PartitionKey + if pk != "" && message.PartitionKey != pk { + message.PartitionKey = pk + } + + return message, nil, nil, protocol + + } +} + +// compileLuaCode compiles lua code. +// Since lua.NewState is not goroutine-safe, we spin a new state for every +// transformation. The reason for this function is to allow us to at least share +// the compiled bytecode (which is read-only and thus safe) and so run only once +// the load, parse and compile steps, which are implicitly run by the alternative +// lua.DoString. 
+// see also: +// https://github.com/yuin/gopher-lua/pull/193 +// https://github.com/yuin/gopher-lua#sharing-lua-byte-code-between-lstates +func compileLuaCode(code, name string) (*lua.FunctionProto, error) { + reader := strings.NewReader(code) + chunk, err := luaparse.Parse(reader, code) + if err != nil { + return nil, err + } + proto, err := lua.Compile(chunk, name) + if err != nil { + return nil, err + } + return proto, nil +} + +// loadLuaCode loads compiled Lua code into a lua state +func loadLuaCode(ls *lua.LState, proto *lua.FunctionProto) error { + lfunc := ls.NewFunctionFromProto(proto) + ls.Push(lfunc) + + // https://github.com/yuin/gopher-lua/blob/f4c35e4016d9d8580b007ebaeb68ecd8e0b09f1c/_state.go#L1811 + return ls.PCall(0, lua.MultRet, nil) +} + +// initVM performs the initialization steps for a Lua state. +func initVM(e *luaEngine, L *lua.LState, funcName string) error { + if e.Options.SkipOpenLibs == false { + luajson.Preload(L) + } + + err := loadLuaCode(L, e.Code) + if err != nil { + return fmt.Errorf("could not load lua code: %q", err) + } + + if _, ok := L.GetGlobal(funcName).(*lua.LFunction); !ok { + return fmt.Errorf("global Lua function not found: %q", funcName) + } + + return nil +} + +// mkLuaEngineInput describes the process of constructing input to Lua engine. +// No side effects. 
+func mkLuaEngineInput(e *luaEngine, message *models.Message, interState interface{}) (*lua.LTable, error) { + if interState != nil { + if i, ok := interState.(*EngineProtocol); ok { + return toLuaTable(i) + } + } + + candidate := &EngineProtocol{ + Data: string(message.Data), + } + + if !e.SpMode { + return toLuaTable(candidate) + } + + parsedMessage, err := intermediateAsSpEnrichedParsed(interState, message) + if err != nil { + // if spMode, error for non Snowplow enriched event data + return nil, err + } + + spMap, err := parsedMessage.ToMap() + if err != nil { + return nil, err + } + candidate.Data = spMap + + return toLuaTable(candidate) +} + +// toLuaTable +func toLuaTable(p *EngineProtocol) (*lua.LTable, error) { + var tmpMap map[string]interface{} + + err := mapstructure.Decode(p, &tmpMap) + if err != nil { + return nil, fmt.Errorf("error decoding to map") + } + + return mapToLTable(tmpMap) +} + +// mapToLTable converts a Go map to a lua table +// see: https://github.com/yuin/gopher-lua/issues/160#issuecomment-447608033 +func mapToLTable(m map[string]interface{}) (*lua.LTable, error) { + timeLayout := "2006-01-02T15:04:05.999Z07:00" + + // Main table pointer + ltbl := &lua.LTable{} + + // Loop map + for key, val := range m { + + switch val.(type) { + case float64: + ltbl.RawSetString(key, lua.LNumber(val.(float64))) + case int64: + ltbl.RawSetString(key, lua.LNumber(val.(int64))) + case string: + ltbl.RawSetString(key, lua.LString(val.(string))) + case bool: + ltbl.RawSetString(key, lua.LBool(val.(bool))) + case []byte: + ltbl.RawSetString(key, lua.LString(string(val.([]byte)))) + case map[string]interface{}: + // Get table from map + tmp, err := mapToLTable(val.(map[string]interface{})) + if err != nil { + return nil, err + } + ltbl.RawSetString(key, tmp) + case time.Time: + t := val.(time.Time).Format(timeLayout) + ltbl.RawSetString(key, lua.LString(t)) + case []map[string]interface{}: + // Create slice table + sliceTable := &lua.LTable{} + for _, vv := 
range val.([]map[string]interface{}) { + next, err := mapToLTable(vv) + if err != nil { + return nil, err + } + sliceTable.Append(next) + } + ltbl.RawSetString(key, sliceTable) + case []interface{}: + // Create slice table + sliceTable := &lua.LTable{} + for _, vv := range val.([]interface{}) { + switch vv.(type) { + case map[string]interface{}: + // Convert map to table + m, err := mapToLTable(vv.(map[string]interface{})) + if err != nil { + return nil, err + } + sliceTable.Append(m) + case float64: + sliceTable.Append(lua.LNumber(vv.(float64))) + case string: + sliceTable.Append(lua.LString(vv.(string))) + case bool: + sliceTable.Append(lua.LBool(vv.(bool))) + } + } + + // Append to main table + ltbl.RawSetString(key, sliceTable) + } + } + + return ltbl, nil +} + +// validateLuaEngineOut validates the value returned from the Lua engine is a +// Lua Table (lua.LTable) and that it maps to EngineProtocol. +func validateLuaEngineOut(output interface{}) (*EngineProtocol, error) { + if output == nil { + return nil, fmt.Errorf("invalid return type from Lua transformation; got nil") + } + + if luaTablePtr, ok := output.(*lua.LTable); ok { + result := &EngineProtocol{} + luaMapper := gluamapper.NewMapper(gluamapper.Option{ + NameFunc: gluamapper.Id, + }) + + err := luaMapper.Map(luaTablePtr, result) + if err != nil { + return nil, fmt.Errorf("protocol violation in return value from Lua transformation") + } + + return result, nil + } + + return nil, fmt.Errorf("invalid return type from Lua transformation; expected Lua Table") +} + +// toStringIfaceMap converts map[interface{}]interface{} to map[string]interface. +// This function is used in Lua Engine because of how gluamapper actually maps +// lua.LTable to Go map. 
+// see:https://github.com/yuin/gluamapper/blob/d836955830e75240d46ce9f0e6d148d94f2e1d3a/gluamapper.go#L44 +func toStringIfaceMap(interfaceMap map[interface{}]interface{}) map[string]interface{} { + result := make(map[string]interface{}) + for key, val := range interfaceMap { + result[fmt.Sprintf("%v", key)] = doValue(val) + } + + return result +} + +// doValue is a helper for toStringIfaceMap, to cover for values that are +// []interface{} and map[interface{}]interface. +func doValue(value interface{}) interface{} { + switch value := value.(type) { + case []interface{}: + return doIfaceSlice(value) + case map[interface{}]interface{}: + return toStringIfaceMap(value) + default: + return value + } +} + +// doIfaceSlice is a helper for doValue to handle interface slices. +func doIfaceSlice(iSlice []interface{}) []interface{} { + result := make([]interface{}, len(iSlice)) + for i, val := range iSlice { + result[i] = doValue(val) + } + + return result +} diff --git a/pkg/transform/engine_lua_test.go b/pkg/transform/engine_lua_test.go new file mode 100644 index 00000000..a1238ffd --- /dev/null +++ b/pkg/transform/engine_lua_test.go @@ -0,0 +1,1958 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package transform + +import ( + "encoding/base64" + "fmt" + "path/filepath" + "reflect" + "strings" + "testing" + + "github.com/davecgh/go-spew/spew" + "github.com/stretchr/testify/assert" + + config "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +func TestLuaEngineConfig_ENV(t *testing.T) { + testCases := []struct { + Name string + Plug config.Pluggable + Expected interface{} + }{ + { + Name: "transform-lua-from-env", + Plug: testLuaEngineAdapter(testLuaEngineFunc), + Expected: &luaEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", + RunTimeout: 10, + Sandbox: false, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") + + t.Setenv("MESSAGE_TRANSFORMATION", "lua:fun") + t.Setenv("TRANSFORMATION_LAYER_NAME", "lua") + + t.Setenv("TRANSFORMATION_LUA_SOURCE_B64", "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ") + t.Setenv("TRANSFORMATION_LUA_TIMEOUT_SEC", "10") + t.Setenv("TRANSFORMATION_LUA_SANDBOX", "false") + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + engine := c.Data.Transform.Layer + decoderOpts := &config.DecoderOptions{ + Input: engine.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestLuaEngineConfig_HCL(t *testing.T) { + fixturesDir := "../../config/test-fixtures" + testCases := []struct { + File string + Plug config.Pluggable + Expected interface{} + }{ + { + File: "transform-lua-simple.hcl", + Plug: testLuaEngineAdapter(testLuaEngineFunc), + Expected: &luaEngineConfig{ + SourceB64: 
"CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", + RunTimeout: 5, + Sandbox: true, + }, + }, + { + File: "transform-lua-extended.hcl", + Plug: testLuaEngineAdapter(testLuaEngineFunc), + Expected: &luaEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", + RunTimeout: 10, + Sandbox: false, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.File, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(fixturesDir, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + engine := c.Data.Transform.Layer + decoderOpts := &config.DecoderOptions{ + Input: engine.Body, + } + + result, err := c.CreateComponent(tt.Plug, decoderOpts) + assert.NotNil(result) + assert.Nil(err) + + if !reflect.DeepEqual(result, tt.Expected) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(result), + spew.Sdump(tt.Expected)) + } + }) + } +} + +func TestLuaLayer(t *testing.T) { + layer := LuaLayer() + if _, ok := layer.(config.Pluggable); !ok { + t.Errorf("invalid interface returned from LuaLayer") + } +} + +func TestLuaEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode bool = false + testCases := []struct { + Src string + FunName string + Sandbox bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: 
"asdf", + }, + Error: nil, + }, + { + Src: ` +function concatHello(x) + x.Data = "Hello:" .. x.Data + return x +end +`, + FunName: "concatHello", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("Hello:asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "Hello:asdf", + }, + Error: nil, + }, + { + Src: ` +function filterIn(x) + x.FilterOut = false + return x +end +`, + FunName: "filterIn", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: "asdf", + }, + Error: nil, + }, + { + Src: ` +function filterOut(x) + if type(x.Data) == "string" then + return { FilterOut = true } + end + return { FilterOut = false } +end +`, + FunName: "filterOut", + Sandbox: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +local json = require("json") + +function jsonIdentity(x) + local dat = x["Data"] + local jsonObj, decodeErr = json.decode(dat) + if decodeErr then error(decodeErr) end + + local result, encodeErr = json.encode(jsonObj) + if encodeErr then error(encodeErr) end + + x.Data = result + return x +end +`, + FunName: "jsonIdentity", + Sandbox: false, + Input: &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: 
snowplowJSON1, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(snowplowJSON1), + }, + Error: nil, + }, + { + Src: ` +local json = require("json") + +function jsonTransformFieldName(x) + local data = x["Data"] + local jsonObj, decodeErr = json.decode(data) + if decodeErr then error(decodeErr) end + + jsonObj["app_id_CHANGED"] = jsonObj["app_id"] + jsonObj["app_id"] = nil + + local result, encodeErr = json.encode(jsonObj) + if encodeErr then error(encodeErr) end + + return { Data = result } +end +`, + FunName: "jsonTransformFieldName", + Sandbox: false, + Input: &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: snowplowJSON1ChangedLua, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(snowplowJSON1ChangedLua), + }, + Error: nil, + }, + { + Src: ` +local json = require("json") + +function jsonFilterOut(x) + local jsonObj, decodeErr = json.decode(x["Data"]) + if decodeErr then error(decodeErr) end + + if jsonObj["app_id"] == "filterMeOut" then + return { FilterOut = false, Data = x["Data"] } + else + return { FilterOut = true } + end +end +`, + FunName: "jsonFilterOut", + Sandbox: false, + Input: &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: snowplowJSON1, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Src: ` +function retWrongType(x) + return 0 +end +`, + FunName: "retWrongType", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + 
Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; expected Lua Table"), + }, + { + Src: ` +function noReturn(x) +end +`, + FunName: "noReturn", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; expected Lua Table"), + }, + { + Src: ` +function returnNil(x) + return nil +end +`, + FunName: "returnNil", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; expected Lua Table"), + }, + { + Src: ` +function causeRuntimeError(x) + return 2 * x +end +`, + FunName: "causeRuntimeError", + Sandbox: true, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("error running Lua function \"causeRuntimeError\""), + }, + { + Src: ` +function callError(x) + error("Failed") +end +`, + FunName: "callError", + Sandbox: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("error running Lua function \"callError\""), + }, + { + 
Src: ` +local clock = os.clock + +function sleepTenSecs(x) + local t0 = clock() + while clock() - t0 <= 10 do end +end +`, + FunName: "sleepTenSecs", + Sandbox: false, + Input: &models.Message{ + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: []byte("asdf"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("context deadline exceeded"), + }, + } + + for _, tt := range testCases { + t.Run(tt.FunName, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := newLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { + var testInterState interface{} = nil + var testSpMode bool = true + testCases := []struct { + Scenario 
string + Src string + FunName string + Sandbox bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "identity", + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "filtering", + Src: ` +function filterOut(input) + -- input is a lua table + local spData = input["Data"] + if spData["app_id"] == "myApp" then + return input; + end + return { FilterOut = true } +end +`, + FunName: "filterOut", + Sandbox: false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: "filteringOut_ignoresData", + Src: ` +function filterOutIgnores(x) + local ret = { + FilterOut = true, + Data = "shouldNotAppear", + PartitionKey = "notThis" + } + return ret +end +`, + FunName: "filterOutIgnores", + Sandbox: false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + { + Scenario: "non_Snowplow_enriched_to_failed", + Src: ` +function willNotRun(x) + return x +end +`, + FunName: "willNotRun", + Sandbox: false, + Input: &models.Message{ + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + 
"success": nil, + "filtered": nil, + "failed": { + Data: []byte("nonSpEnrichedEvent"), + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "return_wrong_type", + Src: ` +function returnWrongType(x) + return 0 +end +`, + FunName: "returnWrongType", + Sandbox: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("invalid return type from Lua transformation; expected Lua Table"), + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := newLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, 
tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { + testSpMode := false + testCases := []struct { + Scenario string + Src string + FunName string + Sandbox bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_nonSpEnriched", + Src: ` +function identity(x) + return x; +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + 
"failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_not_EngineProtocol_SpEnriched", + Src: ` +function identity(x) + return x; +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaTsv), + }, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := newLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, 
tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { + testSpMode := true + + testCases := []struct { + Scenario string + Src string + FunName string + Sandbox bool + Input *models.Message + InterState interface{} + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "intermediateState_EngineProtocol_Map", + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "intermediateState_EngineProtocol_String", + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: string(testLuaJSON), + }, + Error: nil, + }, + { + Scenario: "intermediateState_notEngineProtocol_notSpEnriched", + Src: ` +function willNotRun(x) + return x +end +`, + FunName: "willNotRun", + Sandbox: true, + Input: &models.Message{ + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": nil, + "filtered": nil, + "failed": { + Data: testLuaJSON, + PartitionKey: 
"some-test-key", + }, + }, + ExpInterState: nil, + Error: fmt.Errorf("Cannot parse"), + }, + { + Scenario: "intermediateState_notEngineProtocol_SpEnriched", + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "some-test-key", + }, + InterState: "notEngineProtocol", + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "some-test-key", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "", + Data: testLuaMap, + }, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: testSpMode, + } + + luaEngine, err := newLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, tt.InterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func 
TestLuaEngineMakeFunction_SetPK(t *testing.T) { + var testInterState interface{} = nil + testCases := []struct { + Scenario string + Src string + FunName string + Sandbox bool + SpMode bool + Input *models.Message + Expected map[string]*models.Message + ExpInterState interface{} + Error error + }{ + { + Scenario: "onlySetPk_spModeTrue", + Src: ` +function onlySetPk(x) + x["PartitionKey"] = "newPk" + return x +end +`, + FunName: "onlySetPk", + Sandbox: true, + SpMode: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "oldPK", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaJSON, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "newPk", + Data: testLuaMap, + }, + Error: nil, + }, + { + Scenario: "onlySetPk_spModeFalse", + Src: ` +function onlySetPk(x) + x["PartitionKey"] = "newPk" + return x +end +`, + FunName: "onlySetPk", + Sandbox: true, + SpMode: false, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "oldPK", + }, + Expected: map[string]*models.Message{ + "success": { + Data: testLuaTsv, + PartitionKey: "newPk", + }, + "filtered": nil, + "failed": nil, + }, + ExpInterState: &EngineProtocol{ + FilterOut: false, + PartitionKey: "newPk", + Data: string(testLuaTsv), + }, + Error: nil, + }, + { + Scenario: "filterOutIgnores", + Src: ` +function filterOutIgnores(x) + local ret = { + FilterOut = true, + Data = "shouldNotAppear", + PartitionKey = "notThis" + } + return ret +end +`, + FunName: "filterOutIgnores", + Sandbox: true, + SpMode: true, + Input: &models.Message{ + Data: testLuaTsv, + PartitionKey: "oldPk", + }, + Expected: map[string]*models.Message{ + "success": nil, + "filtered": { + Data: testLuaTsv, + PartitionKey: "oldPk", + }, + "failed": nil, + }, + ExpInterState: nil, + Error: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Scenario, func(t *testing.T) { + assert := assert.New(t) + + src := 
base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + SpMode: tt.SpMode, + } + + luaEngine, err := newLuaEngine(luaConfig) + assert.NotNil(luaEngine) + if err != nil { + t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + if err := luaEngine.SmokeTest(tt.FunName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction(tt.FunName) + s, f, e, i := transFunction(tt.Input, testInterState) + + if !reflect.DeepEqual(i, tt.ExpInterState) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(i), + spew.Sdump(tt.ExpInterState)) + } + + if e != nil { + gotErr := e.GetError() + expErr := tt.Error + if expErr == nil { + t.Fatalf("got unexpected error: %s", gotErr.Error()) + } + + if !strings.Contains(gotErr.Error(), expErr.Error()) { + t.Errorf("GOT_ERROR:\n%s\n does not contain\nEXPECTED_ERROR:\n%s", + gotErr.Error(), + expErr.Error()) + } + } + + assertMessagesCompareLua(t, s, tt.Expected["success"]) + assertMessagesCompareLua(t, f, tt.Expected["filtered"]) + assertMessagesCompareLua(t, e, tt.Expected["failed"]) + }) + } +} + +func TestLuaEngineSmokeTest(t *testing.T) { + testCases := []struct { + Src string + FunName string + Sandbox bool + CompileError error + SmokeError error + }{ + { + Src: ` +function identity(x) + return x +end +`, + FunName: "identity", + Sandbox: true, + CompileError: nil, + SmokeError: nil, + }, + { + Src: ` +function notThisOne(x) + return "something" +end +`, + FunName: "notExists", + Sandbox: true, + CompileError: nil, + SmokeError: fmt.Errorf("global Lua function not found"), + }, + { + Src: ` +local json = require("json") +local clock = os.clock +`, + FunName: "notCalledMissingLibs", + Sandbox: true, + CompileError: nil, + SmokeError: fmt.Errorf("could not load lua code"), + }, + { + Src: ` +function syntaxError(x) + loca y = 0 +end +`, + FunName: "syntaxError", + 
Sandbox: false, + CompileError: fmt.Errorf("error"), + SmokeError: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.FunName, func(t *testing.T) { + assert := assert.New(t) + + src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: tt.Sandbox, + } + + luaEngine, compileErr := newLuaEngine(luaConfig) + + if compileErr != nil { + if tt.CompileError == nil { + t.Fatalf("got unexpected error while creating newLuaEngine: %s", compileErr.Error()) + } + + if !strings.Contains(compileErr.Error(), tt.CompileError.Error()) { + t.Errorf("newLuaEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", + compileErr.Error(), + tt.CompileError.Error()) + } + } else { + assert.NotNil(luaEngine) + + smoke := luaEngine.SmokeTest(tt.FunName) + expErr := tt.SmokeError + if smoke != nil { + if expErr == nil { + t.Fatalf("got unexpected smoke-test error: %q", smoke.Error()) + } + + if !strings.Contains(smoke.Error(), expErr.Error()) { + t.Errorf("smoke error mismatch\nGOT_ERROR:\n%q\ndoes not contain\nEXPECTED_ERROR:\n%q", + smoke.Error(), + expErr.Error()) + } + } else { + assert.Nil(tt.SmokeError) + } + } + }) + } +} + +func TestLuaEngineWithBuiltins(t *testing.T) { + var expectedGood = []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + } + + srcCode := ` +function identity(x) + return x +end +` + funcName := "identity" + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: true, + } + + luaEngine, err := newLuaEngine(luaConfig) + if err != nil { + t.Fatalf("newLuaEngine failed with error: %q", err) + } + + if err := luaEngine.SmokeTest(funcName); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + luaFunc := 
luaEngine.MakeFunction(funcName) + setPkToAppID := NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation TransformationApplyFunction + }{ + { + Name: "first", + Transformation: NewTransformation( + setPkToAppID, + spEnrichedToJSON, + luaFunc, + ), + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + transformMultiple := tt.Transformation + + result := transformMultiple(messages) + assert.NotNil(result) + for i, res := range result.Result { + exp := expectedGood[i] + if !reflect.DeepEqual(res.Data, exp.Data) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(res.Data), + spew.Sdump(exp.Data)) + } + assert.Equal(res.PartitionKey, exp.PartitionKey) + + } + }) + } + +} + +func TestLuaEngineWithBuiltinsSpModeFalse(t *testing.T) { + srcCode := ` +function identity(x) + return x +end + +function setPk(x) + x["PartitionKey"] = "testKey" + return x +end +` + // Lua + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: true, + SpMode: false, + } + + luaEngine, err := newLuaEngine(luaConfig) + if err != nil { + t.Fatalf("newLuaEngine failed with error: %q", err) + } + + if err := luaEngine.SmokeTest("identity"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + if err := luaEngine.SmokeTest("setPk"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + luaFuncID := luaEngine.MakeFunction("identity") + luaFuncPk := luaEngine.MakeFunction("setPk") + + // Builtins + setPkToAppID := NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation TransformationApplyFunction + Input []*models.Message + ExpectedGood []*models.Message + }{ + { + Name: "identity0", + Input: messages, + Transformation: NewTransformation( + luaFuncID, + 
setPkToAppID, + spEnrichedToJSON, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + }, + }, + { + Name: "identity2", + Input: messages, + Transformation: NewTransformation( + setPkToAppID, + spEnrichedToJSON, + luaFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "test-data1", + }, + { + Data: snowplowJSON2, + PartitionKey: "test-data2", + }, + { + Data: snowplowJSON3, + PartitionKey: "test-data3", + }, + }, + }, + { + Name: "setPk1", + Input: messages, + Transformation: NewTransformation( + setPkToAppID, + luaFuncPk, + spEnrichedToJSON, + ), + ExpectedGood: []*models.Message{ + { + Data: snowplowJSON1, + PartitionKey: "testKey", + }, + { + Data: snowplowJSON2, + PartitionKey: "testKey", + }, + { + Data: snowplowJSON3, + PartitionKey: "testKey", + }, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + result := tt.Transformation(tt.Input) + assert.NotNil(result) + assert.Equal(len(tt.ExpectedGood), len(result.Result)) + for i, res := range result.Result { + if i < len(tt.ExpectedGood) { + exp := tt.ExpectedGood[i] + if !reflect.DeepEqual(res.Data, exp.Data) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(res.Data), + spew.Sdump(exp.Data)) + } + assert.Equal(res.PartitionKey, exp.PartitionKey) + } + } + }) + } +} + +func TestLuaEngineWithBuiltinsSpModeTrue(t *testing.T) { + srcCode := ` +function identity(x) + return x +end + +function setPk(x) + x["PartitionKey"] = "testKey" + return x +end +` + // Lua + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 1, + Sandbox: true, + SpMode: true, + } + + luaEngine, err := newLuaEngine(luaConfig) + if err != nil { + t.Fatalf("newLuaEngine failed with error: %q", err) + 
} + + if err := luaEngine.SmokeTest("identity"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + if err := luaEngine.SmokeTest("setPk"); err != nil { + t.Fatalf("smoke-test failed with error: %q", err.Error()) + } + + luaFuncID := luaEngine.MakeFunction("identity") + luaFuncPk := luaEngine.MakeFunction("setPk") + + // Builtins + setPkToAppID := NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := SpEnrichedToJSON + + testCases := []struct { + Name string + Transformation TransformationApplyFunction + Input []*models.Message + ExpectedGood []*models.Message + }{ + { + Name: "identity", + Input: []*models.Message{ + { + Data: testLuaTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: NewTransformation( + setPkToAppID, + spEnrichedToJSON, + luaFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: testLuaJSON, + PartitionKey: "test-data<>", + }, + }, + }, + { + Name: "setPk", + Input: []*models.Message{ + { + Data: testLuaTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: NewTransformation( + setPkToAppID, + luaFuncPk, + ), + ExpectedGood: []*models.Message{ + { + Data: testLuaJSON, + PartitionKey: "testKey", + }, + }, + }, + { + Name: "mix", + Input: []*models.Message{ + { + Data: testLuaTsv, + PartitionKey: "prevKey", + }, + }, + Transformation: NewTransformation( + setPkToAppID, + luaFuncID, + luaFuncPk, + luaFuncID, + ), + ExpectedGood: []*models.Message{ + { + Data: testLuaJSON, + PartitionKey: "testKey", + }, + }, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + result := tt.Transformation(tt.Input) + assert.NotNil(result) + assert.Equal(len(tt.ExpectedGood), len(result.Result)) + for i, res := range result.Result { + if i < len(tt.ExpectedGood) { + exp := tt.ExpectedGood[i] + if !reflect.DeepEqual(res.Data, exp.Data) { + t.Errorf("GOT:\n%s\nEXPECTED:\n%s", + spew.Sdump(res.Data), + spew.Sdump(exp.Data)) + } + assert.Equal(res.PartitionKey, 
exp.PartitionKey) + } + } + }) + } +} + +func Benchmark_LuaEngine_Passthrough_Sandboxed(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function identity(x) + return x +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + inputMsg := &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + } + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: true, + } + + luaEngine, err := newLuaEngine(luaConfig) + if err != nil { + b.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_LuaEngine_Passthrough(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function identity(x) + return x +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + inputMsg := &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + } + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + luaEngine, err := newLuaEngine(luaConfig) + if err != nil { + b.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction("identity") + + for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +func Benchmark_LuaEngine_Passthrough_Json(b *testing.B) { + b.ReportAllocs() + + srcCode := ` +function jsonIdentity(x) + local jsonObj, _ = json.decode(x) + local result, _ = json.encode(jsonObj) + + return result +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + inputMsg := &models.Message{ + Data: snowplowJSON1, + PartitionKey: "some-test-key", + } + luaConfig := &luaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + luaEngine, err := newLuaEngine(luaConfig) + if err != nil { + b.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + } + + transFunction := luaEngine.MakeFunction("jsonIdentity") + + 
for n := 0; n < b.N; n++ { + transFunction(inputMsg, nil) + } +} + +// Test helpers +func testLuaEngineAdapter(f func(c *luaEngineConfig) (*luaEngineConfig, error)) luaEngineAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*luaEngineConfig) + if !ok { + return nil, fmt.Errorf("invalid input, expected luaEngineConfig") + } + + return f(cfg) + } + +} + +func testLuaEngineFunc(c *luaEngineConfig) (*luaEngineConfig, error) { + + return c, nil +} + +// Helper function to compare messages and avoid using reflect.DeepEqual +// on errors. Compares all but the error field of messages. +func assertMessagesCompareLua(t *testing.T, act, exp *models.Message) { + t.Helper() + + ok := false + switch { + case act == nil: + ok = exp == nil + case exp == nil: + default: + pkOk := act.PartitionKey == exp.PartitionKey + dataOk := reflect.DeepEqual(act.Data, exp.Data) + cTimeOk := reflect.DeepEqual(act.TimeCreated, exp.TimeCreated) + pTimeOk := reflect.DeepEqual(act.TimePulled, exp.TimePulled) + tTimeOk := reflect.DeepEqual(act.TimeTransformed, exp.TimeTransformed) + ackOk := reflect.DeepEqual(act.AckFunc, exp.AckFunc) + + if pkOk && dataOk && cTimeOk && pTimeOk && tTimeOk && ackOk { + ok = true + } + } + + if !ok { + t.Errorf("\nGOT:\n%s\nEXPECTED:\n%s\n", + spew.Sdump(act), + spew.Sdump(exp)) + } +} + +// helper variables +var testLuaTimes = map[string]string{ + "dvceCreatedTstamp": "2019-05-10T14:40:35.551Z", + "etlTstamp": "2019-05-10T14:40:37.436Z", + "derivedTstamp": "2019-05-10T14:40:35.972Z", + "collectorTstamp": "2019-05-10T14:40:35.972Z", + "dvceSentTstamp": "2019-05-10T14:40:35Z", +} + +var testLuaMap = map[string]interface{}{ + "event_version": "1-0-0", + "app_id": "test-data<>", + "dvce_created_tstamp": testLuaTimes["dvceCreatedTstamp"], + "event": "unstruct", + "v_collector": "ssc-0.15.0-googlepubsub", + "network_userid": "d26822f5-52cc-4292-8f77-14ef6b7a27e2", + "event_name": "add_to_cart", + "event_vendor": "com.snowplowanalytics.snowplow", 
+ "event_format": "jsonschema", + "platform": "pc", + "etl_tstamp": testLuaTimes["etlTstamp"], + "collector_tstamp": testLuaTimes["collectorTstamp"], + "user_id": "user", + "dvce_sent_tstamp": testLuaTimes["dvceSentTstamp"], + "derived_tstamp": testLuaTimes["derivedTstamp"], + "event_id": "e9234345-f042-46ad-b1aa-424464066a33", + "v_tracker": "py-0.8.2", + "v_etl": "beam-enrich-0.2.0-common-0.36.0", + "user_ipaddress": "1.2.3.4", + "unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1": map[string]interface{}{ + "quantity": float64(2), + "unitPrice": 32.4, + "currency": "GBP", + "sku": "item41", + }, + "contexts_nl_basjes_yauaa_context_1": []interface{}{ + map[string]interface{}{ + "deviceName": "Unknown", + "layoutEngineVersionMajor": "??", + "operatingSystemName": "Unknown", + "deviceClass": "Unknown", + "agentVersion": "2.21.0", + "layoutEngineName": "Unknown", + "layoutEngineClass": "Unknown", + "agentName": "python-requests", + "agentNameVersion": "python-requests 2.21.0", + "operatingSystemVersion": "??", + "agentClass": "Special", + "deviceBrand": "Unknown", + "agentVersionMajor": "2", + "agentNameVersionMajor": "python-requests 2", + "operatingSystemClass": "Unknown", + "layoutEngineVersion": "??", + }, + }, + "useragent": "python-requests/2.21.0", +} + +var testLuaTsv = []byte(`test-data<> pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 1.2.3.4 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 
{"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) + +// corresponding JSON to previous TSV +var testLuaJSON = []byte(`{"app_id":"test-data<>","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +// json encoded inside Lua +var snowplowJSON1ChangedLua = []byte(`{"app_id_CHANGED":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) diff --git a/pkg/transform/snowplow_enriched_filter_test.go b/pkg/transform/snowplow_enriched_filter_test.go index e8c0f825..ef1da676 100644 --- a/pkg/transform/snowplow_enriched_filter_test.go +++ b/pkg/transform/snowplow_enriched_filter_test.go @@ -192,23 +192,43 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { } func TestNewSpEnrichedFilterFunction_Error(t *testing.T) { - assert := assert.New(t) - error := `invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]` - - filterFunc, err1 := NewSpEnrichedFilterFunction("") + filterError := `invalid filter function config, must be of the format {field name}=={value}[|{value}|...] 
or {field name}!={value}[|{value}|...]` - assert.Nil(filterFunc) - assert.Equal(error, err1.Error()) - - filterFunc, err2 := NewSpEnrichedFilterFunction("app_id==abc|") + testCases := []struct { + Name string + Arg string + }{ + { + Name: "incompatible_arg", + Arg: "incompatibleArg", + }, + { + Name: "empty_arg", + Arg: "", + }, + { + Name: "wrong_arg_pipe", + Arg: "app_id==abc|", + }, + { + Name: "wrong_arg_syntax", + Arg: "!=abc", + }, + } - assert.Nil(filterFunc) - assert.Equal(error, err2.Error()) + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) - filterFunc, err3 := NewSpEnrichedFilterFunction("!=abc") + filterFunc, err := NewSpEnrichedFilterFunction(tt.Arg) - assert.Nil(filterFunc) - assert.Equal(error, err3.Error()) + assert.Nil(filterFunc) + if err == nil { + t.Fatalf("expected error, got nil") + } + assert.Equal(filterError, err.Error()) + }) + } } func TestSpEnrichedFilterFunction_Slice(t *testing.T) { diff --git a/pkg/transform/transformconfig/transform_config.go b/pkg/transform/transformconfig/transform_config.go new file mode 100644 index 00000000..ac2fca1c --- /dev/null +++ b/pkg/transform/transformconfig/transform_config.go @@ -0,0 +1,272 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package transformconfig + +import ( + "fmt" + "strings" + + "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/transform" +) + +// GetTransformations builds and returns transformationApplyFunction +// from the transformations configured. 
+func GetTransformations(c configProvider) (transform.TransformationApplyFunction, error) { + registry, err := getLayerRegistry() + if err != nil { + return nil, err + } + + transMessage := c.ProvideTransformMessage() + transUnits, err := parseTransformations(transMessage) + if err != nil { + return nil, err + } + + funcs := make([]transform.TransformationFunction, 0, len(transUnits)) + for _, trans := range transUnits { + switch trans.name { + // Builtin transformations + case "spEnrichedToJson": + funcs = append(funcs, transform.SpEnrichedToJSON) + case "spEnrichedSetPk": + funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(trans.option)) + case "spEnrichedFilter": + filterFunc, err := transform.NewSpEnrichedFilterFunction(trans.option) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + case "spEnrichedFilterContext": + filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(trans.option) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + case "spEnrichedFilterUnstructEvent": + filterFunc, err := transform.NewSpEnrichedFilterFunctionUnstructEvent(trans.option) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + // Custom transformations + case "lua": + luaFunc, err := mkEngineFunction(c, trans, registry) + if err != nil { + return nil, err + } + funcs = append(funcs, luaFunc) + case "js": + jsFunc, err := mkEngineFunction(c, trans, registry) + if err != nil { + return nil, err + } + funcs = append(funcs, jsFunc) + + // we don't need `case 'none'` or `default` + // (see parseTransformations) + } + } + return transform.NewTransformation(funcs...), nil +} + +// configProvider is the interface a config must implement to configure the +// stream-replicator transformations +type configProvider interface { + ProvideTransformMessage() string + ProvideTransformLayerName() string + ProvideTransformComponent(p config.Pluggable) (interface{}, error) +} + +// transformationUnit is 
a helper struct type for transformations according to +// the transformation message that is being used to configure the sequence of +// transformations. It denotes the distinction we use when we split by ':', +// e.g. 'spEnrichedSetPk:{option}' +type transformationUnit struct { + name string + option string +} + +// layerRegistry is a helper type to map names to the supported Pluggable custom +// transformation layer engines. +type layerRegistry map[string]config.Pluggable + +// getLayerRegistry returns the registry of supported Pluggable transform layers. +func getLayerRegistry() (layerRegistry, error) { + luaLayerPlug, ok := transform.LuaLayer().(config.Pluggable) + if !ok { + return nil, fmt.Errorf("non pluggable lua transformation layer") + } + + jsLayerPlug, ok := transform.JSLayer().(config.Pluggable) + if !ok { + return nil, fmt.Errorf("non pluggable js transformation layer") + } + + return map[string](config.Pluggable){ + "lua": luaLayerPlug, + "js": jsLayerPlug, + }, nil +} + +// parseTransformations validates the message_transformation according to rules. +// The reason for this function is to make the validation part explicit and +// separate it from GetTransformations. 
+func parseTransformations(input string) ([]*transformationUnit, error) { + if input == "" { + return nil, fmt.Errorf("invalid message transformation found; empty string") + } + + transformations := strings.Split(input, ",") + out := make([]*transformationUnit, 0, len(transformations)) + for _, trans := range transformations { + splitTrans := strings.Split(trans, ":") + name := splitTrans[0] // safe + + switch name { + case "spEnrichedToJson": + // option rules + if len(splitTrans) > 1 { + return nil, fmt.Errorf("invalid message transformation found; unexpected colon after %q", name) + } + + out = append(out, &transformationUnit{name: name}) + case "spEnrichedSetPk": + // option rules + if len(splitTrans) != 2 { + return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedSetPk:{option}' but got %q", trans) + } + + if splitTrans[1] == "" { + return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedSetPk'") + } + + out = append(out, &transformationUnit{ + name: name, + option: splitTrans[1], + }) + case "spEnrichedFilter": + // option rules + if len(splitTrans) != 2 { + return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedFilter:{option}' but got %q", trans) + } + + if splitTrans[1] == "" { + return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedFilter'") + } + + out = append(out, &transformationUnit{ + name: name, + option: splitTrans[1], + }) + case "spEnrichedFilterContext": + // option rules + if len(splitTrans) != 2 { + return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedFilterContext:{option}' but got %q", trans) + } + + if splitTrans[1] == "" { + return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedFilterContext'") + } + + out = append(out, &transformationUnit{ + name: name, + option: splitTrans[1], + }) + case "spEnrichedFilterUnstructEvent": + // option rules + if 
len(splitTrans) != 2 { + return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedFilterUnstructEvent:{option}' but got %q", trans) + } + + if splitTrans[1] == "" { + return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedFilterUnstructEvent'") + } + + out = append(out, &transformationUnit{ + name: name, + option: splitTrans[1], + }) + case "lua": + // option rules + if len(splitTrans) != 2 { + return nil, fmt.Errorf("invalid message transformation found; expected 'lua:{option}' but got %q", trans) + } + + if splitTrans[1] == "" { + return nil, fmt.Errorf("invalid message transformation found; empty option for 'lua'") + } + + out = append(out, &transformationUnit{ + name: name, + option: splitTrans[1], + }) + case "js": + // option rules + if len(splitTrans) != 2 { + return nil, fmt.Errorf("invalid message transformation found; expected 'js:{option}' but got %q", trans) + } + + if splitTrans[1] == "" { + return nil, fmt.Errorf("invalid message transformation found; empty option for 'js'") + } + + out = append(out, &transformationUnit{ + name: name, + option: splitTrans[1], + }) + case "none": + // option rule + if len(splitTrans) > 1 { + return nil, fmt.Errorf("invalid message transformation found; unexpected colon after %q", name) + } + // none is treated like identity, so ignoring + case "": + // this could be caused by some trailing/excessive comma + // differentiating from default in order to generate a + // more helpful error message + return nil, fmt.Errorf("empty transformation found; please check the message transformation syntax") + default: + return nil, fmt.Errorf("invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk', 'spEnrichedFilter', 'spEnrichedFilterContext', 'spEnrichedFilterUnstructEvent', 'lua', 'js' or 'none' but got %q", name) + } + } + + return out, nil +} + +// mkEngineFunction is a helper method used in GetTransformations +// It creates, smoke-tests 
and returns a custom transformation function. +func mkEngineFunction(c configProvider, trans *transformationUnit, registry layerRegistry) (transform.TransformationFunction, error) { + useLayerName := c.ProvideTransformLayerName() + + // validate that the expected layer is specified in the configuration + if useLayerName != trans.name { + return nil, fmt.Errorf("missing configuration for the custom transformation layer specified: %q", trans.name) + } + + plug, ok := registry[trans.name] + if !ok { + return nil, fmt.Errorf("unknown transformation layer specified") + } + + component, err := c.ProvideTransformComponent(plug) + if err != nil { + return nil, err + } + + if engine, ok := component.(transform.Engine); ok { + err := engine.SmokeTest(trans.option) + if err != nil { + return nil, err + } + + return engine.MakeFunction(trans.option), nil + } + + return nil, fmt.Errorf("could not interpret custom transformation configuration") +} diff --git a/pkg/transform/transformconfig/transform_config_test.go b/pkg/transform/transformconfig/transform_config_test.go new file mode 100644 index 00000000..ae96c9c4 --- /dev/null +++ b/pkg/transform/transformconfig/transform_config_test.go @@ -0,0 +1,491 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package transformconfig + +import ( + "fmt" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform" +) + +func TestParseTransformations_InvalidMessage(t *testing.T) { + testCases := []struct { + Name string + Message string + ExpError string + }{ + { + Name: "message_empty", + Message: "", + ExpError: "invalid message transformation found; empty string", + }, + { + Name: "message_not_found", + Message: "fake", + ExpError: "invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk', 'spEnrichedFilter', 'spEnrichedFilterContext', 'spEnrichedFilterUnstructEvent', 'lua', 'js' or 'none' but got \"fake\"", + }, + { + Name: "message_option_none_a", + Message: "none:wrong", + ExpError: "invalid message transformation found; unexpected colon after \"none\"", + }, + { + Name: "message_option_none_b", + Message: "none:", + ExpError: "invalid message transformation found; unexpected colon after \"none\"", + }, + { + Name: "message_option_spEnrichedToJson", + Message: "spEnrichedToJson:wrong", + ExpError: "invalid message transformation found; unexpected colon after \"spEnrichedToJson\"", + }, + { + Name: "message_no_option_spEnrichedSetPk", + Message: "spEnrichedSetPk", + ExpError: "invalid message transformation found; expected 'spEnrichedSetPk:{option}' but got \"spEnrichedSetPk\"", + }, + { + Name: "message_empty_option_spEnrichedSetPk", + Message: "spEnrichedSetPk:", + ExpError: "invalid message transformation found; empty option for 'spEnrichedSetPk'", + }, + { + Name: "message_no_option_spEnrichedFilter", + Message: "spEnrichedFilter:too:wrong", + ExpError: "invalid message transformation found; expected 'spEnrichedFilter:{option}' but got \"spEnrichedFilter:too:wrong\"", + }, + { + Name: "message_empty_option_spEnrichedFilter", + Message: 
"spEnrichedFilter:", + ExpError: "invalid message transformation found; empty option for 'spEnrichedFilter'", + }, + { + Name: "message_no_option_spEnrichedFilterContext", + Message: "spEnrichedFilterContext:too:wrong", + ExpError: "invalid message transformation found; expected 'spEnrichedFilterContext:{option}' but got \"spEnrichedFilterContext:too:wrong\"", + }, + { + Name: "message_empty_option_spEnrichedFilterContext", + Message: "spEnrichedFilterContext:", + ExpError: "invalid message transformation found; empty option for 'spEnrichedFilterContext'", + }, + { + Name: "message_no_option_spEnrichedFilterUnstructEvent", + Message: "spEnrichedFilterUnstructEvent:too:wrong", + ExpError: "invalid message transformation found; expected 'spEnrichedFilterUnstructEvent:{option}' but got \"spEnrichedFilterUnstructEvent:too:wrong\"", + }, + { + Name: "message_empty_option_spEnrichedFilterUnstructEvent", + Message: "spEnrichedFilterUnstructEvent:", + ExpError: "invalid message transformation found; empty option for 'spEnrichedFilterUnstructEvent'", + }, + { + Name: "message_no_option_lua", + Message: "lua", + ExpError: "invalid message transformation found; expected 'lua:{option}' but got \"lua\"", + }, + { + Name: "message_empty_option_lua", + Message: "lua:", + ExpError: "invalid message transformation found; empty option for 'lua'", + }, + { + Name: "message_no_option_js", + Message: "js", + ExpError: "invalid message transformation found; expected 'js:{option}' but got \"js\"", + }, + { + Name: "message_empty_option_js", + Message: "js:", + ExpError: "invalid message transformation found; empty option for 'js'", + }, + { + Name: "invalid_transformation_syntax_a", + Message: "spEnrichedToJson,", + ExpError: "empty transformation found; please check the message transformation syntax", + }, + { + Name: "invalid_transformation_syntax_b", + Message: ":", + ExpError: "empty transformation found; please check the message transformation syntax", + }, + { + Name: 
"invalid_transformation_syntax_c", + Message: ",", + ExpError: "empty transformation found; please check the message transformation syntax", + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + parsed, err := parseTransformations(tt.Message) + assert.Nil(parsed) + if err == nil { + t.Fatalf("expected error; got nil") + } + assert.Equal(tt.ExpError, err.Error()) + }) + } +} + +func TestGetTransformations_MissingLayerConfig(t *testing.T) { + fixturesDir := "../../../config/test-fixtures" + testCases := []struct { + Filename string + TransMessage string + ExpectedError string + }{ + { + Filename: "transform-invalid-layer-lua.hcl", + TransMessage: "lua:fun", + ExpectedError: "missing configuration for the custom transformation layer specified: \"lua\"", + }, + { + Filename: "transform-invalid-layer-js.hcl", + TransMessage: "js:fun", + ExpectedError: "missing configuration for the custom transformation layer specified: \"js\"", + }, + } + + for _, tt := range testCases { + t.Run(tt.Filename, func(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join(fixturesDir, tt.Filename) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal(c.Data.Transform.Message, tt.TransMessage) + + transformation, err := GetTransformations(c) + assert.Nil(transformation) + assert.NotNil(err) + assert.Equal(tt.ExpectedError, err.Error()) + }) + } +} + +func TestGetTransformations_Builtins(t *testing.T) { + testCases := []struct { + Name string + Provider configProvider + ExpectedErr error + }{ + { + Name: "invalid_transform_message", + Provider: &testConfigProvider{ + message: "tooWrong", + }, + ExpectedErr: fmt.Errorf("invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk', 'spEnrichedFilter', 'spEnrichedFilterContext', 
'spEnrichedFilterUnstructEvent', 'lua', 'js' or 'none' but got \"tooWrong\""), + }, + { + Name: "spEnrichedToJson", + Provider: &testConfigProvider{ + message: "spEnrichedToJson", + }, + ExpectedErr: nil, + }, + { + Name: "spEnrichedSetPk", + Provider: &testConfigProvider{ + message: "spEnrichedSetPk:app_id", + }, + ExpectedErr: nil, + }, + { + Name: "spEnrichedFilter", + Provider: &testConfigProvider{ + message: "spEnrichedFilter:app_id==xyz", + }, + ExpectedErr: nil, + }, + { + Name: "spEnrichedFilterContext", + Provider: &testConfigProvider{ + message: "spEnrichedFilterContext:contexts_x_x_x_1.yz==xyz", + }, + ExpectedErr: nil, + }, + { + Name: "spEnrichedFilterUnstructEvent", + Provider: &testConfigProvider{ + message: "spEnrichedFilterUnstructEvent:unstruct_event_x_x_x_1.yz==xyz", + }, + ExpectedErr: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + applyFun, err := GetTransformations(tt.Provider) + + if tt.ExpectedErr != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.Nil(applyFun) + } else { + assert.Nil(err) + assert.NotNil(applyFun) + } + }) + } +} + +func TestGetTransformations_Custom(t *testing.T) { + testCases := []struct { + Name string + Provider configProvider + ExpectedErr error + }{ + { + Name: "lua", + Provider: &testConfigProvider{ + message: "lua:fun", + layerName: "lua", + component: &testEngine{ + smokeTestErr: nil, + mkFunction: testTransformationFunction, + }, + }, + ExpectedErr: nil, + }, + { + Name: "js", + Provider: &testConfigProvider{ + message: "js:fun", + layerName: "js", + component: &testEngine{ + smokeTestErr: nil, + mkFunction: testTransformationFunction, + }, + }, + ExpectedErr: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + applyFun, err := GetTransformations(tt.Provider) + + if tt.ExpectedErr != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.Nil(applyFun) 
+ } else { + assert.Nil(err) + assert.NotNil(applyFun) + } + }) + } +} + +func TestLayerRegistry(t *testing.T) { + assert := assert.New(t) + + registry, err := getLayerRegistry() + assert.Nil(err) + + _, okLua := registry["lua"] + assert.True(okLua) + + _, okJs := registry["js"] + assert.True(okJs) +} + +func TestMkEngineFunction(t *testing.T) { + testCases := []struct { + Name string + Provider *testConfigProvider + Unit *transformationUnit + Registry layerRegistry + ExpectedErr error + }{ + { + Name: "missing_layer_config", + Provider: &testConfigProvider{ + layerName: "test", + component: "irrelevant", + err: nil, + }, + Unit: &transformationUnit{ + name: "noTest", + option: "testFun", + }, + Registry: map[string]config.Pluggable{}, + ExpectedErr: fmt.Errorf("missing configuration for the custom transformation layer specified: \"noTest\""), + }, + { + Name: "unknown_layer", + Provider: &testConfigProvider{ + layerName: "test", + component: "irrelevant", + err: nil, + }, + Unit: &transformationUnit{ + name: "test", + option: "testFun", + }, + Registry: map[string]config.Pluggable{}, + ExpectedErr: fmt.Errorf("unknown transformation layer specified"), + }, + { + Name: "provider_error", + Provider: &testConfigProvider{ + layerName: "test", + component: nil, + err: fmt.Errorf("some error"), + }, + Unit: &transformationUnit{ + name: "test", + option: "testFun", + }, + Registry: map[string]config.Pluggable{ + "test": &testPluggable{}, + }, + ExpectedErr: fmt.Errorf("some error"), + }, + { + Name: "no_engine_component", + Provider: &testConfigProvider{ + layerName: "test", + component: "notAnEngine", + err: nil, + }, + Unit: &transformationUnit{ + name: "test", + option: "testFun", + }, + Registry: map[string]config.Pluggable{ + "test": &testPluggable{}, + }, + ExpectedErr: fmt.Errorf("could not interpret custom transformation configuration"), + }, + { + Name: "engine_smoke_test_error", + Provider: &testConfigProvider{ + layerName: "test", + component: &testEngine{ + 
smokeTestErr: fmt.Errorf("smoke error"), + mkFunction: testTransformationFunction, + }, + err: nil, + }, + Unit: &transformationUnit{ + name: "test", + option: "testFun", + }, + Registry: map[string]config.Pluggable{ + "test": &testPluggable{}, + }, + ExpectedErr: fmt.Errorf("smoke error"), + }, + { + Name: "happy_path", + Provider: &testConfigProvider{ + layerName: "test", + component: &testEngine{ + smokeTestErr: nil, + mkFunction: testTransformationFunction, + }, + err: nil, + }, + Unit: &transformationUnit{ + name: "test", + option: "testFun", + }, + Registry: map[string]config.Pluggable{ + "test": &testPluggable{}, + }, + ExpectedErr: nil, + }, + } + + for _, tt := range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + fun, err := mkEngineFunction( + tt.Provider, + tt.Unit, + tt.Registry, + ) + + if tt.ExpectedErr != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.Nil(fun) + } else { + assert.Nil(err) + assert.NotNil(fun) + } + }) + } +} + +// Helpers +type testConfigProvider struct { + message string + layerName string + component interface{} + err error +} + +// *testConfigProvider implements configProvider +func (tc *testConfigProvider) ProvideTransformMessage() string { + return tc.message +} + +func (tc *testConfigProvider) ProvideTransformLayerName() string { + return tc.layerName +} + +func (tc *testConfigProvider) ProvideTransformComponent(p config.Pluggable) (interface{}, error) { + return tc.component, tc.err +} + +type testPluggable struct{} + +// *testPluggable implements config.Pluggable +func (tp *testPluggable) ProvideDefault() (interface{}, error) { + return "placeholder", nil +} + +func (tp *testPluggable) Create(i interface{}) (interface{}, error) { + return "placeholder", nil +} + +type testEngine struct { + smokeTestErr error + mkFunction transform.TransformationFunction +} + +// *testEngine implements transform.Engine +func (te *testEngine) SmokeTest(funName string) error { + return 
te.smokeTestErr +} + +func (te *testEngine) MakeFunction(funName string) transform.TransformationFunction { + return te.mkFunction +} + +func testTransformationFunction(*models.Message, interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { + return nil, nil, nil, nil +} From 8f3e0c03845712f4b7dbb9daf90db49f9b9be9a7 Mon Sep 17 00:00:00 2001 From: TiganeteaRobert Date: Fri, 10 Jun 2022 12:18:44 +0300 Subject: [PATCH 09/25] Make anything that doesn't need to be exported private (closes #111) --- config/component.go | 12 +- config/config.go | 74 +++++----- config/decode.go | 20 +-- config/decode_test.go | 8 +- pkg/models/filter_result.go | 8 +- pkg/models/filter_result_test.go | 4 +- pkg/models/observer_buffer_test.go | 139 +++++++++++++++++- pkg/source/kinesis/kinesis_source_test.go | 2 +- pkg/statsreceiver/statsd.go | 22 +-- pkg/target/eventhub.go | 9 +- pkg/target/eventhub_test.go | 6 +- pkg/target/http.go | 12 +- pkg/target/http_test.go | 24 +-- pkg/target/kafka.go | 18 +-- pkg/target/kafka_test.go | 6 +- pkg/target/kinesis.go | 12 +- pkg/target/kinesis_test.go | 8 +- pkg/target/pubsub.go | 14 +- pkg/target/sqs.go | 12 +- pkg/target/sqs_test.go | 6 +- pkg/target/stdout.go | 9 +- pkg/target/stdout_test.go | 2 +- third_party/snowplow/badrows/bad_row.go | 4 +- third_party/snowplow/badrows/bad_row_test.go | 2 +- third_party/snowplow/badrows/generic_error.go | 2 +- .../snowplow/badrows/size_violation.go | 2 +- 26 files changed, 288 insertions(+), 149 deletions(-) diff --git a/config/component.go b/config/component.go index de270815..828897d6 100644 --- a/config/component.go +++ b/config/component.go @@ -28,19 +28,19 @@ type Pluggable interface { ComponentCreator } -// DecodingHandler is the type of any function that, given a ComponentConfigurable +// decodingHandler is the type of any function that, given a ComponentConfigurable // and a Decoder, returns a pointer to a structure that was decoded. 
-type DecodingHandler func(c ComponentConfigurable, d Decoder) (interface{}, error) +type decodingHandler func(c ComponentConfigurable, d Decoder) (interface{}, error) -// WithDecoderOptions returns a DecodingHandler closed over some DecoderOptions. -func WithDecoderOptions(opts *DecoderOptions) DecodingHandler { +// withDecoderOptions returns a decodingHandler closed over some DecoderOptions. +func withDecoderOptions(opts *DecoderOptions) decodingHandler { return func(c ComponentConfigurable, d Decoder) (interface{}, error) { - return Configure(c, d, opts) + return configure(c, d, opts) } } // Configure returns the decoded target. -func Configure(c ComponentConfigurable, d Decoder, opts *DecoderOptions) (interface{}, error) { +func configure(c ComponentConfigurable, d Decoder, opts *DecoderOptions) (interface{}, error) { target, err := c.ProvideDefault() // target is ptr if err != nil { return nil, err diff --git a/config/config.go b/config/config.go index 17f5fd4a..694da223 100644 --- a/config/config.go +++ b/config/config.go @@ -27,9 +27,9 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/target/targetiface" ) -// Config holds the configuration data along with the decoder to decode them +// Config holds the configuration data along with the Decoder to Decode them type Config struct { - Data *ConfigurationData + Data *configurationData Decoder Decoder } @@ -47,35 +47,35 @@ type ConfigurationData struct { DisableTelemetry bool `hcl:"disable_telemetry,optional" env:"DISABLE_TELEMETRY"` } -// Component is a type to abstract over configuration blocks. -type Component struct { - Use *Use `hcl:"use,block"` +// component is a type to abstract over configuration blocks. +type component struct { + Use *use `hcl:"use,block"` } -// Use is a type to denote what a component will be configured to use. -type Use struct { +// use is a type to denote what a component will be configured to use. 
+type use struct { Name string `hcl:",label" env:"NAME"` Body hcl.Body `hcl:",remain"` } -// FailureConfig holds configuration for the failure target. +// failureConfig holds configuration for the failure target. // It includes the target component to use. -type FailureConfig struct { - Target *Use `hcl:"use,block" envPrefix:"FAILURE_TARGET_"` +type failureConfig struct { + Target *use `hcl:"use,block" envPrefix:"FAILURE_TARGET_"` Format string `hcl:"format,optional" env:"FAILURE_TARGETS_FORMAT"` } -// SentryConfig configures the Sentry error tracker. -type SentryConfig struct { +// sentryConfig configures the Sentry error tracker. +type sentryConfig struct { Dsn string `hcl:"dsn" env:"SENTRY_DSN"` Tags string `hcl:"tags,optional" env:"SENTRY_TAGS"` Debug bool `hcl:"debug,optional" env:"SENTRY_DEBUG"` } -// StatsConfig holds configuration for stats receivers. +// statsConfig holds configuration for stats receivers. // It includes a receiver component to use. -type StatsConfig struct { - Receiver *Use `hcl:"use,block" envPrefix:"STATS_RECEIVER_"` +type statsConfig struct { + Receiver *use `hcl:"use,block" envPrefix:"STATS_RECEIVER_"` TimeoutSec int `hcl:"timeout_sec,optional" env:"STATS_RECEIVER_TIMEOUT_SEC"` BufferSec int `hcl:"buffer_sec,optional" env:"STATS_RECEIVER_BUFFER_SEC"` } @@ -83,24 +83,24 @@ type StatsConfig struct { // TransformConfig holds configuration for tranformations. type TransformConfig struct { Message string `hcl:"message_transformation,optional" env:"MESSAGE_TRANSFORMATION"` - Layer *Use `hcl:"use,block" envPrefix:"TRANSFORMATION_LAYER_"` + Layer *use `hcl:"use,block" envPrefix:"TRANSFORMATION_LAYER_"` } // defaultConfigData returns the initial main configuration target. 
-func defaultConfigData() *ConfigurationData { - return &ConfigurationData{ - Source: &Component{&Use{Name: "stdin"}}, - Target: &Component{&Use{Name: "stdout"}}, +func defaultConfigData() *configurationData { + return &configurationData{ + Source: &component{&use{Name: "stdin"}}, + Target: &component{&use{Name: "stdout"}}, - FailureTarget: &FailureConfig{ - Target: &Use{Name: "stdout"}, + FailureTarget: &failureConfig{ + Target: &use{Name: "stdout"}, Format: "snowplow", }, - Sentry: &SentryConfig{ + Sentry: &sentryConfig{ Tags: "{}", }, - StatsReceiver: &StatsConfig{ - Receiver: &Use{}, + StatsReceiver: &statsConfig{ + Receiver: &use{}, TimeoutSec: 1, BufferSec: 15, }, @@ -129,7 +129,7 @@ func newEnvConfig() (*Config, error) { var err error decoderOpts := &DecoderOptions{} - envDecoder := &EnvDecoder{} + envDecoder := &envDecoder{} configData := defaultConfigData() @@ -160,12 +160,12 @@ func newHclConfig(filename string) (*Config, error) { } // Creating EvalContext - evalContext := CreateHclContext() // ptr + evalContext := createHclContext() // ptr // Decoding configData := defaultConfigData() decoderOpts := &DecoderOptions{Input: fileHCL.Body} - hclDecoder := &HclDecoder{EvalContext: evalContext} + hclDecoder := &hclDecoder{EvalContext: evalContext} err = hclDecoder.Decode(decoderOpts, configData) if err != nil { @@ -180,9 +180,9 @@ func newHclConfig(filename string) (*Config, error) { return &mainConfig, nil } -// CreateComponent creates a pluggable component given the decoder options. +// CreateComponent creates a pluggable component given the Decoder options. 
func (c *Config) CreateComponent(p Pluggable, opts *DecoderOptions) (interface{}, error) { - componentConfigure := WithDecoderOptions(opts) + componentConfigure := withDecoderOptions(opts) decodedConfig, err := componentConfigure(p, c.Decoder) if err != nil { @@ -203,7 +203,7 @@ func (c *Config) GetTarget() (targetiface.Target, error) { switch useTarget.Name { case "stdout": plug = target.AdaptStdoutTargetFunc( - target.NewStdoutTarget, + target.StdoutTargetConfigFunction, ) case "kinesis": plug = target.AdaptKinesisTargetFunc( @@ -223,7 +223,7 @@ func (c *Config) GetTarget() (targetiface.Target, error) { ) case "eventhub": plug = target.AdaptEventHubTargetFunc( - target.NewEventHubTarget, + target.EventHubTargetConfigFunction, ) case "http": plug = target.AdaptHTTPTargetFunc( @@ -259,7 +259,7 @@ func (c *Config) GetFailureTarget(AppName string, AppVersion string) (failureifa switch useFailureTarget.Name { case "stdout": plug = target.AdaptStdoutTargetFunc( - target.NewStdoutTarget, + target.StdoutTargetConfigFunction, ) case "kinesis": plug = target.AdaptKinesisTargetFunc( @@ -279,7 +279,7 @@ func (c *Config) GetFailureTarget(AppName string, AppVersion string) (failureifa ) case "eventhub": plug = target.AdaptEventHubTargetFunc( - target.NewEventHubTarget, + target.EventHubTargetConfigFunction, ) case "http": plug = target.AdaptHTTPTargetFunc( @@ -328,15 +328,15 @@ func (c *Config) GetTags() (map[string]string, error) { // GetObserver builds and returns the observer with the embedded // optional stats receiver func (c *Config) GetObserver(tags map[string]string) (*observer.Observer, error) { - sr, err := c.GetStatsReceiver(tags) + sr, err := c.getStatsReceiver(tags) if err != nil { return nil, err } return observer.New(sr, time.Duration(c.Data.StatsReceiver.TimeoutSec)*time.Second, time.Duration(c.Data.StatsReceiver.BufferSec)*time.Second), nil } -// GetStatsReceiver builds and returns the stats receiver -func (c *Config) GetStatsReceiver(tags map[string]string) 
(statsreceiveriface.StatsReceiver, error) { +// getStatsReceiver builds and returns the stats receiver +func (c *Config) getStatsReceiver(tags map[string]string) (statsreceiveriface.StatsReceiver, error) { useReceiver := c.Data.StatsReceiver.Receiver decoderOpts := &DecoderOptions{ Input: useReceiver.Body, diff --git a/config/decode.go b/config/decode.go index 58ea16a1..4491d597 100644 --- a/config/decode.go +++ b/config/decode.go @@ -33,15 +33,15 @@ type DecoderOptions struct { Input hcl.Body } -// EnvDecoder implements Decoder. -type EnvDecoder struct{} +// envDecoder implements Decoder. +type envDecoder struct{} // Decode populates target from the environment. // The target argument must be a pointer to a struct type value. -func (e *EnvDecoder) Decode(opts *DecoderOptions, target interface{}) error { +func (e *envDecoder) Decode(opts *DecoderOptions, target interface{}) error { // Decoder Options cannot be missing if opts == nil { - return errors.New("missing DecoderOptions for EnvDecoder") + return errors.New("missing DecoderOptions for envDecoder") } // If target is nil then we assume that target is not decodable. @@ -56,8 +56,8 @@ func (e *EnvDecoder) Decode(opts *DecoderOptions, target interface{}) error { return env.Parse(target, envOpts) } -// HclDecoder implements Decoder. -type HclDecoder struct { +// hclDecoder implements Decoder. +type hclDecoder struct { EvalContext *hcl.EvalContext } @@ -65,10 +65,10 @@ type HclDecoder struct { // The target argument must be a pointer to an allocated structure. // If the HCL input is nil, we assume there is nothing to do and the target // stays unaffected. If the target is nil, we assume is not decodable. 
-func (h *HclDecoder) Decode(opts *DecoderOptions, target interface{}) error { +func (h *hclDecoder) Decode(opts *DecoderOptions, target interface{}) error { // Decoder Options cannot be missing if opts == nil { - return errors.New("missing DecoderOptions for HclDecoder") + return errors.New("missing DecoderOptions for hclDecoder") } src := opts.Input @@ -90,12 +90,12 @@ func (h *HclDecoder) Decode(opts *DecoderOptions, target interface{}) error { return nil } -// CreateHclContext creates an *hcl.EvalContext that is used in decoding HCL. +// createHclContext creates an *hcl.EvalContext that is used in decoding HCL. // Here we can add the evaluation features available for the HCL configuration // users. // For now, below is an example of 2 different ways users can reference // environment variables in their HCL configuration file. -func CreateHclContext() *hcl.EvalContext { +func createHclContext() *hcl.EvalContext { evalCtx := &hcl.EvalContext{ Functions: hclCtxFunctions(), Variables: hclCtxVariables(), diff --git a/config/decode_test.go b/config/decode_test.go index cb5ea825..4a6cb060 100644 --- a/config/decode_test.go +++ b/config/decode_test.go @@ -21,7 +21,7 @@ type testStruct struct { } func TestEnvDecode(t *testing.T) { - envDecoder := EnvDecoder{} + envDecoder := envDecoder{} testCases := []struct { TestName string @@ -76,7 +76,7 @@ func TestEnvDecode(t *testing.T) { func TestHclDecode(t *testing.T) { evalCtx := &hcl.EvalContext{} - hclDecoder := HclDecoder{evalCtx} + hclDecoder := hclDecoder{evalCtx} hclSrc := ` test_string = "ateststring" ` @@ -147,8 +147,8 @@ func TestCreateHclContext(t *testing.T) { TestInt int `hcl:"test_int"` } - evalCtx := CreateHclContext() - hclDecoder := HclDecoder{evalCtx} + evalCtx := createHclContext() + hclDecoder := hclDecoder{evalCtx} hclSrc := ` test_string = env.TEST_STRING test_int = env("TEST_INT") diff --git a/pkg/models/filter_result.go b/pkg/models/filter_result.go index 23c7a1f7..97ef3c15 100644 --- 
a/pkg/models/filter_result.go +++ b/pkg/models/filter_result.go @@ -26,15 +26,15 @@ type FilterResult struct { AvgFilterLatency time.Duration } -// NewFilterResult uses the current time as the timeOfFilter and calls NewFilterResultWithTime +// NewFilterResult uses the current time as the timeOfFilter and calls newFilterResultWithTime func NewFilterResult(filtered []*Message) *FilterResult { - return NewFilterResultWithTime(filtered, time.Now().UTC()) + return newFilterResultWithTime(filtered, time.Now().UTC()) } -// NewFilterResultWithTime builds a result structure to return from a filtered message slice +// newFilterResultWithTime builds a result structure to return from a filtered message slice // attempt which contains the filtered message count as well as several // derived latency measures. -func NewFilterResultWithTime(filtered []*Message, timeOfFilter time.Time) *FilterResult { +func newFilterResultWithTime(filtered []*Message, timeOfFilter time.Time) *FilterResult { r := FilterResult{ FilteredCount: int64(len(filtered)), } diff --git a/pkg/models/filter_result_test.go b/pkg/models/filter_result_test.go index eb0520eb..591e3560 100644 --- a/pkg/models/filter_result_test.go +++ b/pkg/models/filter_result_test.go @@ -29,7 +29,7 @@ func TestNewFilterResult_EmptyWithoutTime(t *testing.T) { func TestNewFilterResult_EmptyWithTime(t *testing.T) { assert := assert.New(t) - r := NewFilterResultWithTime(nil, time.Now().UTC()) + r := newFilterResultWithTime(nil, time.Now().UTC()) assert.NotNil(r) assert.Equal(int64(0), r.FilteredCount) @@ -61,7 +61,7 @@ func TestNewFilterResult_WithMessages(t *testing.T) { }, } - r := NewFilterResultWithTime(filtered, timeNow) + r := newFilterResultWithTime(filtered, timeNow) assert.NotNil(r) assert.Equal(int64(2), r.FilteredCount) diff --git a/pkg/models/observer_buffer_test.go b/pkg/models/observer_buffer_test.go index e1f7bedd..59c3f483 100644 --- a/pkg/models/observer_buffer_test.go +++ b/pkg/models/observer_buffer_test.go @@ -68,7 
+68,7 @@ func TestObserverBuffer(t *testing.T) { b.AppendWriteInvalid(r) b.AppendWriteInvalid(nil) - fr := NewFilterResultWithTime(filtered, timeNow) + fr := newFilterResultWithTime(filtered, timeNow) b.AppendFiltered(fr) @@ -105,3 +105,140 @@ func TestObserverBuffer(t *testing.T) { assert.Equal("TargetResults:2,MsgFiltered:1,MsgSent:4,MsgFailed:2,OversizedTargetResults:2,OversizedMsgSent:4,OversizedMsgFailed:2,InvalidTargetResults:2,InvalidMsgSent:4,InvalidMsgFailed:2,MaxProcLatency:600000,MaxMsgLatency:4200000,MaxFilterLatency:600000,MaxTransformLatency:180000,SumTransformLatency:720000,SumProcLatency:2520000,SumMsgLatency:18000000", b.String()) } + +// TestObserverBuffer_Basic is a basic version of the above test, stripping away all but one event +// It was created in order to provide a simpler way to investigate whether logging may be misreporting latency +func TestObserverBuffer_Basic(t *testing.T) { + assert := assert.New(t) + + b := ObserverBuffer{} + assert.NotNil(b) + + timeNow := time.Now().UTC() + + sent := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + TimeCreated: timeNow.Add(time.Duration(-50) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-4) * time.Minute), + TimeTransformed: timeNow.Add(time.Duration(-2) * time.Minute), + }, + } + + r := NewTargetWriteResultWithTime(sent, nil, nil, nil, timeNow) + + b.AppendWrite(r) + b.AppendWrite(nil) + // b.AppendWriteOversized(r) + b.AppendWriteOversized(nil) + // b.AppendWriteInvalid(r) + b.AppendWriteInvalid(nil) + + fr := newFilterResultWithTime(nil, timeNow) + + b.AppendFiltered(fr) + + assert.Equal(int64(1), b.TargetResults) + assert.Equal(int64(1), b.MsgSent) + assert.Equal(int64(0), b.MsgFailed) + assert.Equal(int64(1), b.MsgTotal) + + assert.Equal(int64(0), b.MsgFiltered) + + assert.Equal(int64(0), b.OversizedTargetResults) + assert.Equal(int64(0), b.OversizedMsgSent) + assert.Equal(int64(0), b.OversizedMsgFailed) + assert.Equal(int64(0), b.OversizedMsgTotal) + + 
assert.Equal(int64(0), b.InvalidTargetResults) + assert.Equal(int64(0), b.InvalidMsgSent) + assert.Equal(int64(0), b.InvalidMsgFailed) + assert.Equal(int64(0), b.InvalidMsgTotal) + + assert.Equal(time.Duration(4)*time.Minute, b.MaxProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.MinProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.GetAvgProcLatency()) + assert.Equal(time.Duration(50)*time.Minute, b.MaxMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.MinMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.GetAvgMsgLatency()) + assert.Equal(time.Duration(2)*time.Minute, b.MaxTransformLatency) + assert.Equal(time.Duration(2)*time.Minute, b.MinTransformLatency) + assert.Equal(time.Duration(2)*time.Minute, b.GetAvgTransformLatency()) + + assert.Equal(time.Duration(0), b.MaxFilterLatency) + assert.Equal(time.Duration(0), b.MinFilterLatency) + assert.Equal(time.Duration(0), b.GetAvgFilterLatency()) + + assert.Equal("TargetResults:1,MsgFiltered:0,MsgSent:1,MsgFailed:0,OversizedTargetResults:0,OversizedMsgSent:0,OversizedMsgFailed:0,InvalidTargetResults:0,InvalidMsgSent:0,InvalidMsgFailed:0,MaxProcLatency:240000,MaxMsgLatency:3000000,MaxFilterLatency:0,MaxTransformLatency:120000,SumTransformLatency:120000,SumProcLatency:240000,SumMsgLatency:3000000", b.String()) +} + +// TestObserverBuffer_Basic is a basic version of the above test, stripping away all but one event +// It was created in order to demonstrate an issue with misreporting of transformLatency when we don't have transformations. +// https://github.com/snowplow-devops/stream-replicator/issues/108 +// Commented out but should serve as illustration of and unit test for that bug. 
+/* +func TestObserverBuffer_BasicNoTransform(t *testing.T) { + assert := assert.New(t) + + b := ObserverBuffer{} + assert.NotNil(b) + + timeNow := time.Now().UTC() + + sent := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + TimeCreated: timeNow.Add(time.Duration(-50) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-4) * time.Minute), + }, + } + + r := NewTargetWriteResultWithTime(sent, nil, nil, nil, timeNow) + + b.AppendWrite(r) + b.AppendWrite(nil) + // b.AppendWriteOversized(r) + b.AppendWriteOversized(nil) + // b.AppendWriteInvalid(r) + b.AppendWriteInvalid(nil) + + fr := NewFilterResultWithTime(nil, timeNow) + + b.AppendFiltered(fr) + + assert.Equal(int64(1), b.TargetResults) + assert.Equal(int64(1), b.MsgSent) + assert.Equal(int64(0), b.MsgFailed) + assert.Equal(int64(1), b.MsgTotal) + + assert.Equal(int64(0), b.MsgFiltered) + + assert.Equal(int64(0), b.OversizedTargetResults) + assert.Equal(int64(0), b.OversizedMsgSent) + assert.Equal(int64(0), b.OversizedMsgFailed) + assert.Equal(int64(0), b.OversizedMsgTotal) + + assert.Equal(int64(0), b.InvalidTargetResults) + assert.Equal(int64(0), b.InvalidMsgSent) + assert.Equal(int64(0), b.InvalidMsgFailed) + assert.Equal(int64(0), b.InvalidMsgTotal) + + assert.Equal(time.Duration(4)*time.Minute, b.MaxProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.MinProcLatency) + assert.Equal(time.Duration(4)*time.Minute, b.GetAvgProcLatency()) + assert.Equal(time.Duration(50)*time.Minute, b.MaxMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.MinMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, b.GetAvgMsgLatency()) + assert.Equal(time.Duration(0), b.MaxTransformLatency) + assert.Equal(time.Duration(0), b.MinTransformLatency) + assert.Equal(time.Duration(0), b.GetAvgTransformLatency()) + + assert.Equal(time.Duration(0), b.MaxFilterLatency) + assert.Equal(time.Duration(0), b.MinFilterLatency) + assert.Equal(time.Duration(0), b.GetAvgFilterLatency()) + + 
assert.Equal("TargetResults:1,MsgFiltered:0,MsgSent:1,MsgFailed:0,OversizedTargetResults:0,OversizedMsgSent:0,OversizedMsgFailed:0,InvalidTargetResults:0,InvalidMsgSent:0,InvalidMsgFailed:0,MaxProcLatency:240000,MaxMsgLatency:3000000,MaxFilterLatency:0,MaxTransformLatency:0,SumTransformLatency:0,SumProcLatency:240000,SumMsgLatency:3000000", b.String()) +} +*/ diff --git a/pkg/source/kinesis/kinesis_source_test.go b/pkg/source/kinesis/kinesis_source_test.go index ea32c011..390f8cb0 100644 --- a/pkg/source/kinesis/kinesis_source_test.go +++ b/pkg/source/kinesis/kinesis_source_test.go @@ -194,7 +194,7 @@ func TestGetSource_WithKinesisSource(t *testing.T) { t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - // Use our function generator to interact with localstack + // use our function generator to interact with localstack kinesisSourceConfigFunctionWithLocalstack := configFunctionGeneratorWithInterfaces(kinesisClient, dynamodbClient, "00000000000") adaptedHandle := adapterGenerator(kinesisSourceConfigFunctionWithLocalstack) diff --git a/pkg/statsreceiver/statsd.go b/pkg/statsreceiver/statsd.go index 49864739..3af27046 100644 --- a/pkg/statsreceiver/statsd.go +++ b/pkg/statsreceiver/statsd.go @@ -24,13 +24,13 @@ type StatsDStatsReceiverConfig struct { Tags string `hcl:"tags,optional" env:"STATS_RECEIVER_STATSD_TAGS"` } -// StatsDStatsReceiver holds a new client for writing statistics to a StatsD server -type StatsDStatsReceiver struct { +// statsDStatsReceiver holds a new client for writing statistics to a StatsD server +type statsDStatsReceiver struct { client *statsd.Client } -// NewStatsDStatsReceiver creates a new client for writing metrics to StatsD -func NewStatsDStatsReceiver(address string, prefix string, tagsRaw string, tagsMapClient map[string]string) (*StatsDStatsReceiver, error) { +// newStatsDStatsReceiver creates a new client for writing metrics to StatsD +func newStatsDStatsReceiver(address string, prefix string, tagsRaw string, 
tagsMapClient map[string]string) (*statsDStatsReceiver, error) { tagsMap := map[string]string{} err := json.Unmarshal([]byte(tagsRaw), &tagsMap) if err != nil { @@ -53,16 +53,16 @@ func NewStatsDStatsReceiver(address string, prefix string, tagsRaw string, tagsM statsd.ReconnectInterval(60*time.Second), ) - return &StatsDStatsReceiver{ + return &statsDStatsReceiver{ client: client, }, nil } // NewStatsDReceiverWithTags closes over a given tags map and returns a function -// that creates a StatsDStatsReceiver given a StatsDStatsReceiverConfig. -func NewStatsDReceiverWithTags(tags map[string]string) func(c *StatsDStatsReceiverConfig) (*StatsDStatsReceiver, error) { - return func(c *StatsDStatsReceiverConfig) (*StatsDStatsReceiver, error) { - return NewStatsDStatsReceiver( +// that creates a statsDStatsReceiver given a StatsDStatsReceiverConfig. +func NewStatsDReceiverWithTags(tags map[string]string) func(c *StatsDStatsReceiverConfig) (*statsDStatsReceiver, error) { + return func(c *StatsDStatsReceiverConfig) (*statsDStatsReceiver, error) { + return newStatsDStatsReceiver( c.Address, c.Prefix, c.Tags, @@ -94,7 +94,7 @@ func (f StatsDStatsReceiverAdapter) ProvideDefault() (interface{}, error) { } // AdaptStatsDStatsReceiverFunc returns a StatsDStatsReceiverAdapter. 
-func AdaptStatsDStatsReceiverFunc(f func(c *StatsDStatsReceiverConfig) (*StatsDStatsReceiver, error)) StatsDStatsReceiverAdapter { +func AdaptStatsDStatsReceiverFunc(f func(c *StatsDStatsReceiverConfig) (*statsDStatsReceiver, error)) StatsDStatsReceiverAdapter { return func(i interface{}) (interface{}, error) { cfg, ok := i.(*StatsDStatsReceiverConfig) if !ok { @@ -106,7 +106,7 @@ func AdaptStatsDStatsReceiverFunc(f func(c *StatsDStatsReceiverConfig) (*StatsDS } // Send emits the bufferred metrics to the receiver -func (s *StatsDStatsReceiver) Send(b *models.ObserverBuffer) { +func (s *statsDStatsReceiver) Send(b *models.ObserverBuffer) { s.client.Incr("message_sent", b.MsgSent) s.client.Incr("message_failed", b.MsgFailed) s.client.Incr("oversized_message_sent", b.OversizedMsgSent) diff --git a/pkg/target/eventhub.go b/pkg/target/eventhub.go index b199bd93..b29b9d65 100644 --- a/pkg/target/eventhub.go +++ b/pkg/target/eventhub.go @@ -45,8 +45,8 @@ type EventHubTarget struct { log *log.Entry } -// NewEventHubTarget creates a new client for writing messages to Azure EventHub -func NewEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { +// newEventHubTarget creates a new client for writing messages to Azure EventHub +func newEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { _, keyNamePresent := os.LookupEnv("EVENTHUB_KEY_NAME") _, keyValuePresent := os.LookupEnv("EVENTHUB_KEY_VALUE") @@ -84,6 +84,11 @@ func NewEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { }, err } +// EventHubTargetConfigFunction creates an EventHubTarget from an EventHubconfig +func EventHubTargetConfigFunction(cfg *EventHubConfig) (*EventHubTarget, error) { + return newEventHubTarget(cfg) +} + // The EventHubTargetAdapter type is an adapter for functions to be used as // pluggable components for EventHub target. Implements the Pluggable interface. 
type EventHubTargetAdapter func(i interface{}) (interface{}, error) diff --git a/pkg/target/eventhub_test.go b/pkg/target/eventhub_test.go index 3cad6142..715f901d 100644 --- a/pkg/target/eventhub_test.go +++ b/pkg/target/eventhub_test.go @@ -45,7 +45,7 @@ func TestNewEventHubTarget_KeyValue(t *testing.T) { os.Setenv("EVENTHUB_KEY_NAME", "fake") os.Setenv("EVENTHUB_KEY_VALUE", "fake") - tgt, err := NewEventHubTarget(&cfg) + tgt, err := newEventHubTarget(&cfg) assert.Nil(err) assert.NotNil(tgt) } @@ -60,7 +60,7 @@ func TestNewEventHubTarget_ConnString(t *testing.T) { os.Setenv("EVENTHUB_CONNECTION_STRING", "Endpoint=sb://test.servicebus.windows.net/;SharedAccessKeyName=fake;SharedAccessKey=fake") - tgt, err := NewEventHubTarget(&cfg) + tgt, err := newEventHubTarget(&cfg) assert.Nil(err) assert.NotNil(tgt) } @@ -70,7 +70,7 @@ func TestNewEventHubTarget_Failure(t *testing.T) { unsetEverything() - tgt, err := NewEventHubTarget(&cfg) + tgt, err := newEventHubTarget(&cfg) assert.Equal("Error initialising EventHub client: No valid combination of authentication Env vars found. 
https://pkg.go.dev/github.com/Azure/azure-event-hubs-go#NewHubWithNamespaceNameAndEnvironment", err.Error()) assert.Nil(tgt) } diff --git a/pkg/target/http.go b/pkg/target/http.go index 919b9766..b2ca0bb6 100644 --- a/pkg/target/http.go +++ b/pkg/target/http.go @@ -24,10 +24,6 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/models" ) -const ( - httpTarget = `http_target` -) - // HTTPTargetConfig configures the destination for records consumed type HTTPTargetConfig struct { HTTPURL string `hcl:"url" env:"TARGET_HTTP_URL"` @@ -61,7 +57,7 @@ func checkURL(str string) error { return err } if u.Scheme == "" || u.Host == "" { - return errors.New(fmt.Sprintf("Invalid url for Http target: '%s'", str)) + return errors.New(fmt.Sprintf("Invalid url for HTTP target: '%s'", str)) } return nil } @@ -92,8 +88,8 @@ func addHeadersToRequest(request *http.Request, headers map[string]string) { } -// NewHTTPTarget creates a client for writing events to HTTP -func NewHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentType string, headers string, basicAuthUsername string, basicAuthPassword string, +// newHTTPTarget creates a client for writing events to HTTP +func newHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentType string, headers string, basicAuthUsername string, basicAuthPassword string, certFile string, keyFile string, caFile string, skipVerifyTLS bool) (*HTTPTarget, error) { err := checkURL(httpURL) if err != nil { @@ -130,7 +126,7 @@ func NewHTTPTarget(httpURL string, requestTimeout int, byteLimit int, contentTyp // HTTPTargetConfigFunction creates HTTPTarget from HTTPTargetConfig func HTTPTargetConfigFunction(c *HTTPTargetConfig) (*HTTPTarget, error) { - return NewHTTPTarget( + return newHTTPTarget( c.HTTPURL, c.RequestTimeoutInSeconds, c.ByteLimit, diff --git a/pkg/target/http_test.go b/pkg/target/http_test.go index fbd1f79a..1a0c20ac 100644 --- a/pkg/target/http_test.go +++ b/pkg/target/http_test.go @@ -112,18 +112,18 @@ 
func TestAddHeadersToRequest(t *testing.T) { func TestNewHTTPTarget(t *testing.T) { assert := assert.New(t) - httpTarget, err := NewHTTPTarget("http://something", 5, 1048576, "application/json", "", "", "", "", "", "", true) + httpTarget, err := newHTTPTarget("http://something", 5, 1048576, "application/json", "", "", "", "", "", "", true) assert.Nil(err) assert.NotNil(httpTarget) - failedHTTPTarget, err1 := NewHTTPTarget("something", 5, 1048576, "application/json", "", "", "", "", "", "", true) + failedHTTPTarget, err1 := newHTTPTarget("something", 5, 1048576, "application/json", "", "", "", "", "", "", true) - assert.Equal("Invalid url for Http target: 'something'", err1.Error()) + assert.Equal("Invalid url for HTTP target: 'something'", err1.Error()) assert.Nil(failedHTTPTarget) - failedHTTPTarget2, err2 := NewHTTPTarget("", 5, 1048576, "application/json", "", "", "", "", "", "", true) - assert.Equal("Invalid url for Http target: ''", err2.Error()) + failedHTTPTarget2, err2 := newHTTPTarget("", 5, 1048576, "application/json", "", "", "", "", "", "", true) + assert.Equal("Invalid url for HTTP target: ''", err2.Error()) assert.Nil(failedHTTPTarget2) } @@ -135,7 +135,7 @@ func TestHttpWrite_Simple(t *testing.T) { server := createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { panic(err) } @@ -169,7 +169,7 @@ func TestHttpWrite_Concurrent(t *testing.T) { server := createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { panic(err) } @@ -212,7 +212,7 @@ func TestHttpWrite_Failure(t *testing.T) { server := 
createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget("http://NonexistentEndpoint", 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget("http://NonexistentEndpoint", 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { panic(err) } @@ -243,7 +243,7 @@ func TestHttpWrite_Oversized(t *testing.T) { server := createTestServer(&results, &wg) defer server.Close() - target, err := NewHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) + target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { panic(err) } @@ -288,7 +288,7 @@ func TestHttpWrite_TLS(t *testing.T) { assert := assert.New(t) // Test that https requests work with manually provided certs - target, err := NewHTTPTarget("https://localhost:8999/hello", + target, err := newHTTPTarget("https://localhost:8999/hello", 5, 1048576, "application/json", @@ -322,7 +322,7 @@ func TestHttpWrite_TLS(t *testing.T) { os.RemoveAll(`tmp_replicator`) // Test that https requests work for different endpoints when different certs are provided manually - target2, err2 := NewHTTPTarget(ngrokAddress, + target2, err2 := newHTTPTarget(ngrokAddress, 5, 1048576, "application/json", @@ -350,7 +350,7 @@ func TestHttpWrite_TLS(t *testing.T) { // Test that https works when certs aren't manually provided // Test that https requests work for different endpoints when different certs are provided manually - target3, err4 := NewHTTPTarget(ngrokAddress, + target3, err4 := newHTTPTarget(ngrokAddress, 5, 1048576, "application/json", diff --git a/pkg/target/kafka.go b/pkg/target/kafka.go index 4e2808f8..3b4a2f0b 100644 --- a/pkg/target/kafka.go +++ b/pkg/target/kafka.go @@ -25,10 +25,6 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/models" ) -const ( - kafkaTarget = `kafka_target` -) - // KafkaConfig contains configurable options for the kafka target 
type KafkaConfig struct { Brokers string `hcl:"brokers" env:"TARGET_KAFKA_BROKERS"` @@ -57,7 +53,7 @@ type KafkaConfig struct { type KafkaTarget struct { syncProducer sarama.SyncProducer asyncProducer sarama.AsyncProducer - asyncResults chan *SaramaResult + asyncResults chan *saramaResult topicName string brokers string messageByteLimit int @@ -65,8 +61,8 @@ type KafkaTarget struct { log *log.Entry } -// SaramaResult holds the result of a Sarama request -type SaramaResult struct { +// saramaResult holds the result of a Sarama request +type saramaResult struct { Msg *sarama.ProducerMessage Err error } @@ -132,7 +128,7 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { saramaConfig.Net.TLS.Enable = true } - var asyncResults chan *SaramaResult = nil + var asyncResults chan *saramaResult = nil var asyncProducer sarama.AsyncProducer = nil var syncProducer sarama.SyncProducer = nil var producerError error = nil @@ -154,17 +150,17 @@ func NewKafkaTarget(cfg *KafkaConfig) (*KafkaTarget, error) { return nil, producerError } - asyncResults = make(chan *SaramaResult) + asyncResults = make(chan *saramaResult) go func() { for err := range asyncProducer.Errors() { - asyncResults <- &SaramaResult{Msg: err.Msg, Err: err.Err} + asyncResults <- &saramaResult{Msg: err.Msg, Err: err.Err} } }() go func() { for success := range asyncProducer.Successes() { - asyncResults <- &SaramaResult{Msg: success} + asyncResults <- &saramaResult{Msg: success} } }() } else { diff --git a/pkg/target/kafka_test.go b/pkg/target/kafka_test.go index 3565cedc..4fa990c6 100644 --- a/pkg/target/kafka_test.go +++ b/pkg/target/kafka_test.go @@ -23,17 +23,17 @@ func SetUpMockAsyncProducer(t *testing.T) (*mocks.AsyncProducer, *KafkaTarget) { config.Producer.Return.Errors = true mp := mocks.NewAsyncProducer(t, config) - asyncResults := make(chan *SaramaResult) + asyncResults := make(chan *saramaResult) go func() { for err := range mp.Errors() { - asyncResults <- &SaramaResult{Msg: err.Msg, Err: 
err.Err} + asyncResults <- &saramaResult{Msg: err.Msg, Err: err.Err} } }() go func() { for success := range mp.Successes() { - asyncResults <- &SaramaResult{Msg: success} + asyncResults <- &saramaResult{Msg: success} } }() diff --git a/pkg/target/kinesis.go b/pkg/target/kinesis.go index 29f1dd24..2dc898cb 100644 --- a/pkg/target/kinesis.go +++ b/pkg/target/kinesis.go @@ -48,20 +48,20 @@ type KinesisTarget struct { log *log.Entry } -// NewKinesisTarget creates a new client for writing messages to kinesis -func NewKinesisTarget(region string, streamName string, roleARN string) (*KinesisTarget, error) { +// newKinesisTarget creates a new client for writing messages to kinesis +func newKinesisTarget(region string, streamName string, roleARN string) (*KinesisTarget, error) { awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(region, roleARN) if err != nil { return nil, err } kinesisClient := kinesis.New(awsSession, awsConfig) - return NewKinesisTargetWithInterfaces(kinesisClient, *awsAccountID, region, streamName) + return newKinesisTargetWithInterfaces(kinesisClient, *awsAccountID, region, streamName) } -// NewKinesisTargetWithInterfaces allows you to provide a Kinesis client directly to allow +// newKinesisTargetWithInterfaces allows you to provide a Kinesis client directly to allow // for mocking and localstack usage -func NewKinesisTargetWithInterfaces(client kinesisiface.KinesisAPI, awsAccountID string, region string, streamName string) (*KinesisTarget, error) { +func newKinesisTargetWithInterfaces(client kinesisiface.KinesisAPI, awsAccountID string, region string, streamName string) (*KinesisTarget, error) { return &KinesisTarget{ client: client, streamName: streamName, @@ -73,7 +73,7 @@ func NewKinesisTargetWithInterfaces(client kinesisiface.KinesisAPI, awsAccountID // KinesisTargetConfigFunction creates KinesisTarget from KinesisTargetConfig. 
func KinesisTargetConfigFunction(c *KinesisTargetConfig) (*KinesisTarget, error) { - return NewKinesisTarget(c.Region, c.StreamName, c.RoleARN) + return newKinesisTarget(c.Region, c.StreamName, c.RoleARN) } // The KinesisTargetAdapter type is an adapter for functions to be used as diff --git a/pkg/target/kinesis_test.go b/pkg/target/kinesis_test.go index 936d5fa4..4136efaf 100644 --- a/pkg/target/kinesis_test.go +++ b/pkg/target/kinesis_test.go @@ -24,7 +24,7 @@ func TestKinesisTarget_WriteFailure(t *testing.T) { client := testutil.GetAWSLocalstackKinesisClient() - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") assert.Nil(err) assert.NotNil(target) assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/not-exists", target.GetID()) @@ -59,7 +59,7 @@ func TestKinesisTarget_WriteSuccess(t *testing.T) { } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) assert.Nil(err) assert.NotNil(target) @@ -101,7 +101,7 @@ func TestKinesisTarget_WriteSuccess_OversizeBatch(t *testing.T) { } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) assert.Nil(err) assert.NotNil(target) @@ -144,7 +144,7 @@ func TestKinesisTarget_WriteSuccess_OversizeRecord(t *testing.T) { } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) - target, err := NewKinesisTargetWithInterfaces(client, "00000000000", 
testutil.AWSLocalstackRegion, streamName) + target, err := newKinesisTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, streamName) assert.Nil(err) assert.NotNil(target) diff --git a/pkg/target/pubsub.go b/pkg/target/pubsub.go index c25398ed..2af180ce 100644 --- a/pkg/target/pubsub.go +++ b/pkg/target/pubsub.go @@ -41,15 +41,15 @@ type PubSubTarget struct { log *log.Entry } -// PubSubPublishResult contains the publish result and the function to execute +// pubSubPublishResult contains the publish result and the function to execute // on success to ack the send -type PubSubPublishResult struct { +type pubSubPublishResult struct { Result *pubsub.PublishResult Message *models.Message } -// NewPubSubTarget creates a new client for writing messages to Google PubSub -func NewPubSubTarget(projectID string, topicName string) (*PubSubTarget, error) { +// newPubSubTarget creates a new client for writing messages to Google PubSub +func newPubSubTarget(projectID string, topicName string) (*PubSubTarget, error) { ctx := context.Background() client, err := pubsub.NewClient(ctx, projectID) @@ -67,7 +67,7 @@ func NewPubSubTarget(projectID string, topicName string) (*PubSubTarget, error) // PubSubTargetConfigFunction creates PubSubTarget from PubSubTargetConfig func PubSubTargetConfigFunction(c *PubSubTargetConfig) (*PubSubTarget, error) { - return NewPubSubTarget(c.ProjectID, c.TopicName) + return newPubSubTarget(c.ProjectID, c.TopicName) } // The PubSubTargetAdapter type is an adapter for functions to be used as @@ -117,7 +117,7 @@ func (ps *PubSubTarget) Write(messages []*models.Message) (*models.TargetWriteRe ), err } - var results []*PubSubPublishResult + var results []*pubSubPublishResult safeMessages, oversized := models.FilterOversizedMessages( messages, @@ -139,7 +139,7 @@ func (ps *PubSubTarget) Write(messages []*models.Message) (*models.TargetWriteRe } r := ps.topic.Publish(ctx, pubSubMsg) - results = append(results, &PubSubPublishResult{ + results 
= append(results, &pubSubPublishResult{ Result: r, Message: msg, }) diff --git a/pkg/target/sqs.go b/pkg/target/sqs.go index f612b6b1..0cd365c0 100644 --- a/pkg/target/sqs.go +++ b/pkg/target/sqs.go @@ -50,20 +50,20 @@ type SQSTarget struct { log *log.Entry } -// NewSQSTarget creates a new client for writing messages to sqs -func NewSQSTarget(region string, queueName string, roleARN string) (*SQSTarget, error) { +// newSQSTarget creates a new client for writing messages to sqs +func newSQSTarget(region string, queueName string, roleARN string) (*SQSTarget, error) { awsSession, awsConfig, awsAccountID, err := common.GetAWSSession(region, roleARN) if err != nil { return nil, err } sqsClient := sqs.New(awsSession, awsConfig) - return NewSQSTargetWithInterfaces(sqsClient, *awsAccountID, region, queueName) + return newSQSTargetWithInterfaces(sqsClient, *awsAccountID, region, queueName) } -// NewSQSTargetWithInterfaces allows you to provide an SQS client directly to allow +// newSQSTargetWithInterfaces allows you to provide an SQS client directly to allow // for mocking and localstack usage -func NewSQSTargetWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, region string, queueName string) (*SQSTarget, error) { +func newSQSTargetWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, region string, queueName string) (*SQSTarget, error) { return &SQSTarget{ client: client, queueName: queueName, @@ -75,7 +75,7 @@ func NewSQSTargetWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, reg // SQSTargetConfigFunction creates an SQSTarget from an SQSTargetConfig func SQSTargetConfigFunction(c *SQSTargetConfig) (*SQSTarget, error) { - return NewSQSTarget(c.Region, c.QueueName, c.RoleARN) + return newSQSTarget(c.Region, c.QueueName, c.RoleARN) } // The SQSTargetAdapter type is an adapter for functions to be used as diff --git a/pkg/target/sqs_test.go b/pkg/target/sqs_test.go index b9334c7f..3c66e1e2 100644 --- a/pkg/target/sqs_test.go +++ 
b/pkg/target/sqs_test.go @@ -24,7 +24,7 @@ func TestSQSTarget_WriteFailure(t *testing.T) { client := testutil.GetAWSLocalstackSQSClient() - target, err := NewSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") + target, err := newSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, "not-exists") assert.Nil(err) assert.NotNil(target) assert.Equal("arn:aws:sqs:us-east-1:00000000000:not-exists", target.GetID()) @@ -51,7 +51,7 @@ func TestSQSTarget_WriteSuccess(t *testing.T) { queueURL := queueRes.QueueUrl defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) - target, err := NewSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) + target, err := newSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) assert.Nil(err) assert.NotNil(target) @@ -94,7 +94,7 @@ func TestSQSTarget_WritePartialFailure_OversizeRecord(t *testing.T) { queueURL := queueRes.QueueUrl defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) - target, err := NewSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) + target, err := newSQSTargetWithInterfaces(client, "00000000000", testutil.AWSLocalstackRegion, queueName) assert.Nil(err) assert.NotNil(target) diff --git a/pkg/target/stdout.go b/pkg/target/stdout.go index 4ecc0781..7396ce79 100644 --- a/pkg/target/stdout.go +++ b/pkg/target/stdout.go @@ -20,13 +20,18 @@ type StdoutTarget struct { log *log.Entry } -// NewStdoutTarget creates a new client for writing messages to stdout -func NewStdoutTarget() (*StdoutTarget, error) { +// newStdoutTarget creates a new client for writing messages to stdout +func newStdoutTarget() (*StdoutTarget, error) { return &StdoutTarget{ log: log.WithFields(log.Fields{"target": "stdout"}), }, nil } +// StdoutTargetConfigFunction creates an StdoutTarget +func StdoutTargetConfigFunction() (*StdoutTarget, error) { + return newStdoutTarget() +} + 
// The StdoutTargetAdapter type is an adapter for functions to be used as // pluggable components for Stdout Target. It implements the Pluggable interface. type StdoutTargetAdapter func(i interface{}) (interface{}, error) diff --git a/pkg/target/stdout_test.go b/pkg/target/stdout_test.go index 271e02fb..aefa9c39 100644 --- a/pkg/target/stdout_test.go +++ b/pkg/target/stdout_test.go @@ -18,7 +18,7 @@ import ( func TestStdoutTarget_WriteSuccess(t *testing.T) { assert := assert.New(t) - target, err := NewStdoutTarget() + target, err := newStdoutTarget() assert.NotNil(target) assert.Nil(err) assert.Equal("stdout", target.GetID()) diff --git a/third_party/snowplow/badrows/bad_row.go b/third_party/snowplow/badrows/bad_row.go index 6eb8acbc..701d5ffd 100644 --- a/third_party/snowplow/badrows/bad_row.go +++ b/third_party/snowplow/badrows/bad_row.go @@ -33,8 +33,8 @@ type BadRow struct { selfDescribingData *iglu.SelfDescribingData } -// NewBadRow returns a new bad-row structure -func NewBadRow(schema string, data map[string]interface{}, payload []byte, targetByteLimit int) (*BadRow, error) { +// newBadRow returns a new bad-row structure +func newBadRow(schema string, data map[string]interface{}, payload []byte, targetByteLimit int) (*BadRow, error) { payloadLength := len(payload) // Ensure data map does not contain anything for payload diff --git a/third_party/snowplow/badrows/bad_row_test.go b/third_party/snowplow/badrows/bad_row_test.go index 281223ce..8fa94dcc 100644 --- a/third_party/snowplow/badrows/bad_row_test.go +++ b/third_party/snowplow/badrows/bad_row_test.go @@ -23,7 +23,7 @@ func TestNewBadRow_InvalidData(t *testing.T) { }, } - br, err := NewBadRow(schema, data, []byte("Hello World!"), 5000) + br, err := newBadRow(schema, data, []byte("Hello World!"), 5000) assert.NotNil(err) assert.Nil(br) } diff --git a/third_party/snowplow/badrows/generic_error.go b/third_party/snowplow/badrows/generic_error.go index 10ede9ac..e2a3b4c8 100644 --- 
a/third_party/snowplow/badrows/generic_error.go +++ b/third_party/snowplow/badrows/generic_error.go @@ -42,7 +42,7 @@ func NewGenericError(input *GenericErrorInput, targetByteLimit int) (*BadRow, er }, } - return NewBadRow( + return newBadRow( genericErrorSchema, data, input.Payload, diff --git a/third_party/snowplow/badrows/size_violation.go b/third_party/snowplow/badrows/size_violation.go index 24e2727b..2fc65dc8 100644 --- a/third_party/snowplow/badrows/size_violation.go +++ b/third_party/snowplow/badrows/size_violation.go @@ -40,7 +40,7 @@ func NewSizeViolation(input *SizeViolationInput, targetByteLimit int) (*BadRow, }, } - return NewBadRow( + return newBadRow( sizeViolationSchema, data, input.Payload, From a5fe424830e3f78efff38bf0a9e3904e77b73035 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Thu, 16 Jun 2022 18:11:59 +0100 Subject: [PATCH 10/25] Revamp unit testing project-wide (closes #129) --- Makefile | 1 + cmd/init_test.go | 50 +-- config/component_test.go | 11 +- config/config_test.go | 83 ++-- config/setup_test.go | 18 + go.mod | 29 +- go.sum | 148 +++++-- integration/docker-compose.yml | 6 + pkg/common/helpers_test.go | 15 +- pkg/models/message_test.go | 4 +- pkg/models/observer_buffer.go | 2 +- pkg/models/target_write_result_test.go | 53 +++ pkg/models/transformation_result_test.go | 39 ++ pkg/source/kinesis/kinesis_source_test.go | 118 ++++-- pkg/source/pubsub/pubsub_source_test.go | 169 +++++++- pkg/source/sourceconfig/source_config_test.go | 114 ++++- pkg/source/sqs/sqs_source_test.go | 56 ++- pkg/source/stdin/stdin_source_test.go | 12 +- pkg/target/eventhub.go | 37 +- pkg/target/eventhub_test.go | 392 ++++++++++++++++-- pkg/target/http_test.go | 45 +- pkg/target/kafka_test.go | 6 + pkg/target/kinesis_test.go | 9 +- pkg/target/pubsub_test.go | 278 +++++++++++++ pkg/target/setup_test.go | 18 + pkg/target/sqs_test.go | 4 +- pkg/testutil/common.go | 15 + pkg/testutil/pubsub_helpers.go | 131 ++++++ pkg/testutil/source_helpers.go | 33 +- 
pkg/transform/engine_javascript_test.go | 19 +- pkg/transform/engine_lua_test.go | 24 +- pkg/transform/setup_test.go | 18 + .../snowplow_enriched_filter_test.go | 7 +- .../snowplow_enriched_set_pk_test.go | 5 +- .../snowplow_enriched_to_json_test.go | 11 +- pkg/transform/snowplow_enriched_util.go | 5 +- pkg/transform/snowplow_enriched_util_test.go | 58 +++ pkg/transform/transform_test.go | 16 +- .../transformconfig/transform_config_test.go | 27 +- third_party/snowplow/badrows/bad_row_test.go | 3 + .../snowplow/badrows/size_violation_test.go | 6 +- .../iglu/self_describing_data_test.go | 3 + 42 files changed, 1804 insertions(+), 294 deletions(-) create mode 100644 config/setup_test.go create mode 100644 pkg/models/transformation_result_test.go create mode 100644 pkg/target/pubsub_test.go create mode 100644 pkg/target/setup_test.go create mode 100644 pkg/testutil/pubsub_helpers.go create mode 100644 pkg/transform/setup_test.go create mode 100644 pkg/transform/snowplow_enriched_util_test.go diff --git a/Makefile b/Makefile index 2543cf47..39e85f10 100644 --- a/Makefile +++ b/Makefile @@ -111,6 +111,7 @@ integration-down: http-down (cd $(integration_dir) && docker-compose -f ./docker-compose.yml down) rm -rf $(integration_dir)/.localstack +# ngrok needs to be installed and auth token must be configured for this if running locally http-up: (cd "$(integration_dir)/http/server" && go run server.go &) sleep 5 diff --git a/cmd/init_test.go b/cmd/init_test.go index 359e004b..f735ddf2 100644 --- a/cmd/init_test.go +++ b/cmd/init_test.go @@ -13,6 +13,12 @@ import ( "github.com/stretchr/testify/assert" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + func TestInit_Success(t *testing.T) { assert := assert.New(t) @@ -24,23 +30,21 @@ func TestInit_Success(t *testing.T) { func TestInit_Failure(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("STATS_RECEIVER_TIMEOUT_SEC") - - os.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") + 
t.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) + if err != nil { + assert.Equal("Failed to build config: env: parse error on field \"TimeoutSec\" of type \"int\": strconv.ParseInt: parsing \"debug\": invalid syntax", err.Error()) + } } func TestInit_Success_Sentry(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SENTRY_DSN") - defer os.Unsetenv("SENTRY_TAGS") - - os.Setenv("SENTRY_DSN", "https://1111111111111111111111111111111d@sentry.snplow.net/28") - os.Setenv("SENTRY_TAGS", "{\"client_name\":\"com.acme\"}") + t.Setenv("SENTRY_DSN", "https://1111111111111111111111111111111d@sentry.snplow.net/28") + t.Setenv("SENTRY_TAGS", "{\"client_name\":\"com.acme\"}") cfg, _, err := Init() assert.NotNil(cfg) @@ -50,43 +54,39 @@ func TestInit_Success_Sentry(t *testing.T) { func TestInit_Failure_LogLevel(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("LOG_LEVEL") - - os.Setenv("LOG_LEVEL", "DEBUG") + t.Setenv("LOG_LEVEL", "DEBUG") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) - - assert.Equal("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided DEBUG", err.Error()) + if err != nil { + assert.Equal("Supported log levels are 'debug, info, warning, error, fatal, panic'; provided DEBUG", err.Error()) + } } func TestInit_Failure_SentryDSN(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SENTRY_DSN") - - os.Setenv("SENTRY_DSN", "blahblah") + t.Setenv("SENTRY_DSN", "blahblah") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) - - assert.Equal("Failed to build Sentry: [Sentry] DsnParseError: invalid scheme", err.Error()) + if err != nil { + assert.Equal("Failed to build Sentry: [Sentry] DsnParseError: invalid scheme", err.Error()) + } } func TestInit_Failure_SentryTags(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SENTRY_DSN") - defer os.Unsetenv("SENTRY_TAGS") - - os.Setenv("SENTRY_DSN", 
"https://1111111111111111111111111111111d@sentry.snplow.net/28") - os.Setenv("SENTRY_TAGS", "asdasdasd") + t.Setenv("SENTRY_DSN", "https://1111111111111111111111111111111d@sentry.snplow.net/28") + t.Setenv("SENTRY_TAGS", "asdasdasd") cfg, _, err := Init() assert.Nil(cfg) assert.NotNil(err) - - assert.Equal("Failed to unmarshall SENTRY_TAGS to map: invalid character 'a' looking for beginning of value", err.Error()) + if err != nil { + assert.Equal("Failed to unmarshall SENTRY_TAGS to map: invalid character 'a' looking for beginning of value", err.Error()) + } } diff --git a/config/component_test.go b/config/component_test.go index 0e0b7d35..511cef66 100644 --- a/config/component_test.go +++ b/config/component_test.go @@ -8,7 +8,6 @@ package config import ( "errors" - "os" "path/filepath" "reflect" "testing" @@ -20,10 +19,6 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/target" ) -func init() { - os.Clearenv() -} - func TestCreateTargetComponentHCL(t *testing.T) { testCases := []struct { File string @@ -266,7 +261,7 @@ func TestCreateFailureTargetComponentENV(t *testing.T) { t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - assert.Equal(c.Data.FailureTarget.Target.Name, "kafka") + assert.Equal("kafka", c.Data.FailureTarget.Target.Name) decoderOpts := &DecoderOptions{ Prefix: "FAILURE_", } @@ -313,8 +308,8 @@ func TestCreateObserverComponentHCL(t *testing.T) { t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - assert.Equal(c.Data.StatsReceiver.TimeoutSec, 2) - assert.Equal(c.Data.StatsReceiver.BufferSec, 20) + assert.Equal(2, c.Data.StatsReceiver.TimeoutSec) + assert.Equal(20, c.Data.StatsReceiver.BufferSec) use := c.Data.StatsReceiver.Receiver decoderOpts := &DecoderOptions{ diff --git a/config/config_test.go b/config/config_test.go index 3635bc2f..bbaf00cf 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -49,13 +49,9 @@ func TestNewConfig(t *testing.T) { func TestNewConfig_FromEnv(t *testing.T) { 
assert := assert.New(t) - defer os.Unsetenv("LOG_LEVEL") - defer os.Unsetenv("TARGET_NAME") - defer os.Unsetenv("SOURCE_NAME") - - os.Setenv("LOG_LEVEL", "debug") - os.Setenv("TARGET_NAME", "kinesis") - os.Setenv("SOURCE_NAME", "kinesis") + t.Setenv("LOG_LEVEL", "debug") + t.Setenv("TARGET_NAME", "kinesis") + t.Setenv("SOURCE_NAME", "kinesis") c, err := NewConfig() assert.NotNil(c) @@ -71,21 +67,20 @@ func TestNewConfig_FromEnv(t *testing.T) { func TestNewConfig_FromEnvInvalid(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("STATS_RECEIVER_TIMEOUT_SEC") - - os.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") + t.Setenv("STATS_RECEIVER_TIMEOUT_SEC", "debug") c, err := NewConfig() assert.Nil(c) assert.NotNil(err) + if err != nil { + assert.Equal("env: parse error on field \"TimeoutSec\" of type \"int\": strconv.ParseInt: parsing \"debug\": invalid syntax", err.Error()) + } } func TestNewConfig_InvalidTarget(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("TARGET_NAME") - - os.Setenv("TARGET_NAME", "fake") + t.Setenv("TARGET_NAME", "fake") c, err := NewConfig() assert.NotNil(c) @@ -96,15 +91,15 @@ func TestNewConfig_InvalidTarget(t *testing.T) { source, err := c.GetTarget() assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + } } func TestNewConfig_InvalidFailureTarget(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("FAILURE_TARGET_NAME") - - os.Setenv("FAILURE_TARGET_NAME", "fake") + t.Setenv("FAILURE_TARGET_NAME", "fake") c, err := NewConfig() assert.NotNil(c) @@ -115,15 +110,15 @@ func TestNewConfig_InvalidFailureTarget(t *testing.T) { source, err := c.GetFailureTarget("testAppName", "0.0.0") assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid 
failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fake'", err.Error()) + } } func TestNewConfig_InvalidFailureFormat(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("FAILURE_TARGETS_FORMAT") - - os.Setenv("FAILURE_TARGETS_FORMAT", "fake") + t.Setenv("FAILURE_TARGETS_FORMAT", "fake") c, err := NewConfig() assert.NotNil(c) @@ -134,15 +129,15 @@ func TestNewConfig_InvalidFailureFormat(t *testing.T) { source, err := c.GetFailureTarget("testAppName", "0.0.0") assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid failure format found; expected one of 'snowplow' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Invalid failure format found; expected one of 'snowplow' and got 'fake'", err.Error()) + } } func TestNewConfig_InvalidStatsReceiver(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("STATS_RECEIVER_NAME") - - os.Setenv("STATS_RECEIVER_NAME", "fake") + t.Setenv("STATS_RECEIVER_NAME", "fake") c, err := NewConfig() assert.NotNil(c) @@ -153,7 +148,9 @@ func TestNewConfig_InvalidStatsReceiver(t *testing.T) { source, err := c.GetObserver(map[string]string{}) assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid stats receiver found; expected one of 'statsd' and got 'fake'", err.Error()) + if err != nil { + assert.Equal("Invalid stats receiver found; expected one of 'statsd' and got 'fake'", err.Error()) + } } func TestNewConfig_GetTags(t *testing.T) { @@ -193,14 +190,18 @@ func TestNewConfig_Hcl_invalids(t *testing.T) { target, err := c.GetTarget() assert.Nil(target) assert.NotNil(err) - assert.Equal("Invalid target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + if err != nil { + assert.Equal("Invalid target found; expected one of 'stdout, 
kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + } }) t.Run("invalid_failure_target", func(t *testing.T) { ftarget, err := c.GetFailureTarget("testAppName", "0.0.0") assert.Nil(ftarget) assert.NotNil(err) - assert.Equal("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + if err != nil { + assert.Equal("Invalid failure target found; expected one of 'stdout, kinesis, pubsub, sqs, kafka, eventhub, http' and got 'fakeHCL'", err.Error()) + } }) } @@ -217,16 +218,16 @@ func TestNewConfig_Hcl_defaults(t *testing.T) { t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - assert.Equal(c.Data.Source.Use.Name, "stdin") - assert.Equal(c.Data.Target.Use.Name, "stdout") - assert.Equal(c.Data.FailureTarget.Target.Name, "stdout") - assert.Equal(c.Data.FailureTarget.Format, "snowplow") - assert.Equal(c.Data.Sentry.Tags, "{}") - assert.Equal(c.Data.Sentry.Debug, false) - assert.Equal(c.Data.StatsReceiver.TimeoutSec, 1) - assert.Equal(c.Data.StatsReceiver.BufferSec, 15) - assert.Equal(c.Data.Transform.Message, "none") - assert.Equal(c.Data.LogLevel, "info") + assert.Equal("stdin", c.Data.Source.Use.Name) + assert.Equal("stdout", c.Data.Target.Use.Name) + assert.Equal("stdout", c.Data.FailureTarget.Target.Name) + assert.Equal("snowplow", c.Data.FailureTarget.Format) + assert.Equal("{}", c.Data.Sentry.Tags) + assert.Equal(false, c.Data.Sentry.Debug) + assert.Equal(1, c.Data.StatsReceiver.TimeoutSec) + assert.Equal(15, c.Data.StatsReceiver.BufferSec) + assert.Equal("none", c.Data.Transform.Message) + assert.Equal("info", c.Data.LogLevel) } func TestNewConfig_Hcl_sentry(t *testing.T) { @@ -241,9 +242,9 @@ func TestNewConfig_Hcl_sentry(t *testing.T) { t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - assert.Equal(c.Data.Sentry.Debug, true) - assert.Equal(c.Data.Sentry.Tags, "{\"testKey\":\"testValue\"}") - assert.Equal(c.Data.Sentry.Dsn, 
"testDsn") + assert.Equal(true, c.Data.Sentry.Debug) + assert.Equal("{\"testKey\":\"testValue\"}", c.Data.Sentry.Tags) + assert.Equal("testDsn", c.Data.Sentry.Dsn) } func TestDefaultTransformation(t *testing.T) { diff --git a/config/setup_test.go b/config/setup_test.go new file mode 100644 index 00000000..4119babe --- /dev/null +++ b/config/setup_test.go @@ -0,0 +1,18 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package config + +import ( + "os" + "testing" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} diff --git a/go.mod b/go.mod index 45fd6602..f2942d97 100644 --- a/go.mod +++ b/go.mod @@ -3,8 +3,8 @@ module github.com/snowplow-devops/stream-replicator go 1.17 require ( - cloud.google.com/go v0.91.1 // indirect - cloud.google.com/go/pubsub v1.14.0 + cloud.google.com/go v0.102.0 // indirect + cloud.google.com/go/pubsub v1.22.2 github.com/Azure/azure-amqp-common-go/v3 v3.1.0 // indirect github.com/Azure/azure-event-hubs-go/v3 v3.3.12 github.com/Azure/azure-sdk-for-go v56.2.0+incompatible // indirect @@ -38,14 +38,13 @@ require ( github.com/urfave/cli v1.22.5 github.com/xdg/scram v1.0.3 golang.org/x/crypto v0.0.0-20220214200702-86341886e292 // indirect - golang.org/x/mod v0.5.0 // indirect golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 // indirect - golang.org/x/oauth2 v0.0.0-20210810183815-faf39c7919d5 // indirect - golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e // indirect + golang.org/x/oauth2 v0.0.0-20220524215830-622c5d57e401 // indirect + golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect golang.org/x/text v0.3.7 // indirect - google.golang.org/api v0.54.0 // indirect - google.golang.org/genproto v0.0.0-20210813162853-db860fec028c // indirect - google.golang.org/grpc v1.40.0 // indirect + google.golang.org/api v0.81.0 // indirect + 
google.golang.org/genproto v0.0.0-20220523171625-347a074981d8 + google.golang.org/grpc v1.46.2 gopkg.in/stretchr/testify.v1 v1.2.2 // indirect ) @@ -62,6 +61,8 @@ require ( ) require ( + cloud.google.com/go/compute v1.6.1 // indirect + cloud.google.com/go/iam v0.3.0 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect github.com/Azure/go-autorest/autorest/to v0.4.0 // indirect @@ -77,8 +78,8 @@ require ( github.com/eapache/queue v1.1.0 // indirect github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect github.com/golang/protobuf v1.5.2 // indirect - github.com/google/go-cmp v0.5.6 // indirect - github.com/googleapis/gax-go/v2 v2.0.5 // indirect + github.com/google/go-cmp v0.5.8 // indirect + github.com/googleapis/gax-go/v2 v2.4.0 // indirect github.com/hashicorp/go-immutable-radix v1.1.0 // indirect github.com/hashicorp/go-memdb v1.0.4 // indirect github.com/hashicorp/go-uuid v1.0.2 // indirect @@ -90,7 +91,6 @@ require ( github.com/jcmturner/rpc/v2 v2.0.3 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/json-iterator/go v1.1.11 // indirect - github.com/jstemmer/go-junit-report v0.9.1 // indirect github.com/mattn/go-sqlite3 v2.0.2+incompatible // indirect github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect @@ -101,12 +101,9 @@ require ( github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/xdg/stringprep v1.0.3 // indirect go.opencensus.io v0.23.0 // indirect - golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect - golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect - golang.org/x/tools v0.1.5 // indirect - golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect google.golang.org/appengine v1.6.7 // indirect - 
google.golang.org/protobuf v1.27.1 // indirect + google.golang.org/protobuf v1.28.0 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect ) diff --git a/go.sum b/go.sum index cfea9f12..c1393f3d 100644 --- a/go.sum +++ b/go.sum @@ -21,29 +21,46 @@ cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= -cloud.google.com/go v0.88.0/go.mod h1:dnKwfYbP9hQhefiUvpbcAyoGSHUrOxR20JVElLiUvEY= cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= -cloud.google.com/go v0.91.1 h1:w+u8ttN/QtYrpvgXNUd2G6kwqrqCIQbkINlXQjHP1ek= -cloud.google.com/go v0.91.1/go.mod h1:V358WZfbFQkmC3gv5XCxzZq2e3h7OGvQR0IXtj77ylI= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= +cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= +cloud.google.com/go v0.102.0 h1:DAq3r8y4mDgyB/ZPJ9v/5VJNqjgJAxTn6ZYLlUywOu8= +cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= cloud.google.com/go/bigquery 
v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow= +cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= +cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= +cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= +cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc= +cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= +cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= +cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= +cloud.google.com/go/kms v1.4.0 h1:iElbfoE61VeLhnZcGOltqL8HIly8Nhbe5t6JlH9GXjo= +cloud.google.com/go/kms v1.4.0/go.mod h1:fajBHndQ+6ubNw6Ss2sSd+SWvjL26RNo/dr7uxsnnOA= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.14.0 h1:l58eo7mixlotSL9sQo/+h12Nm+3d30IMccgT2AssMME= -cloud.google.com/go/pubsub v1.14.0/go.mod h1:boZDy/YGYWAyVd11q7KN4HFOJcadGhUfEBfzweSQ7Ww= +cloud.google.com/go/pubsub v1.22.2 h1:e6A4rhtMX4opff/jDWApl4HwLtsCdV9VULVfpFRp6eo= +cloud.google.com/go/pubsub v1.22.2/go.mod 
h1:LBHGrtgM7+SGKCDKQu2pKIRtGwbZyJvRDkMk0594xdU= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/Azure/azure-amqp-common-go/v3 v3.0.1/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= @@ -131,6 +148,7 @@ github.com/caarlos0/env/v6 v6.9.1/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Z github.com/cactus/go-statsd-client/statsd v0.0.0-20190922113730-52b467de415c/go.mod h1:D4RDtP0MffJ3+R36OkGul0LwJLIN8nRb0Ac6jZmJCmo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -138,7 +156,12 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= 
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= @@ -179,6 +202,8 @@ github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5y github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= 
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= @@ -263,8 +288,10 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= @@ -284,17 +311,21 @@ github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLe github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210715191844-86eeefc3e471/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof 
v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210804190019-f964ff605595/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM= +github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM= +github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR82awk= +github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= +github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= @@ -360,7 +391,6 @@ github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u github.com/json-iterator/go 
v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= @@ -577,7 +607,6 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= @@ -590,8 +619,6 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.5.0 h1:UG21uOlmZabA4fW5i7ZX6bjw1xELEGg/ZLgZq9auk/Q= -golang.org/x/mod v0.5.0/go.mod 
h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -635,6 +662,11 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -651,8 +683,13 @@ golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 
v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210810183815-faf39c7919d5 h1:Ati8dO7+U7mxpkPSxBZQEvzHVUYB/MqCklCN8ig5w/o= -golang.org/x/oauth2 v0.0.0-20210810183815-faf39c7919d5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220524215830-622c5d57e401 h1:zwrSfklXn0gxyLRX/aR+q6cgHbV/ItVyzbPlbA+dkAw= +golang.org/x/oauth2 v0.0.0-20220524215830-622c5d57e401/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -663,8 +700,9 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 h1:w8s32wxx3sY+OjLlv9qltkLU5yvJzxjjgiHWLjdIcw4= +golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -718,8 +756,20 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys 
v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -735,7 +785,7 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -789,13 +839,13 @@ golang.org/x/tools v0.1.1/go.mod 
h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -821,9 +871,21 @@ google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59t google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= -google.golang.org/api v0.52.0/go.mod h1:Him/adpjt0sxtkWViy0b6xyKW/SD71CwdJ7HqJo7SrU= -google.golang.org/api v0.54.0 h1:ECJUVngj71QI6XEm7b1sAf8BljU5inEhMbKPR8Lxhhk= 
google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= +google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= +google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA= +google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8= +google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs= +google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= +google.golang.org/api v0.78.0/go.mod h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= +google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= +google.golang.org/api v0.81.0 h1:o8WF5AvfidafWbFjsRyupxyEQJNUWxLZJCK5NXrxZZ8= +google.golang.org/api v0.81.0/go.mod h1:FA6Mb/bZxj706H2j+j2d6mHEEaHBmbbWnkfvmorOCko= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -871,6 +933,7 @@ google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210329143202-679c6ae281ee/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= @@ -879,13 +942,36 @@ google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxH google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= -google.golang.org/genproto v0.0.0-20210721163202-f1cecdd8b78a/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210722135532-667f2b7c528f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= -google.golang.org/genproto v0.0.0-20210811021853-ddbe55d93216/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= -google.golang.org/genproto v0.0.0-20210813162853-db860fec028c h1:iLQakcwWG3k/++1q/46apVb1sUQ3IqIdn9yUE6eh/xA= google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod 
h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI= +google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E= +google.golang.org/genproto v0.0.0-20220407144326-9054f6ed7bac/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto 
v0.0.0-20220413183235-5e96e2839df9/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220414192740-2d67ff6cf2b4/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= +google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220523171625-347a074981d8 h1:4NSrVrQGh6+UqBEd+Kwdh6ZDwESH0Sj2bNUQN+VjoQk= +google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -910,8 +996,13 @@ google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQ google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= -google.golang.org/grpc v1.40.0 h1:AGJ0Ih4mHjSeibYkFGh1dD9KJ/eOtZ93I6hoHhukQ5Q= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= 
+google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2 h1:u+MLGgVf7vRdjEYZ8wDFhAVNmhkbJ5hmrA1LMWK1CAQ= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -925,8 +1016,9 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/integration/docker-compose.yml b/integration/docker-compose.yml index 85bf28a9..8dd2058c 100644 --- a/integration/docker-compose.yml +++ b/integration/docker-compose.yml @@ -16,3 +16,9 @@ services: environment: - SERVICES=sqs,kinesis,dynamodb - 
DATA_DIR=/tmp/localstack/data + pubsub: + image: bigtruedata/gcloud-pubsub-emulator + command: start --host-port 0.0.0.0:8432 + ports: + - "0.0.0.0:8432:8432" + diff --git a/pkg/common/helpers_test.go b/pkg/common/helpers_test.go index 15fd47ae..3360f29c 100644 --- a/pkg/common/helpers_test.go +++ b/pkg/common/helpers_test.go @@ -8,7 +8,6 @@ package common import ( "crypto/tls" - "os" "strings" "testing" "time" @@ -16,12 +15,7 @@ import ( "github.com/stretchr/testify/assert" ) -func init() { - os.Clearenv() -} - // --- Cloud Helpers - func TestGetGCPServiceAccountFromBase64(t *testing.T) { assert := assert.New(t) defer DeleteTemporaryDir() @@ -39,9 +33,11 @@ func TestGetGCPServiceAccountFromBase64_NotBase64(t *testing.T) { path, err := GetGCPServiceAccountFromBase64("helloworld") - assert.Equal(path, "") + assert.Equal("", path) assert.NotNil(err) - assert.True(strings.HasPrefix(err.Error(), "Failed to Base64 decode")) + if err != nil { + assert.True(strings.HasPrefix(err.Error(), "Failed to Base64 decode")) + } } func TestGetAWSSession(t *testing.T) { @@ -59,6 +55,9 @@ func TestGetAWSSession(t *testing.T) { assert.NotNil(cfg2) assert.Nil(accID2) assert.NotNil(err2) + if err != nil { + assert.Equal("InvalidParameter: 1 validation error(s) found.\n- minimum field size of 20, AssumeRoleInput.RoleArn.\n", err2.Error()) + } } // --- Generic Helpers diff --git a/pkg/models/message_test.go b/pkg/models/message_test.go index efd01de4..95ba8faf 100644 --- a/pkg/models/message_test.go +++ b/pkg/models/message_test.go @@ -27,7 +27,9 @@ func TestMessageString(t *testing.T) { msg.SetError(errors.New("failure")) assert.NotNil(msg.GetError()) - assert.Equal("failure", msg.GetError().Error()) + if msg.GetError() != nil { + assert.Equal("failure", msg.GetError().Error()) + } } func TestGetChunkedMessages(t *testing.T) { diff --git a/pkg/models/observer_buffer.go b/pkg/models/observer_buffer.go index a67f8a28..1598284d 100644 --- a/pkg/models/observer_buffer.go +++ 
b/pkg/models/observer_buffer.go @@ -176,7 +176,7 @@ func (b *ObserverBuffer) String() string { b.MaxMsgLatency.Milliseconds(), b.MaxFilterLatency.Milliseconds(), b.MaxTransformLatency.Milliseconds(), - b.SumTransformLatency.Milliseconds(), // Reporting sums for rc version in order to compute averages in load tests. + b.SumTransformLatency.Milliseconds(), // Sums are reported to allow us to compute averages across multi-instance deployments b.SumProcLatency.Milliseconds(), b.SumMsgLatency.Milliseconds(), ) diff --git a/pkg/models/target_write_result_test.go b/pkg/models/target_write_result_test.go index 9a156b9f..e563b91c 100644 --- a/pkg/models/target_write_result_test.go +++ b/pkg/models/target_write_result_test.go @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/assert" ) +// TestNewTargetWriteResult_EmptyWithoutTime tests that an empty targetWriteResult with no timings will report 0s across the board func TestNewTargetWriteResult_EmptyWithoutTime(t *testing.T) { assert := assert.New(t) @@ -36,6 +37,7 @@ func TestNewTargetWriteResult_EmptyWithoutTime(t *testing.T) { assert.Equal(time.Duration(0), r.AvgTransformLatency) } +// TestNewTargetWriteResult_EmptyWithTime tests that an empty targetWriteResult with no a provided timestamp will report 0s across the board func TestNewTargetWriteResult_EmptyWithTime(t *testing.T) { assert := assert.New(t) @@ -59,6 +61,7 @@ func TestNewTargetWriteResult_EmptyWithTime(t *testing.T) { assert.Equal(time.Duration(0), r.AvgTransformLatency) } +// TestNewTargetWriteResult_WithMessages tests that reporting of statistics is as it should be when we have all data func TestNewTargetWriteResult_WithMessages(t *testing.T) { assert := assert.New(t) @@ -159,3 +162,53 @@ func TestNewTargetWriteResult_WithMessages(t *testing.T) { assert.Equal(time.Duration(1)*time.Minute, r3.MinTransformLatency) assert.Equal(time.Duration(3)*time.Minute, r3.AvgTransformLatency) } + +// TestNewTargetWriteResult_NoTransformation tests that reporting of 
statistics is as it should be when we don't have a timeTransformed +// At time of writing there is a bug whereby these will report negative transformLatency stats: https://github.com/snowplow-devops/stream-replicator/issues/108 +// Commenting this test out for the time being, it can serve as an illustration of the problem and unit test for fixing that bug +/* +func TestNewTargetWriteResult_NoTransformation(t *testing.T) { + assert := assert.New(t) + + timeNow := time.Now().UTC() + + sent := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + TimeCreated: timeNow.Add(time.Duration(-50) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-4) * time.Minute), + }, + { + Data: []byte("Bar"), + PartitionKey: "partition2", + TimeCreated: timeNow.Add(time.Duration(-70) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-7) * time.Minute), + }, + } + failed := []*Message{ + { + Data: []byte("Foo"), + PartitionKey: "partition3", + TimeCreated: timeNow.Add(time.Duration(-30) * time.Minute), + TimePulled: timeNow.Add(time.Duration(-10) * time.Minute), + }, + } + + r := NewTargetWriteResultWithTime(sent, failed, nil, nil, timeNow) + assert.NotNil(r) + + assert.Equal(int64(2), r.SentCount) + assert.Equal(int64(1), r.FailedCount) + assert.Equal(int64(3), r.Total()) + assert.Equal(time.Duration(10)*time.Minute, r.MaxProcLatency) + assert.Equal(time.Duration(4)*time.Minute, r.MinProcLatency) + assert.Equal(time.Duration(7)*time.Minute, r.AvgProcLatency) + assert.Equal(time.Duration(70)*time.Minute, r.MaxMsgLatency) + assert.Equal(time.Duration(30)*time.Minute, r.MinMsgLatency) + assert.Equal(time.Duration(50)*time.Minute, r.AvgMsgLatency) + assert.Equal(time.Duration(0), r.MaxTransformLatency) + assert.Equal(time.Duration(0), r.MinTransformLatency) + assert.Equal(time.Duration(0), r.AvgTransformLatency) +} +*/ diff --git a/pkg/models/transformation_result_test.go b/pkg/models/transformation_result_test.go new file mode 100644 index 00000000..4ebc1bb0 --- 
/dev/null +++ b/pkg/models/transformation_result_test.go @@ -0,0 +1,39 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package models + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// TestNewTransformationResult test NewTransformationResult. +// It doesn't do a whole lot so we don't need much here. +func TestNewTransformationResult(t *testing.T) { + assert := assert.New(t) + + msgs := []*Message{ + { + Data: []byte("Baz"), + PartitionKey: "partition1", + }, + { + Data: []byte("Bar"), + PartitionKey: "partition2", + }, + } + + res := NewTransformationResult(msgs, msgs, msgs) + + assert.Equal(int64(2), res.ResultCount) + assert.Equal(int64(2), res.FilteredCount) + assert.Equal(int64(2), res.InvalidCount) + assert.Equal(msgs, res.Result) + assert.Equal(msgs, res.Filtered) + assert.Equal(msgs, res.Invalid) +} diff --git a/pkg/source/kinesis/kinesis_source_test.go b/pkg/source/kinesis/kinesis_source_test.go index 390f8cb0..be6e08da 100644 --- a/pkg/source/kinesis/kinesis_source_test.go +++ b/pkg/source/kinesis/kinesis_source_test.go @@ -26,6 +26,64 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/testutil" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +func TestNewKinesisSourceWithInterfaces_Success(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + // Since this requires a localstack client (until we implement a mock and make unit tests), + // We'll only run it with the integration tests for the time being. 
+ assert := assert.New(t) + + // Set up localstack resources + kinesisClient := testutil.GetAWSLocalstackKinesisClient() + dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() + + streamName := "kinesis-source-integration-1" + createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) + if createErr != nil { + t.Fatal(createErr) + } + defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) + + appName := "integration" + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } + + defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) + + source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, streamName, appName, nil) + + assert.IsType(&kinesisSource{}, source) + assert.Nil(err) +} + +// newKinesisSourceWithInterfaces should fail if we can't reach Kinesis and DDB, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestNewKinesisSourceWithInterfaces_Failure(t *testing.T) { + // Unlike the success test, we don't require anything to exist for this one + assert := assert.New(t) + + // Set up localstack resources + kinesisClient := testutil.GetAWSLocalstackKinesisClient() + dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() + + source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, "nonexistent-stream", "test", nil) + + assert.Nil(&kinesisSource{}, source) + assert.NotNil(err) + +} +*/ + +// TODO: When we address https://github.com/snowplow-devops/stream-replicator/issues/151, this test will need to change. 
func TestKinesisSource_ReadFailure_NoResources(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") @@ -43,7 +101,9 @@ func TestKinesisSource_ReadFailure_NoResources(t *testing.T) { err = source.Read(nil) assert.NotNil(err) - assert.Equal("Failed to start Kinsumer client: error describing table fake-name_checkpoints: ResourceNotFoundException: Cannot do operations on a non-existent table", err.Error()) + if err != nil { + assert.Equal("Failed to start Kinsumer client: error describing table fake-name_checkpoints: ResourceNotFoundException: Cannot do operations on a non-existent table", err.Error()) + } } func TestKinesisSource_ReadMessages(t *testing.T) { @@ -57,32 +117,36 @@ func TestKinesisSource_ReadMessages(t *testing.T) { kinesisClient := testutil.GetAWSLocalstackKinesisClient() dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() - streamName := "kinesis-source-integration-1" + streamName := "kinesis-source-integration-2" createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) appName := "integration" - testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) - + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) // Put ten records into kinesis stream putErr := putNRecordsIntoKinesis(kinesisClient, 10, streamName, "Test") if putErr != nil { - panic(putErr) + t.Fatal(putErr) } + time.Sleep(1 * time.Second) + // Create the source and assert that it's there source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, streamName, appName, nil) assert.Nil(err) assert.NotNil(source) - 
assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-1", source.GetID()) + assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-2", source.GetID()) // Read data from stream and check that we got it all - successfulReads := testutil.ReadAndReturnMessages(source) + successfulReads := testutil.ReadAndReturnMessages(source, 3*time.Second, testutil.DefaultTestWriteBuilder, nil) assert.Equal(10, len(successfulReads)) } @@ -98,22 +162,25 @@ func TestKinesisSource_StartTimestamp(t *testing.T) { kinesisClient := testutil.GetAWSLocalstackKinesisClient() dynamodbClient := testutil.GetAWSLocalstackDynamoDBClient() - streamName := "kinesis-source-integration-2" + streamName := "kinesis-source-integration-3" createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) appName := "integration" - testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) // Put two batches of 10 records into kinesis stream, grabbing a timestamp in between putErr := putNRecordsIntoKinesis(kinesisClient, 10, streamName, "First batch") if putErr != nil { - panic(putErr) + t.Fatal(putErr) } time.Sleep(1 * time.Second) // Put a 1s buffer either side of the start timestamp @@ -122,17 +189,17 @@ func TestKinesisSource_StartTimestamp(t *testing.T) { putErr2 := putNRecordsIntoKinesis(kinesisClient, 10, streamName, "Second batch") if putErr2 != nil { - panic(putErr2) + t.Fatal(putErr2) } // Create the source (with start timestamp) and assert that it's there source, err := newKinesisSourceWithInterfaces(kinesisClient, dynamodbClient, "00000000000", 15, testutil.AWSLocalstackRegion, 
streamName, appName, &timeToStart) assert.Nil(err) assert.NotNil(source) - assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-2", source.GetID()) + assert.Equal("arn:aws:kinesis:us-east-1:00000000000:stream/kinesis-source-integration-3", source.GetID()) // Read from stream - successfulReads := testutil.ReadAndReturnMessages(source) + successfulReads := testutil.ReadAndReturnMessages(source, 3*time.Second, testutil.DefaultTestWriteBuilder, nil) // Check that we have ten messages assert.Equal(10, len(successfulReads)) @@ -168,25 +235,22 @@ func TestGetSource_WithKinesisSource(t *testing.T) { streamName := "kinesis-source-config-integration-1" createErr := testutil.CreateAWSLocalstackKinesisStream(kinesisClient, streamName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackKinesisStream(kinesisClient, streamName) appName := "kinesisSourceIntegration" - testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) - + ddbErr := testutil.CreateAWSLocalstackDynamoDBTables(dynamodbClient, appName) + if ddbErr != nil { + t.Fatal(ddbErr) + } defer testutil.DeleteAWSLocalstackDynamoDBTables(dynamodbClient, appName) - defer os.Unsetenv("SOURCE_NAME") - defer os.Unsetenv("SOURCE_KINESIS_STREAM_NAME") - defer os.Unsetenv("SOURCE_KINESIS_REGION") - defer os.Unsetenv("SOURCE_KINESIS_APP_NAME") - - os.Setenv("SOURCE_NAME", "kinesis") + t.Setenv("SOURCE_NAME", "kinesis") - os.Setenv("SOURCE_KINESIS_STREAM_NAME", streamName) - os.Setenv("SOURCE_KINESIS_REGION", testutil.AWSLocalstackRegion) - os.Setenv("SOURCE_KINESIS_APP_NAME", appName) + t.Setenv("SOURCE_KINESIS_STREAM_NAME", streamName) + t.Setenv("SOURCE_KINESIS_REGION", testutil.AWSLocalstackRegion) + t.Setenv("SOURCE_KINESIS_APP_NAME", appName) c, err := config.NewConfig() assert.NotNil(c) diff --git a/pkg/source/pubsub/pubsub_source_test.go b/pkg/source/pubsub/pubsub_source_test.go index 423f12de..8de38102 100644 --- 
a/pkg/source/pubsub/pubsub_source_test.go +++ b/pkg/source/pubsub/pubsub_source_test.go @@ -6,18 +6,45 @@ package pubsubsource -// Commenting out as it fails on CI - passes on local as I have default creds for a real account -// TODO: Find a way to integration test pubsub +import ( + "os" + "sort" + "strconv" + "sync" + "testing" + "time" -/* -func TestGetSource_WithPubsubSource(t *testing.T) { + "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/source/sourceconfig" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +func TestPubSubSource_ReadAndReturnSuccessIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } assert := assert.New(t) - supportedSources := []sourceconfig.SourceConfigPair{PubsubSourceConfigPair} + // Create pubsub integration resource and populate with 10 messages + testutil.CreatePubsubResourcesAndWrite(10, t) + defer testutil.DeletePubsubResources(t) + + t.Setenv("SOURCE_NAME", "pubsub") + t.Setenv("SOURCE_PUBSUB_SUBSCRIPTION_ID", "test-sub") + t.Setenv("SOURCE_PUBSUB_PROJECT_ID", `project-test`) - defer os.Unsetenv("SOURCE") + adaptedHandle := adapterGenerator(configFunction) - os.Setenv("SOURCE", "pubsub") + pubsubSourceConfigPair := sourceconfig.ConfigPair{Name: "pubsub", Handle: adaptedHandle} + supportedSources := []sourceconfig.ConfigPair{pubsubSourceConfigPair} pubsubConfig, err := config.NewConfig() assert.NotNil(pubsubConfig) @@ -27,6 +54,132 @@ func TestGetSource_WithPubsubSource(t *testing.T) { assert.NotNil(pubsubSource) assert.Nil(err) - assert.Equal("projects//subscriptions/", pubsubSource.GetID()) + assert.Equal("projects/project-test/subscriptions/test-sub", pubsubSource.GetID()) + + output := testutil.ReadAndReturnMessages(pubsubSource, 5*time.Second, testutil.DefaultTestWriteBuilder, nil) + 
assert.Equal(10, len(output)) + for _, message := range output { + assert.Contains(string(message.Data), `message #`) + assert.Nil(message.GetError()) + } +} + +// newPubSubSource_Failure should fail if we can't reach PubSub, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestNewPubSubSource_Failure(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + pubsubSource, err := newPubSubSource(10, "nonexistent-project", "nonexistent-subscription") + assert.NotNil(err) + assert.Nil(pubsubSource) + // This should return an error when we can't connect, rather than proceeding to the Write() function before we hit a problem. } */ + +// TestNewPubSubSource_Success tests the typical case of creating a new pubsub source. +func TestNewPubSubSource_Success(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + testutil.InitMockPubsubServer(8010, nil, t) + + pubsubSource, err := newPubSubSource(10, "project-test", "test-sub") + assert.Nil(err) + assert.IsType(&pubSubSource{}, pubsubSource) + // This should return an error when we can't connect, rather than proceeding to the Write() function before we hit a problem. 
+} + +func TestPubSubSource_ReadAndReturnSuccessWithMock(t *testing.T) { + assert := assert.New(t) + + srv, conn := testutil.InitMockPubsubServer(8008, nil, t) + defer srv.Close() + defer conn.Close() + + // Publish ten messages + numMsgs := 10 + wg := sync.WaitGroup{} + for i := 0; i < numMsgs; i++ { + wg.Add(1) + go func(i int) { + _ = srv.Publish(`projects/project-test/topics/test-topic`, []byte(strconv.Itoa(i)), nil) + wg.Done() + }(i) + } + wg.Wait() + + pubsubSource, err := newPubSubSource(10, "project-test", "test-sub") + + assert.NotNil(pubsubSource) + assert.Nil(err) + assert.Equal("projects/project-test/subscriptions/test-sub", pubsubSource.GetID()) + + output := testutil.ReadAndReturnMessages(pubsubSource, 3*time.Second, testutil.DefaultTestWriteBuilder, nil) + assert.Equal(10, len(output)) + + // Check that we got exactly the 10 messages we want, with no duplicates + msgDatas := make([]string, 0) + for _, msg := range output { + msgDatas = append(msgDatas, string(msg.Data)) + } + expected := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"} + sort.Strings(msgDatas) + assert.Equal(expected, msgDatas) +} + +// TestPubSubSource_ReadAndReturnSuccessWithMock_DelayedAcks tests the behaviour of pubsub source when some messages take longer to ack than others +func TestPubSubSource_ReadAndReturnSuccessWithMock_DelayedAcks(t *testing.T) { + assert := assert.New(t) + + srv, conn := testutil.InitMockPubsubServer(8008, nil, t) + defer srv.Close() + defer conn.Close() + + // publish 10 messages + numMsgs := 10 + wg := sync.WaitGroup{} + for i := 0; i < numMsgs; i++ { + wg.Add(1) + go func(i int) { + _ = srv.Publish(`projects/project-test/topics/test-topic`, []byte(strconv.Itoa(i)), nil) + wg.Done() + }(i) + } + wg.Wait() + + t.Setenv("SOURCE_NAME", "pubsub") + t.Setenv("SOURCE_PUBSUB_SUBSCRIPTION_ID", "test-sub") + t.Setenv("SOURCE_PUBSUB_PROJECT_ID", `project-test`) + + adaptedHandle := adapterGenerator(configFunction) + + pubsubSourceConfigPair := 
sourceconfig.ConfigPair{Name: "pubsub", Handle: adaptedHandle} + supportedSources := []sourceconfig.ConfigPair{pubsubSourceConfigPair} + + pubsubConfig, err := config.NewConfig() + assert.NotNil(pubsubConfig) + assert.Nil(err) + + pubsubSource, err := sourceconfig.GetSource(pubsubConfig, supportedSources) + + assert.NotNil(pubsubSource) + assert.Nil(err) + assert.Equal("projects/project-test/subscriptions/test-sub", pubsubSource.GetID()) + + output := testutil.ReadAndReturnMessages(pubsubSource, 5*time.Second, testutil.DelayedAckTestWriteBuilder, 2*time.Second) + assert.Equal(10, len(output)) + + // Check that we got exactly the 10 messages we want, with no duplicates + msgDatas := make([]string, 0) + for _, msg := range output { + msgDatas = append(msgDatas, string(msg.Data)) + } + expected := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"} + sort.Strings(msgDatas) + assert.Equal(expected, msgDatas) +} diff --git a/pkg/source/sourceconfig/source_config_test.go b/pkg/source/sourceconfig/source_config_test.go index 3c69ad48..ad915ec4 100644 --- a/pkg/source/sourceconfig/source_config_test.go +++ b/pkg/source/sourceconfig/source_config_test.go @@ -11,15 +11,84 @@ import ( "testing" config "github.com/snowplow-devops/stream-replicator/config" + "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" "github.com/stretchr/testify/assert" ) -func TestNewConfig_InvalidSource(t *testing.T) { +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +// Mock a Source and configuration +type mockSource struct{} + +func (m mockSource) Read(sf *sourceiface.SourceFunctions) error { + return nil +} + +func (m mockSource) Stop() {} + +func (m mockSource) GetID() string { + return "" +} + +type configuration struct{} + +func configfunction(c *configuration) (sourceiface.Source, error) { + return mockSource{}, nil +} + +type adapter func(i interface{}) (interface{}, error) + +func adapterGenerator(f func(c *configuration) 
(sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + return mockSource{}, nil + } +} + +func (f adapter) Create(i interface{}) (interface{}, error) { + return f(i) +} + +func (f adapter) ProvideDefault() (interface{}, error) { + // Provide defaults + cfg := &configuration{} + + return cfg, nil +} + +var mockConfigPair = ConfigPair{ + Name: "mock", + Handle: adapterGenerator(configfunction), +} + +// TestGetSource_ValidSource tests the happy path for GetSource +func TestGetSource_ValidSource(t *testing.T) { assert := assert.New(t) - defer os.Unsetenv("SOURCE_NAME") + t.Setenv("SOURCE_NAME", "mock") + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + supportedSources := []ConfigPair{mockConfigPair} + + source, err := GetSource(c, supportedSources) + + assert.Equal(mockSource{}, source) + assert.Nil(err) +} + +// TestGetSource_InvalidSource tests that we throw an error when given an invalid source configuration +func TestGetSource_InvalidSource(t *testing.T) { + assert := assert.New(t) - os.Setenv("SOURCE_NAME", "fake") + t.Setenv("SOURCE_NAME", "fake") c, err := config.NewConfig() assert.NotNil(c) @@ -32,5 +101,42 @@ func TestNewConfig_InvalidSource(t *testing.T) { source, err := GetSource(c, supportedSources) assert.Nil(source) assert.NotNil(err) - assert.Equal("Invalid source found: fake. Supported sources in this build: ", err.Error()) + if err != nil { + assert.Equal("Invalid source found: fake. 
Supported sources in this build: ", err.Error()) + } +} + +// Mock a broken adapter generator implementation +func brokenAdapterGenerator(f func(c *configuration) (sourceiface.Source, error)) adapter { + return func(i interface{}) (interface{}, error) { + return nil, nil + } +} + +var mockUnhappyConfigPair = ConfigPair{ + Name: "mockUnhappy", + Handle: brokenAdapterGenerator(configfunction), +} + +// TestGetSource_BadConfig tests the case where the configuration implementation is broken +func TestGetSource_BadConfig(t *testing.T) { + assert := assert.New(t) + + t.Setenv("SOURCE_NAME", "mockUnhappy") + + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + supportedSources := []ConfigPair{mockUnhappyConfigPair} + + source, err := GetSource(c, supportedSources) + + assert.Nil(source) + assert.NotNil(err) + if err != nil { + assert.Equal("could not interpret source configuration for \"mockUnhappy\"", err.Error()) + } } diff --git a/pkg/source/sqs/sqs_source_test.go b/pkg/source/sqs/sqs_source_test.go index fba5297b..84aa4b77 100644 --- a/pkg/source/sqs/sqs_source_test.go +++ b/pkg/source/sqs/sqs_source_test.go @@ -24,6 +24,49 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/testutil" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + +// func newSQSSourceWithInterfaces(client sqsiface.SQSAPI, awsAccountID string, concurrentWrites int, region string, queueName string) (*sqsSource, error) { +func TestNewSQSSourceWithInterfaces_Success(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + // Since this requires a localstack client (until we implement a mock and make unit tests), + // We'll only run it with the integration tests for the time being. 
+ assert := assert.New(t) + + client := testutil.GetAWSLocalstackSQSClient() + + queueName := "sqs-queue-source" + queueURL := testutil.SetupAWSLocalstackSQSQueueWithMessages(client, queueName, 50, "Hello SQS!!") + defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) + + source, err := newSQSSourceWithInterfaces(client, "00000000000", 10, testutil.AWSLocalstackRegion, queueName) + + assert.IsType(&sqsSource{}, source) + assert.Nil(err) +} + +// newSQSSourceWithInterfaces should fail if we can't reach SQS, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestNewSQSSourceWithInterfaces_Failure(t *testing.T) { + // Unlike the success test, we don't require anything to exist for this one + assert := assert.New(t) + + client := testutil.GetAWSLocalstackSQSClient() + + source, err := newSQSSourceWithInterfaces(client, "00000000000", 10, testutil.AWSLocalstackRegion, "nonexistent-queue") + + assert.Nil(source) + assert.NotNil(err) +} +*/ + +// TODO: When we address https://github.com/snowplow-devops/stream-replicator/issues/151, this test will need to change. 
func TestSQSSource_ReadFailure(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") @@ -40,6 +83,9 @@ func TestSQSSource_ReadFailure(t *testing.T) { err = source.Read(nil) assert.NotNil(err) + if err != nil { + assert.Equal("Failed to get SQS queue URL: AWS.SimpleQueueService.NonExistentQueue: AWS.SimpleQueueService.NonExistentQueue; see the SQS docs.\n\tstatus code: 400, request id: 00000000-0000-0000-0000-000000000000", err.Error()) + } } func TestSQSSource_ReadSuccess(t *testing.T) { @@ -88,7 +134,7 @@ func TestSQSSource_ReadSuccess(t *testing.T) { select { case <-done: case <-time.After(5 * time.Second): - panic("TestSQSSource_ReadSuccess timed out!") + t.Fatal("TestSQSSource_ReadSuccess timed out!") } assert.Equal(50, messageCount) @@ -107,15 +153,13 @@ func TestGetSource_WithSQSSource(t *testing.T) { queueName := "sqs-source-config-integration-1" _, createErr := testutil.CreateAWSLocalstackSQSQueue(sqsClient, queueName) if createErr != nil { - panic(createErr) + t.Fatal(createErr) } defer testutil.DeleteAWSLocalstackSQSQueue(sqsClient, &queueName) - defer os.Unsetenv("SOURCE_NAME") - - os.Setenv("SOURCE_NAME", "sqs") - os.Setenv("SOURCE_SQS_QUEUE_NAME", queueName) + t.Setenv("SOURCE_NAME", "sqs") + t.Setenv("SOURCE_SQS_QUEUE_NAME", queueName) c, err := config.NewConfig() assert.NotNil(c) diff --git a/pkg/source/stdin/stdin_source_test.go b/pkg/source/stdin/stdin_source_test.go index eeb7506e..38d78fa3 100644 --- a/pkg/source/stdin/stdin_source_test.go +++ b/pkg/source/stdin/stdin_source_test.go @@ -19,6 +19,12 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} + func TestStdinSource_ReadSuccess(t *testing.T) { assert := assert.New(t) @@ -60,14 +66,12 @@ func TestStdinSource_ReadSuccess(t *testing.T) { } func TestGetSource_WithStdinSource(t *testing.T) { + t.Setenv("SOURCE_NAME", "stdin") + assert := assert.New(t) 
supportedSources := []sourceconfig.ConfigPair{ConfigPair} - defer os.Unsetenv("SOURCE_NAME") - - os.Setenv("SOURCE_NAME", "stdin") - c, err := config.NewConfig() assert.NotNil(c) if err != nil { diff --git a/pkg/target/eventhub.go b/pkg/target/eventhub.go index b29b9d65..a69d665b 100644 --- a/pkg/target/eventhub.go +++ b/pkg/target/eventhub.go @@ -33,7 +33,7 @@ type EventHubConfig struct { // EventHubTarget holds a new client for writing messages to Azure EventHub type EventHubTarget struct { - client *eventhub.Hub + client clientIface eventHubNamespace string eventHubName string messageByteLimit int @@ -45,6 +45,28 @@ type EventHubTarget struct { log *log.Entry } +// clientIface allows us to mock the entire eventhub.Hub client, since they don't provide interfaces for mocking https://github.com/Azure/azure-event-hubs-go/issues/98 +type clientIface interface { + SendBatch(context.Context, eventhub.BatchIterator, ...eventhub.BatchOption) error + Close(context.Context) error +} + +// newEventHubTargetWithInterfaces allows for mocking the eventhub client +func newEventHubTargetWithInterfaces(client clientIface, cfg *EventHubConfig) *EventHubTarget { + return &EventHubTarget{ + client: client, + eventHubNamespace: cfg.EventHubNamespace, + eventHubName: cfg.EventHubName, + messageByteLimit: cfg.MessageByteLimit, + chunkByteLimit: cfg.ChunkByteLimit, + chunkMessageLimit: cfg.ChunkMessageLimit, + contextTimeoutInSeconds: cfg.ContextTimeoutInSeconds, + batchByteLimit: cfg.BatchByteLimit, + + log: log.WithFields(log.Fields{"target": "eventhub", "cloud": "Azure", "namespace": cfg.EventHubNamespace, "eventhub": cfg.EventHubName}), + } +} + // newEventHubTarget creates a new client for writing messages to Azure EventHub func newEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { @@ -70,18 +92,7 @@ func newEventHubTarget(cfg *EventHubConfig) (*EventHubTarget, error) { // If none is specified, it will retry indefinitely until the context times out, which hides the 
actual error message // To avoid obscuring errors, contextTimeoutInSeconds should be configured to ensure all retries may be completed before its expiry - return &EventHubTarget{ - client: hub, - eventHubNamespace: cfg.EventHubNamespace, - eventHubName: cfg.EventHubName, - messageByteLimit: cfg.MessageByteLimit, - chunkByteLimit: cfg.ChunkByteLimit, - chunkMessageLimit: cfg.ChunkMessageLimit, - contextTimeoutInSeconds: cfg.ContextTimeoutInSeconds, - batchByteLimit: cfg.BatchByteLimit, - - log: log.WithFields(log.Fields{"target": "eventhub", "cloud": "Azure", "namespace": cfg.EventHubNamespace, "eventhub": cfg.EventHubName}), - }, err + return newEventHubTargetWithInterfaces(hub, cfg), err } // EventHubTargetConfigFunction creates an EventHubTarget from an EventHubconfig diff --git a/pkg/target/eventhub_test.go b/pkg/target/eventhub_test.go index 715f901d..44318820 100644 --- a/pkg/target/eventhub_test.go +++ b/pkg/target/eventhub_test.go @@ -7,70 +7,414 @@ package target import ( - "os" + "context" + "fmt" + "sort" + "strings" + "sync/atomic" "testing" + "time" + eventhub "github.com/Azure/azure-event-hubs-go/v3" + "github.com/pkg/errors" + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" "github.com/stretchr/testify/assert" + "github.com/twinj/uuid" ) var cfg = EventHubConfig{ - EventHubNamespace: "test", - EventHubName: "test", + EventHubNamespace: "test", + EventHubName: "test", + MaxAutoRetries: 1, + MessageByteLimit: 1048576, + ChunkByteLimit: 1048576, + ChunkMessageLimit: 500, + ContextTimeoutInSeconds: 20, + BatchByteLimit: 1048576, } -func unsetEverything() { - os.Unsetenv("EVENTHUB_KEY_NAME") - os.Unsetenv("EVENTHUB_KEY_VALUE") +var errMock = errors.New("Mock Failure Path") - os.Unsetenv("EVENTHUB_CONNECTION_STRING") +type mockHub struct { + // Channel to output results + results chan *eventhub.EventBatch + // Boolean to allow us to mock failure path + fail bool +} + +// Sendbatch is a 
mock of the Eventhubs SendBatch method. If m.fail is true, it returns an error. +// Otherwise, it uses the provided BatchIterator to mimic the batching behaviour in the client, and feeds +// those batches into the m.results channel. +func (m mockHub) SendBatch(ctx context.Context, iterator eventhub.BatchIterator, opts ...eventhub.BatchOption) error { + if m.fail { + return errMock + } + + //mimic eventhubs SendBatch behaviour loosely + batchOptions := &eventhub.BatchOptions{ + MaxSize: eventhub.DefaultMaxMessageSizeInBytes, + } - os.Unsetenv("AZURE_TENANT_ID") - os.Unsetenv("AZURE_CLIENT_ID") + for _, opt := range opts { + if err := opt(batchOptions); err != nil { - os.Unsetenv("AZURE_CLIENT_SECRET") + return err + } + } - os.Unsetenv("AZURE_CERTIFICATE_PATH") - os.Unsetenv("AZURE_CERTIFICATE_PASSWORD") + for !iterator.Done() { + id := uuid.NewV4() + + batch, err := iterator.Next(id.String(), batchOptions) + if err != nil { + return err + } + m.results <- batch + } + return nil } -func TestNewEventHubTarget_KeyValue(t *testing.T) { +// Close isn't used, it's just here to satisfy the mock API interface +func (m mockHub) Close(context.Context) error { + return nil +} + +// getResults retrieves and returns results from the mock's results channel, +// it blocks until no result have come in for the timeout period +func getResults(resultChannel chan *eventhub.EventBatch, timeout time.Duration) []*eventhub.EventBatch { + res := make([]*eventhub.EventBatch, 0) + +ResultsLoop: + for { + select { + case batch := <-resultChannel: + res = append(res, batch) + case <-time.After(1 * time.Second): + break ResultsLoop + } + } + + return res +} + +// TestProcessWithRandomPartitionKeys tests the process() function happy path when we set the eventhub partition key to a random value. +// When we explicitly set the partition key, events are batched by partition key - so random PK should result in batches of 1. 
+func TestProcessWithRandomPartitionKeys(t *testing.T) { assert := assert.New(t) - unsetEverything() + // Happy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) - // Test that we can initialise a client with Key and Value - defer os.Unsetenv("EVENTHUB_KEY_NAME") - defer os.Unsetenv("EVENTHUB_KEY_VALUE") + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(10, testutil.GenRandomString(100), ackFunc) + + var twres *models.TargetWriteResult + var err error + + go func() { + twres, err = tgt.process(messages) + }() + res := getResults(m.results, 1*time.Second) + + // Check that we got correct amonut of batches + assert.Equal(10, len(res)) + // Check that we acked correct amount of times + assert.Equal(int64(10), ackOps) + // Check that we got no error and the TargetWriteResult is as expected. + assert.Nil(err) + assert.Equal(10, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) +} + +// TestProcessFailure tests that we get correct behaviour in a failure scenario. +func TestProcessFailure(t *testing.T) { + assert := assert.New(t) + + // Unhappy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + fail: true, + } + tgtToFail := newEventHubTargetWithInterfaces(m, &cfg) + + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(10, testutil.GenRandomString(100), ackFunc) + + var twres *models.TargetWriteResult + var err error + + // We don't really need to spawn a goroutine here, + // however not doing so and reading results will make the test hang when misconfigured + // so for future debuggers' sanity let's do it this way. 
+ go func() { + twres, err = tgtToFail.process(messages) + }() + + failRes := getResults(m.results, 500*time.Millisecond) + + // Check that we got correct amonut of batches + assert.Equal(0, len(failRes)) + // Check that we acked correct amount of times + assert.Equal(int64(0), ackOps) + // Check that we got the desired error and the TargetWriteResult is as expected. + assert.NotNil(err) + if err != nil { + assert.Equal("Failed to send message batch to EventHub: Mock Failure Path", err.Error()) + } + assert.Nil(twres.Sent) + assert.Equal(10, len(twres.Failed)) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) +} + +// TestProcessWithNoPartitionKey tests the process() function happy path when we don't set a partition key. +// Note that at time of writing, we actually cannot do this. However it illustrates the behaviour of the EH client well, +// and can serve as the basis for developing a solution to https://github.com/snowplow-devops/stream-replicator/issues/148 +// (To see it run successfully before we fix that behaviour, comment out `ehEvent.PartitionKey = &msg.PartitionKey` in the process function.) +/* +func TestProcessWithNoPartitionKey(t *testing.T) { + assert := assert.New(t) + + // Happy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(10, testutil.GenRandomString(100), ackFunc) + + var twres *models.TargetWriteResult + var err error + + go func() { + twres, err = tgt.process(messages) + }() + res := getResults(m.results, 1*time.Second) + + // Check that we got correct amonut of batches + assert.Equal(1, len(res)) + // Check that we acked correct amount of times + assert.Equal(int64(10), ackOps) + // Check that we got no error and the TargetWriteResult is as expected. 
+ assert.Nil(err) + assert.Equal(10, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) +} +*/ + +func TestProcessBatchingByPartitionKey(t *testing.T) { + assert := assert.New(t) + + // Happy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) - os.Setenv("EVENTHUB_KEY_NAME", "fake") - os.Setenv("EVENTHUB_KEY_VALUE", "fake") + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(99, testutil.GenRandomString(100), ackFunc) + + // Assign one of three evenly distributed partition keys + for i, msg := range messages { + msg.PartitionKey = fmt.Sprintf("PK%d", i%3) + } + + var twres *models.TargetWriteResult + var err error + + go func() { + twres, err = tgt.process(messages) + }() + res := getResults(m.results, 1*time.Second) + + // Check that we got correct amonut of batches + assert.Equal(3, len(res)) + // Check that we acked correct amount of times + assert.Equal(int64(99), ackOps) + // Check that we got no error and the TargetWriteResult is as expected. + assert.Nil(err) + assert.Equal(99, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + + // The data iteslf isn't public from the EH client, but at least we can check that the partition keys are as expected. + pksFound := make([]string, 0) + for _, r := range res { + pksFound = append(pksFound, *r.Event.PartitionKey) + } + sort.Strings(pksFound) + assert.Equal([]string{"PK0", "PK1", "PK2"}, pksFound) +} + +// TestWriteSuccess test the happy path for the Write() function. +func TestWriteSuccess(t *testing.T) { + assert := assert.New(t) + + // Happy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) + // Max chunk size of 20 just to validate behaviour with some chunking involved. 
+ tgt.chunkMessageLimit = 20 + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(100, testutil.GenRandomString(100), ackFunc) + + // Set the partition key all to the same value to ensure that batching behaviour is down to chunking rather than EH client batching (which we test elsewhere) + for _, msg := range messages { + msg.PartitionKey = "testPK" + } + + var twres *models.TargetWriteResult + var err error + + go func() { + twres, err = tgt.Write(messages) + }() + res := getResults(m.results, 1*time.Second) + + // Check that we got correct amonut of batches + assert.Equal(5, len(res)) + // Check that we acked correct amount of times + assert.Equal(int64(100), ackOps) + // Check that we got no error and the TargetWriteResult is as expected. + assert.Nil(err) + assert.Equal(100, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) +} + +// TestWriteFailure tests the unhappy path for the Write function. +func TestWriteFailure(t *testing.T) { + assert := assert.New(t) + + // Unhappy path + m := mockHub{ + results: make(chan *eventhub.EventBatch), + fail: true, + } + tgt := newEventHubTargetWithInterfaces(m, &cfg) + // Max chunk size of 20 just to validate behaviour with several errors + tgt.chunkMessageLimit = 20 + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetTestMessages(100, testutil.GenRandomString(100), ackFunc) + + var twres *models.TargetWriteResult + var err error + + go func() { + twres, err = tgt.Write(messages) + }() + res := getResults(m.results, 1*time.Second) + + // Check that we got correct amonut of batches + assert.Equal(0, len(res)) + // Check that we acked correct amount of times + assert.Equal(int64(0), ackOps) + // Check that we got the expected error and the TargetWriteResult is as expected. 
+ assert.NotNil(err) + if err != nil { + assert.True(strings.Contains(err.Error(), "Error writing messages to EventHub: 5 errors occurred:")) + assert.Equal(5, strings.Count(err.Error(), "Failed to send message batch to EventHub: Mock Failure Path")) + } + assert.Nil(twres.Sent) + assert.Equal(100, len(twres.Failed)) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) +} + +// TestNewEventHubTarget_KeyValue tests that we can initialise a client with key value credentials. +func TestNewEventHubTarget_KeyValue(t *testing.T) { + assert := assert.New(t) + + // Test that we can initialise a client with Key and Value + t.Setenv("EVENTHUB_KEY_NAME", "fake") + t.Setenv("EVENTHUB_KEY_VALUE", "fake") tgt, err := newEventHubTarget(&cfg) assert.Nil(err) assert.NotNil(tgt) } +// TestNewEventHubTarget_ConnString tests that we can initialise a client with connection string credentials. func TestNewEventHubTarget_ConnString(t *testing.T) { assert := assert.New(t) - unsetEverything() - // Test that we can initialise a client with Connection String - defer os.Unsetenv("EVENTHUB_CONNECTION_STRING") - os.Setenv("EVENTHUB_CONNECTION_STRING", "Endpoint=sb://test.servicebus.windows.net/;SharedAccessKeyName=fake;SharedAccessKey=fake") + t.Setenv("EVENTHUB_CONNECTION_STRING", "Endpoint=sb://test.servicebus.windows.net/;SharedAccessKeyName=fake;SharedAccessKey=fake") tgt, err := newEventHubTarget(&cfg) assert.Nil(err) assert.NotNil(tgt) } +// TestNewEventHubTarget_CredentialsNotFound tests that we fail on startup when we're not provided with appropriate credential values. +func TestNewEventHubTarget_CredentialsNotFound(t *testing.T) { + assert := assert.New(t) + + tgt, err := newEventHubTarget(&cfg) + assert.NotNil(err) + if err != nil { + assert.Equal("Error initialising EventHub client: No valid combination of authentication Env vars found. 
https://pkg.go.dev/github.com/Azure/azure-event-hubs-go#NewHubWithNamespaceNameAndEnvironment", err.Error()) + } + assert.Nil(tgt) +} + +// NewEventHubTarget should fail if we can't reach EventHub, commented out this test until we look into https://github.com/snowplow-devops/stream-replicator/issues/151 +// Note that when we do so, the above tests will need to be changed to use some kind of mock +/* func TestNewEventHubTarget_Failure(t *testing.T) { assert := assert.New(t) - unsetEverything() + // Test that we can initialise a client with Key and Value + t.Setenv("EVENTHUB_KEY_NAME", "fake") + t.Setenv("EVENTHUB_KEY_VALUE", "fake") tgt, err := newEventHubTarget(&cfg) assert.Equal("Error initialising EventHub client: No valid combination of authentication Env vars found. https://pkg.go.dev/github.com/Azure/azure-event-hubs-go#NewHubWithNamespaceNameAndEnvironment", err.Error()) assert.Nil(tgt) } +*/ diff --git a/pkg/target/http_test.go b/pkg/target/http_test.go index 1a0c20ac..c43a2e7c 100644 --- a/pkg/target/http_test.go +++ b/pkg/target/http_test.go @@ -28,7 +28,7 @@ func createTestServer(results *[][]byte, waitgroup *sync.WaitGroup) *httptest.Se defer req.Body.Close() data, err := ioutil.ReadAll(req.Body) if err != nil { - panic(err) // If we hit this error, something went wrong with the test setup, so panic + panic(err) } mutex.Lock() *results = append(*results, data) @@ -68,13 +68,19 @@ func TestGetHeaders(t *testing.T) { invalid1 := `{"Max Forwards": 10}` out4, err4 := getHeaders(invalid1) - assert.Equal("Error parsing headers. Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal number into Go value of type string", err4.Error()) + assert.NotNil(err4) + if err4 != nil { + assert.Equal("Error parsing headers. 
Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal number into Go value of type string", err4.Error()) + } assert.Nil(out4) invalid2 := `[{"Max Forwards": "10"}]` out5, err5 := getHeaders(invalid2) - assert.Equal("Error parsing headers. Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal array into Go value of type map[string]string", err5.Error()) + assert.NotNil(err5) + if err5 != nil { + assert.Equal("Error parsing headers. Ensure that headers are provided as a JSON of string key-value pairs: json: cannot unmarshal array into Go value of type map[string]string", err5.Error()) + } assert.Nil(out5) } @@ -84,7 +90,7 @@ func TestAddHeadersToRequest(t *testing.T) { req, err := http.NewRequest("POST", "abc", bytes.NewBuffer([]byte("def"))) if err != nil { - panic(err) + t.Fatal(err) } headersToAdd := map[string]string{"Max Forwards": "10", "Accept-Language": "en-US,en-IE", "Accept-Datetime": "Thu, 31 May 2007 20:35:00 GMT"} @@ -99,7 +105,7 @@ func TestAddHeadersToRequest(t *testing.T) { req2, err2 := http.NewRequest("POST", "abc", bytes.NewBuffer([]byte("def"))) if err2 != nil { - panic(err2) + t.Fatal(err2) } var noHeadersToAdd map[string]string noHeadersExpected := http.Header{} @@ -119,11 +125,17 @@ func TestNewHTTPTarget(t *testing.T) { failedHTTPTarget, err1 := newHTTPTarget("something", 5, 1048576, "application/json", "", "", "", "", "", "", true) - assert.Equal("Invalid url for HTTP target: 'something'", err1.Error()) + assert.NotNil(err1) + if err1 != nil { + assert.Equal("Invalid url for HTTP target: 'something'", err1.Error()) + } assert.Nil(failedHTTPTarget) failedHTTPTarget2, err2 := newHTTPTarget("", 5, 1048576, "application/json", "", "", "", "", "", "", true) - assert.Equal("Invalid url for HTTP target: ''", err2.Error()) + assert.NotNil(err2) + if err2 != nil { + assert.Equal("Invalid url for HTTP target: ''", err2.Error()) + } assert.Nil(failedHTTPTarget2) } @@ -137,7 
+149,7 @@ func TestHttpWrite_Simple(t *testing.T) { target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -171,7 +183,7 @@ func TestHttpWrite_Concurrent(t *testing.T) { target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } mu := &sync.Mutex{} @@ -214,7 +226,7 @@ func TestHttpWrite_Failure(t *testing.T) { target, err := newHTTPTarget("http://NonexistentEndpoint", 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -227,8 +239,9 @@ func TestHttpWrite_Failure(t *testing.T) { writeResult, err1 := target.Write(messages) assert.NotNil(err1) - - assert.Regexp("Error sending http request: 10 errors occurred:.*", err1.Error()) + if err1 != nil { + assert.Regexp("Error sending http request: 10 errors occurred:.*", err1.Error()) + } assert.Equal(10, len(writeResult.Failed)) assert.Nil(writeResult.Sent) @@ -245,7 +258,7 @@ func TestHttpWrite_Oversized(t *testing.T) { target, err := newHTTPTarget(server.URL, 5, 1048576, "application/json", "", "", "", "", "", "", true) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -300,7 +313,7 @@ func TestHttpWrite_TLS(t *testing.T) { string(`../../integration/http/rootCA.crt`), false) if err != nil { - panic(err) + t.Fatal(err) } var ackOps int64 @@ -335,7 +348,7 @@ func TestHttpWrite_TLS(t *testing.T) { false) if err2 != nil { os.RemoveAll(`tmp_replicator`) - panic(err2) + t.Fatal(err2) } writeResult2, err3 := target2.Write(messages) @@ -363,7 +376,7 @@ func TestHttpWrite_TLS(t *testing.T) { false) if err4 != nil { os.RemoveAll(`tmp_replicator`) - panic(err4) + t.Fatal(err4) } writeResult3, err5 := target3.Write(messages) diff --git a/pkg/target/kafka_test.go b/pkg/target/kafka_test.go index 4fa990c6..28b1a168 100644 --- 
a/pkg/target/kafka_test.go +++ b/pkg/target/kafka_test.go @@ -75,6 +75,9 @@ func TestKafkaTarget_AsyncWriteFailure(t *testing.T) { writeRes, err := target.Write(messages) assert.NotNil(err) + if err != nil { + assert.Equal("Error writing messages to Kafka topic: : 1 error occurred:\n\t* kafka: client has run out of available brokers to talk to\n\n", err.Error()) + } assert.NotNil(writeRes) // Check results @@ -127,6 +130,9 @@ func TestKafkaTarget_SyncWriteFailure(t *testing.T) { writeRes, err := target.Write(messages) assert.NotNil(err) + if err != nil { + assert.Equal("Error writing messages to Kafka topic: : 1 error occurred:\n\t* kafka: client has run out of available brokers to talk to\n\n", err.Error()) + } assert.NotNil(writeRes) // Check results diff --git a/pkg/target/kinesis_test.go b/pkg/target/kinesis_test.go index 4136efaf..5d7aee8f 100644 --- a/pkg/target/kinesis_test.go +++ b/pkg/target/kinesis_test.go @@ -36,6 +36,9 @@ func TestKinesisTarget_WriteFailure(t *testing.T) { writeRes, err := target.Write(messages) assert.NotNil(err) + if err != nil { + assert.Equal("Error writing messages to Kinesis stream: 1 error occurred:\n\t* Failed to send message batch to Kinesis stream: ResourceNotFoundException: Stream not-exists under account 000000000000 not found.\n\n", err.Error()) + } assert.NotNil(writeRes) // Check results @@ -55,7 +58,7 @@ func TestKinesisTarget_WriteSuccess(t *testing.T) { streamName := "kinesis-stream-target-1" err := testutil.CreateAWSLocalstackKinesisStream(client, streamName) if err != nil { - panic(err) + t.Fatal(err) } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) @@ -97,7 +100,7 @@ func TestKinesisTarget_WriteSuccess_OversizeBatch(t *testing.T) { streamName := "kinesis-stream-target-2" err := testutil.CreateAWSLocalstackKinesisStream(client, streamName) if err != nil { - panic(err) + t.Fatal(err) } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) @@ -140,7 +143,7 @@ func 
TestKinesisTarget_WriteSuccess_OversizeRecord(t *testing.T) { streamName := "kinesis-stream-target-3" err := testutil.CreateAWSLocalstackKinesisStream(client, streamName) if err != nil { - panic(err) + t.Fatal(err) } defer testutil.DeleteAWSLocalstackKinesisStream(client, streamName) diff --git a/pkg/target/pubsub_test.go b/pkg/target/pubsub_test.go new file mode 100644 index 00000000..74ffeeaf --- /dev/null +++ b/pkg/target/pubsub_test.go @@ -0,0 +1,278 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package target + +import ( + "context" + "sort" + "strings" + "sync/atomic" + "testing" + + "cloud.google.com/go/pubsub/pstest" + "github.com/stretchr/testify/assert" + pubsubV1 "google.golang.org/genproto/googleapis/pubsub/v1" + "google.golang.org/grpc/codes" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" +) + +func TestPubSubTarget_WriteSuccessIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + testutil.CreatePubsubResourcesAndWrite(0, t) + defer testutil.DeletePubsubResources(t) + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + messages := testutil.GetTestMessages(10, "Hello Pubsub!!", nil) + + result, err := pubsubTarget.Write(messages) + + assert.Equal(int64(10), result.Total()) + assert.Equal([]*models.Message(nil), result.Failed) + assert.Equal([]*models.Message(nil), result.Oversized) + + assert.Nil(err) +} + +func TestPubSubTarget_WriteTopicUnopenedIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + 
+ testutil.CreatePubsubResourcesAndWrite(0, t) + defer testutil.DeletePubsubResources(t) + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + + messages := testutil.GetTestMessages(1, ``, nil) + + _, err = pubsubTarget.Write(messages) + + assert.Error(err) +} + +func TestPubSubTarget_WithInvalidMessageIntegration(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + assert := assert.New(t) + + testutil.CreatePubsubResourcesAndWrite(0, t) + defer testutil.DeletePubsubResources(t) + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + messages := testutil.GetTestMessages(1, `test`, nil) + messages = append(messages, testutil.GetTestMessages(1, ``, nil)...) 
+ + result, err := pubsubTarget.Write(messages) + + assert.Equal(int64(1), result.Total()) + assert.Equal(1, len(result.Invalid)) + + assert.Nil(err) +} + +// TestPubSubTarget_WriteSuccessWithMocks unit tests the happy path for PubSub target +func TestPubSubTarget_WriteSuccessWithMocks(t *testing.T) { + assert := assert.New(t) + srv, conn := testutil.InitMockPubsubServer(8563, nil, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + assert.Nil(err) + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetSequentialTestMessages(10, ackFunc) + + twres, err := pubsubTarget.Write(messages) + // Check that the TargetWriteResult is correct + assert.Equal(int64(10), twres.SentCount) + assert.Equal(10, len(twres.Sent)) + assert.Nil(twres.Failed) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + assert.Nil(err) + + res, pullErr := srv.GServer.Pull(context.TODO(), &pubsubV1.PullRequest{ + Subscription: "projects/project-test/subscriptions/test-sub", + MaxMessages: 15, // 15 max messages to ensure we don't miss dupes + }) + if pullErr != nil { + t.Fatal(pullErr) + } + + var results []string + + for _, msg := range res.ReceivedMessages { + results = append(results, string(msg.Message.Data)) + } + + expected := []string{"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"} + sort.Strings(results) + assert.Equal(expected, results) + + // Check that we acked correct amount of times + assert.Equal(int64(10), ackOps) +} + +// TestPubSubTarget_WriteFailureWithMocks unit tests the unhappy path for PubSub target +func TestPubSubTarget_WriteFailureWithMocks(t *testing.T) { + assert := assert.New(t) + + // Initialise the mock server with un-retryable error + opts := 
[]pstest.ServerReactorOption{ + pstest.WithErrorInjection("Publish", codes.PermissionDenied, "Some Error"), + } + srv, conn := testutil.InitMockPubsubServer(8563, opts, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + if err != nil { + t.Fatal(err) + } + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetSequentialTestMessages(10, ackFunc) + + twres, err := pubsubTarget.Write(messages) + + // Check that the TargetWriteResult is correct + assert.Equal(int64(0), twres.SentCount) + assert.Equal(int64(10), twres.FailedCount) + assert.Equal(10, len(twres.Failed)) + assert.Nil(twres.Sent) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + assert.NotNil(err) + if err != nil { + assert.True(strings.Contains(err.Error(), "Error writing messages to PubSub topic: 10 errors occurred:")) + assert.Equal(10, strings.Count(err.Error(), "rpc error: code = PermissionDenied desc = Some Error")) + } +} + +// TestPubSubTarget_WriteFailureRetryableWithMocks unit tests the unhappy path for PubSub target +// This isn't an integration test, but takes a long time so we skip on short runs +// This test demonstrates the case where retryable errors are obscured somewhat. 
+// We should try to make these more transparent: https://github.com/snowplow-devops/stream-replicator/issues/156 +func TestPubSubTarget_WriteFailureRetryableWithMocks(t *testing.T) { + if testing.Short() { + t.Skip("skipping slow test") + } + assert := assert.New(t) + + // Initialise the mock server with retryable error + opts := []pstest.ServerReactorOption{ + pstest.WithErrorInjection("Publish", codes.Unknown, "Some Error"), + } + srv, conn := testutil.InitMockPubsubServer(8563, opts, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + assert.NotNil(pubsubTarget) + if err != nil { + t.Fatal(err) + } + assert.Equal("projects/project-test/topics/test-topic", pubsubTarget.GetID()) + pubsubTarget.Open() + defer pubsubTarget.Close() + + // Mechanism for counting acks + var ackOps int64 + ackFunc := func() { + atomic.AddInt64(&ackOps, 1) + } + + messages := testutil.GetSequentialTestMessages(10, ackFunc) + + twres, err := pubsubTarget.Write(messages) + + // Check that the TargetWriteResult is correct + assert.Equal(int64(0), twres.SentCount) + assert.Equal(int64(10), twres.FailedCount) + assert.Equal(10, len(twres.Failed)) + assert.Nil(twres.Sent) + assert.Nil(twres.Oversized) + assert.Nil(twres.Invalid) + assert.NotNil(err) + if err != nil { + assert.True(strings.Contains(err.Error(), "Error writing messages to PubSub topic: 10 errors occurred:")) + assert.Equal(10, strings.Count(err.Error(), "context deadline exceeded")) + } +} + +// TestNewPubSubTarget_Success tests that we newPubSubTarget returns a PubSubTarget +func TestNewPubSubTarget_Success(t *testing.T) { + assert := assert.New(t) + + // This isn't needed at present, but adding it as we'll need it after https://github.com/snowplow-devops/stream-replicator/issues/151 + srv, conn := testutil.InitMockPubsubServer(8563, nil, t) + defer srv.Close() + defer conn.Close() + + pubsubTarget, err := newPubSubTarget(`project-test`, `test-topic`) + + 
assert.Nil(err) + assert.NotNil(pubsubTarget) + assert.IsType(PubSubTarget{}, *pubsubTarget) +} + +// TestnewPubSubTarget_Failure tests that we fail early when we cannot reach pubsub +// Commented out as this behaviour is not currently instrumented. +// This test serves to illustrate the desired behaviour for this issue: https://github.com/snowplow-devops/stream-replicator/issues/151 +/* +func TestnewPubSubTarget_Failure(t *testing.T) { + assert := assert.New(t) + + pubsubTarget, err := newPubSubTarget(`nonexistent-project`, `nonexistent-topic`) + + // TODO: Test for the actual error we expect, when we have instrumented failing fast + assert.NotNil(err) + assert.Nil(pubsubTarget) +} +*/ diff --git a/pkg/target/setup_test.go b/pkg/target/setup_test.go new file mode 100644 index 00000000..4f87b80a --- /dev/null +++ b/pkg/target/setup_test.go @@ -0,0 +1,18 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package target + +import ( + "os" + "testing" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} diff --git a/pkg/target/sqs_test.go b/pkg/target/sqs_test.go index 3c66e1e2..159c81d9 100644 --- a/pkg/target/sqs_test.go +++ b/pkg/target/sqs_test.go @@ -46,7 +46,7 @@ func TestSQSTarget_WriteSuccess(t *testing.T) { queueName := "sqs-queue-target-1" queueRes, err := testutil.CreateAWSLocalstackSQSQueue(client, queueName) if err != nil { - panic(err) + t.Fatal(err) } queueURL := queueRes.QueueUrl defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) @@ -89,7 +89,7 @@ func TestSQSTarget_WritePartialFailure_OversizeRecord(t *testing.T) { queueName := "sqs-queue-target-2" queueRes, err := testutil.CreateAWSLocalstackSQSQueue(client, queueName) if err != nil { - panic(err) + t.Fatal(err) } queueURL := queueRes.QueueUrl defer testutil.DeleteAWSLocalstackSQSQueue(client, queueURL) diff --git a/pkg/testutil/common.go b/pkg/testutil/common.go index 0800cd33..f50aa990 100644 --- a/pkg/testutil/common.go +++ b/pkg/testutil/common.go @@ -7,6 +7,7 @@ package testutil import ( + "fmt" "math/rand" "time" @@ -45,3 +46,17 @@ func GetTestMessages(count int, body string, ackFunc func()) []*models.Message { } return messages } + +// GetSequentialTestMessages will return an array of messages ready to be used for testing +// targets and sources. Message data will be sequential integers for easier testing of accuracy, duplicates, etc. 
+func GetSequentialTestMessages(count int, ackFunc func()) []*models.Message { + var messages []*models.Message + for i := 0; i < count; i++ { + messages = append(messages, &models.Message{ + Data: []byte(fmt.Sprint(i)), + PartitionKey: uuid.NewV4().String(), + AckFunc: ackFunc, + }) + } + return messages +} diff --git a/pkg/testutil/pubsub_helpers.go b/pkg/testutil/pubsub_helpers.go new file mode 100644 index 00000000..46284a8a --- /dev/null +++ b/pkg/testutil/pubsub_helpers.go @@ -0,0 +1,131 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package testutil + +import ( + "context" + "fmt" + "strconv" + "sync" + "sync/atomic" + "testing" + "time" + + "cloud.google.com/go/pubsub" + "cloud.google.com/go/pubsub/pstest" + "github.com/pkg/errors" + pubsubV1 "google.golang.org/genproto/googleapis/pubsub/v1" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" +) + +// InitMockPubsubServer creates a mock PubSub Server for testing +func InitMockPubsubServer(port int, opts []pstest.ServerReactorOption, t *testing.T) (*pstest.Server, *grpc.ClientConn) { + t.Setenv("PUBSUB_PROJECT_ID", `project-test`) + t.Setenv(`PUBSUB_EMULATOR_HOST`, fmt.Sprint("localhost:", port)) + ctx := context.Background() + srv := pstest.NewServerWithPort(port, opts...) + // Connect to the server without using TLS. 
+ conn, err := grpc.Dial(srv.Addr, grpc.WithTransportCredentials(insecure.NewCredentials())) + if err != nil { + t.Fatal(err) + } + + _, err = srv.GServer.CreateTopic(ctx, &pubsubV1.Topic{Name: `projects/project-test/topics/test-topic`}) + if err != nil { + t.Fatal(err) + } + + _, err = srv.GServer.CreateSubscription(ctx, &pubsubV1.Subscription{ + Name: "projects/project-test/subscriptions/test-sub", + Topic: "projects/project-test/topics/test-topic", + AckDeadlineSeconds: 10, + }) + if err != nil { + t.Fatal(err) + } + + return srv, conn +} + +// CreatePubsubResourcesAndWrite creates PubSub integration resources, and writes numMsgs +func CreatePubsubResourcesAndWrite(numMsgs int, t *testing.T) { + ctx, cancelFunc := context.WithTimeout(context.Background(), 10*time.Second) + defer cancelFunc() + t.Setenv("PUBSUB_PROJECT_ID", `project-test`) + t.Setenv(`PUBSUB_EMULATOR_HOST`, "localhost:8432") + + client, err := pubsub.NewClient(ctx, `project-test`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to create PubSub client")) + } + defer client.Close() + + topic, err := client.CreateTopic(ctx, `test-topic`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to create pubsub topic")) + } + + _, err = client.CreateSubscription(ctx, `test-sub`, pubsub.SubscriptionConfig{ + Topic: topic, + AckDeadline: 10 * time.Second, + }) + if err != nil { + t.Fatal(fmt.Errorf("error creating subscription: %v", err)) + } + + var wg sync.WaitGroup + var totalErrors uint64 + + // publish n messages + for i := 0; i < numMsgs; i++ { + wg.Add(1) + result := topic.Publish(ctx, &pubsub.Message{ + Data: []byte("message #" + strconv.Itoa(i)), + }) + go func(i int, res *pubsub.PublishResult) { + defer wg.Done() + _, err := res.Get(ctx) + if err != nil { + atomic.AddUint64(&totalErrors, 1) + return + } + }(i, result) + } + + wg.Wait() +} + +// DeletePubsubResources tears down Pubsub integration resources +func DeletePubsubResources(t *testing.T) { + ctx, cancelFunc := 
context.WithTimeout(context.Background(), 10*time.Second) + defer cancelFunc() + t.Setenv("PUBSUB_PROJECT_ID", `project-test`) + t.Setenv(`PUBSUB_EMULATOR_HOST`, "localhost:8432") + + client, err := pubsub.NewClient(ctx, `project-test`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to create PubSub client")) + } + defer client.Close() + + subscription := client.Subscription(`test-sub`) + err = subscription.Delete(ctx) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to delete subscription")) + } + + topic := client.Topic(`test-topic`) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to get topic")) + } + + err = topic.Delete(ctx) + if err != nil { + t.Fatal(errors.Wrap(err, "Failed to delete topic")) + } +} diff --git a/pkg/testutil/source_helpers.go b/pkg/testutil/source_helpers.go index 797ec26c..c09bfd2f 100644 --- a/pkg/testutil/source_helpers.go +++ b/pkg/testutil/source_helpers.go @@ -14,15 +14,19 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" ) +// TODO: Refactor to provide a means to test errors without panicing + // ReadAndReturnMessages takes a source, runs the read function, and outputs all messages found in a slice, against which we may run assertions. 
-func ReadAndReturnMessages(source sourceiface.Source) []*models.Message { +// The testWriteBuilder argument allows the test implementation to provide a write function builder, +// and the additionalOpts argument allows one to pass arguments to that builder +func ReadAndReturnMessages(source sourceiface.Source, timeToWait time.Duration, testWriteBuilder func(sourceiface.Source, chan *models.Message, interface{}) func([]*models.Message) error, additionalOpts interface{}) []*models.Message { var successfulReads []*models.Message hitError := make(chan error) msgRecieved := make(chan *models.Message) // run the read function in a goroutine, so that we can close it after a timeout sf := sourceiface.SourceFunctions{ - WriteToTarget: testWriteFuncBuilder(source, msgRecieved), + WriteToTarget: testWriteBuilder(source, msgRecieved, additionalOpts), } go runRead(hitError, source, &sf) @@ -34,7 +38,7 @@ resultLoop: case msg := <-msgRecieved: // Append messages to the result slice successfulReads = append(successfulReads, msg) - case <-time.After(3 * time.Second): + case <-time.After(timeToWait): // Stop source after 3s, and return the result slice fmt.Println("Stopping source.") source.Stop() @@ -51,8 +55,8 @@ func runRead(ch chan error, source sourceiface.Source, sf *sourceiface.SourceFun } } -// testWriteFuncBuiler returns a function which replaces the write function, outputting any messages it finds to be handled via a channel -func testWriteFuncBuilder(source sourceiface.Source, msgChan chan *models.Message) func(messages []*models.Message) error { +// DefaultTestWriteBuilder returns a function which replaces the write function, outputting any messages it finds to be handled via a channel +func DefaultTestWriteBuilder(source sourceiface.Source, msgChan chan *models.Message, additionalOpts interface{}) func(messages []*models.Message) error { return func(messages []*models.Message) error { for _, msg := range messages { // Send each message onto the channel to be appended 
to results @@ -62,3 +66,22 @@ func testWriteFuncBuilder(source sourceiface.Source, msgChan chan *models.Messag return nil } } + +// DelayedAckTestWriteBuilder delays every third ack, to test the case where some messages are processed slower than others +func DelayedAckTestWriteBuilder(source sourceiface.Source, msgChan chan *models.Message, additionalOpts interface{}) func(messages []*models.Message) error { + return func(messages []*models.Message) error { + duration, ok := additionalOpts.(time.Duration) + if !ok { + panic("DelayedAckTestWriteBuilder requires a duration to be passed as additionalOpts") + } + for i, msg := range messages { + // Send each message onto the channel to be appended to results + msgChan <- msg + if i%3 == 1 { + time.Sleep(duration) + } + msg.AckFunc() + } + return nil + } +} diff --git a/pkg/transform/engine_javascript_test.go b/pkg/transform/engine_javascript_test.go index 4f434c00..9fbaeb3d 100644 --- a/pkg/transform/engine_javascript_test.go +++ b/pkg/transform/engine_javascript_test.go @@ -1547,12 +1547,8 @@ function setPk(x) { for i, res := range result.Result { if i < len(tt.ExpectedGood) { exp := tt.ExpectedGood[i] - if !reflect.DeepEqual(res.Data, exp.Data) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(res.Data), - spew.Sdump(exp.Data)) - } - assert.Equal(res.PartitionKey, exp.PartitionKey) + assert.JSONEq(string(exp.Data), string(res.Data)) + assert.Equal(exp.PartitionKey, res.PartitionKey) } } }) @@ -1675,12 +1671,8 @@ function setPk(x) { for i, res := range result.Result { if i < len(tt.ExpectedGood) { exp := tt.ExpectedGood[i] - if !reflect.DeepEqual(res.Data, exp.Data) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(res.Data), - spew.Sdump(exp.Data)) - } - assert.Equal(res.PartitionKey, exp.PartitionKey) + assert.JSONEq(string(exp.Data), string(res.Data)) + assert.Equal(exp.PartitionKey, res.PartitionKey) } } }) @@ -1852,8 +1844,9 @@ func assertMessagesCompareJs(t *testing.T, act, exp *models.Message) { ok = exp 
== nil case exp == nil: default: + var dataOk bool pkOk := act.PartitionKey == exp.PartitionKey - dataOk := reflect.DeepEqual(act.Data, exp.Data) + dataOk = reflect.DeepEqual(act.Data, exp.Data) cTimeOk := reflect.DeepEqual(act.TimeCreated, exp.TimeCreated) pTimeOk := reflect.DeepEqual(act.TimePulled, exp.TimePulled) tTimeOk := reflect.DeepEqual(act.TimeTransformed, exp.TimeTransformed) diff --git a/pkg/transform/engine_lua_test.go b/pkg/transform/engine_lua_test.go index a1238ffd..6b0672ef 100644 --- a/pkg/transform/engine_lua_test.go +++ b/pkg/transform/engine_lua_test.go @@ -1464,12 +1464,8 @@ end assert.NotNil(result) for i, res := range result.Result { exp := expectedGood[i] - if !reflect.DeepEqual(res.Data, exp.Data) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(res.Data), - spew.Sdump(exp.Data)) - } - assert.Equal(res.PartitionKey, exp.PartitionKey) + assert.JSONEq(string(exp.Data), string(res.Data)) + assert.Equal(exp.PartitionKey, res.PartitionKey) } }) @@ -1603,12 +1599,8 @@ end for i, res := range result.Result { if i < len(tt.ExpectedGood) { exp := tt.ExpectedGood[i] - if !reflect.DeepEqual(res.Data, exp.Data) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(res.Data), - spew.Sdump(exp.Data)) - } - assert.Equal(res.PartitionKey, exp.PartitionKey) + assert.JSONEq(string(exp.Data), string(res.Data)) + assert.Equal(exp.PartitionKey, res.PartitionKey) } } }) @@ -1732,12 +1724,8 @@ end for i, res := range result.Result { if i < len(tt.ExpectedGood) { exp := tt.ExpectedGood[i] - if !reflect.DeepEqual(res.Data, exp.Data) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(res.Data), - spew.Sdump(exp.Data)) - } - assert.Equal(res.PartitionKey, exp.PartitionKey) + assert.JSONEq(string(exp.Data), string(res.Data)) + assert.Equal(exp.PartitionKey, res.PartitionKey) } } }) diff --git a/pkg/transform/setup_test.go b/pkg/transform/setup_test.go new file mode 100644 index 00000000..3f2028b5 --- /dev/null +++ b/pkg/transform/setup_test.go @@ -0,0 +1,18 @@ 
+// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. + +package transform + +import ( + "os" + "testing" +) + +func TestMain(m *testing.M) { + os.Clearenv() + exitVal := m.Run() + os.Exit(exitVal) +} diff --git a/pkg/transform/snowplow_enriched_filter_test.go b/pkg/transform/snowplow_enriched_filter_test.go index ef1da676..034c29bc 100644 --- a/pkg/transform/snowplow_enriched_filter_test.go +++ b/pkg/transform/snowplow_enriched_filter_test.go @@ -34,7 +34,6 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { // Single value cases aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id==test-data3") - // TODO: sort out numbering for fail cases... aidKeepIn, aidKeepOut, fail, _ := aidFilterFuncKeep(&messageGood, nil) assert.Equal(snowplowTsv3, aidKeepIn.Data) @@ -223,10 +222,10 @@ func TestNewSpEnrichedFilterFunction_Error(t *testing.T) { filterFunc, err := NewSpEnrichedFilterFunction(tt.Arg) assert.Nil(filterFunc) - if err == nil { - t.Fatalf("expected error, got nil") + assert.NotNil(err) + if err != nil { + assert.Equal(filterError, err.Error()) } - assert.Equal(filterError, err.Error()) }) } } diff --git a/pkg/transform/snowplow_enriched_set_pk_test.go b/pkg/transform/snowplow_enriched_set_pk_test.go index c0958fe0..bd94512a 100644 --- a/pkg/transform/snowplow_enriched_set_pk_test.go +++ b/pkg/transform/snowplow_enriched_set_pk_test.go @@ -57,7 +57,10 @@ func TestNewSpEnrichedSetPkFunction(t *testing.T) { assert.Nil(failureCase) assert.Nil(intermediate) assert.NotNil(fail) - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", fail.GetError().Error()) + assert.NotNil(fail.GetError()) + if fail.GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", fail.GetError().Error()) + } // Nuanced success case // Test to assert behaviour when there's an incompatible 
intermediateState in the input diff --git a/pkg/transform/snowplow_enriched_to_json_test.go b/pkg/transform/snowplow_enriched_to_json_test.go index 0d86dcaa..e5451302 100644 --- a/pkg/transform/snowplow_enriched_to_json_test.go +++ b/pkg/transform/snowplow_enriched_to_json_test.go @@ -34,7 +34,8 @@ func TestSpEnrichedToJson(t *testing.T) { // Simple success case transformSuccess, _, failure, intermediate := SpEnrichedToJSON(&messageGood, nil) - assert.Equal(&expectedGood, transformSuccess) + assert.Equal(expectedGood.PartitionKey, transformSuccess.PartitionKey) + assert.JSONEq(string(expectedGood.Data), string(transformSuccess.Data)) assert.Equal(spTsv1Parsed, intermediate) assert.Nil(failure) @@ -42,7 +43,10 @@ func TestSpEnrichedToJson(t *testing.T) { success, _, transformFailure, intermediate := SpEnrichedToJSON(&messageBad, nil) // Not matching equivalence of whole object because error stacktrace makes it unfeasible. Doing each component part instead. - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", transformFailure.GetError().Error()) + assert.NotNil(transformFailure.GetError()) + if transformFailure.GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", transformFailure.GetError().Error()) + } assert.Equal([]byte("not a snowplow event"), transformFailure.Data) assert.Equal("some-key4", transformFailure.PartitionKey) // Failure in this case is in parsing to IntermediateState, so none expected in output @@ -64,7 +68,8 @@ func TestSpEnrichedToJson(t *testing.T) { // When we have some incompatible IntermediateState, expected behaviour is to replace it with this transformation's IntermediateState transformSuccess2, _, failure2, intermediate2 := SpEnrichedToJSON(&incompatibleIntermediateMessage, incompatibleIntermediate) - assert.Equal(&expectedGood, transformSuccess2) + assert.Equal(expectedGood.PartitionKey, transformSuccess2.PartitionKey) + assert.JSONEq(string(expectedGood.Data), 
string(transformSuccess2.Data)) assert.Equal(spTsv1Parsed, intermediate2) assert.Nil(failure2) } diff --git a/pkg/transform/snowplow_enriched_util.go b/pkg/transform/snowplow_enriched_util.go index d0d5cc6e..79bbb2a4 100644 --- a/pkg/transform/snowplow_enriched_util.go +++ b/pkg/transform/snowplow_enriched_util.go @@ -7,11 +7,14 @@ package transform import ( + "strconv" + "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" - "strconv" ) +// intermediateAsSpEnrichedParsed checks whether we have a Snowplow Analytics SDK ParsedEvent in the intermediateState +// If we do, we return it. If it don't, we parse the message.Data and return it the result. func intermediateAsSpEnrichedParsed(intermediateState interface{}, message *models.Message) (analytics.ParsedEvent, error) { var parsedMessage, ok = intermediateState.(analytics.ParsedEvent) var parseErr error diff --git a/pkg/transform/snowplow_enriched_util_test.go b/pkg/transform/snowplow_enriched_util_test.go new file mode 100644 index 00000000..276f73d6 --- /dev/null +++ b/pkg/transform/snowplow_enriched_util_test.go @@ -0,0 +1,58 @@ +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
+ +package transform + +import ( + "testing" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/stretchr/testify/assert" +) + +// TestIntermediateAsSpEnrichedParsed tests that intermediateAsSpEnrichedParsed +// returns the parsed event when provided a snowplow TSV with +func TestIntermediateAsSpEnrichedParsed(t *testing.T) { + assert := assert.New(t) + + // case 1: no intermediate state + res1, err1 := intermediateAsSpEnrichedParsed(nil, &models.Message{Data: snowplowTsv1}) + + assert.Equal(spTsv1Parsed, res1) + assert.Nil(err1) + + // case 2: intermediate state provided as ParsedEvent + res2, err2 := intermediateAsSpEnrichedParsed(spTsv2Parsed, &models.Message{Data: snowplowTsv2}) + + assert.Equal(spTsv2Parsed, res2) + assert.Nil(err2) + + // case 3: intermediate state provided as some other type + res3, err3 := intermediateAsSpEnrichedParsed("not a ParsedEvent", &models.Message{Data: snowplowTsv3}) + + assert.Equal(spTsv3Parsed, res3) + assert.Nil(err3) + + // case 4: message not parseable + res4, err4 := intermediateAsSpEnrichedParsed(nil, &models.Message{Data: []byte("Not a snowplow event")}) + + assert.Nil(res4) + assert.NotNil(err4) + if err4 != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 1", err4.Error()) + } +} + +// TestConvertPathToInterfaces tests that convertPathToInterfaces returns integers and strings where appropriate +func TestConvertPathToInterfaces(t *testing.T) { + assert := assert.New(t) + + expected := []interface{}{"one", 2, 3, "four", "five", 6} + + res := convertPathToInterfaces([]string{"one", "2", "3", "four", "five", "6"}) + + assert.Equal(expected, res) +} diff --git a/pkg/transform/transform_test.go b/pkg/transform/transform_test.go index bd5eea4b..4b378b02 100644 --- a/pkg/transform/transform_test.go +++ b/pkg/transform/transform_test.go @@ -67,7 +67,7 @@ func TestNewTransformation_EnrichedToJson(t *testing.T) { enrichJSONRes := tranformEnrichJSON(messages) for index, value 
:= range enrichJSONRes.Result { - assert.Equal(expectedGood[index].Data, value.Data) + assert.JSONEq(string(expectedGood[index].Data), string(value.Data)) assert.Equal(expectedGood[index].PartitionKey, value.PartitionKey) assert.NotNil(expectedGood[index].TimeTransformed) @@ -80,7 +80,10 @@ func TestNewTransformation_EnrichedToJson(t *testing.T) { // Not matching equivalence of whole object because error stacktrace makes it unfeasible. Doing each component part instead. assert.Equal(1, len(enrichJSONRes.Invalid)) assert.Equal(int64(1), enrichJSONRes.InvalidCount) - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + assert.NotNil(enrichJSONRes.Invalid[0].GetError()) + if enrichJSONRes.Invalid[0].GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + } assert.Equal([]byte("not a snowplow event"), enrichJSONRes.Invalid[0].Data) assert.Equal("some-key4", enrichJSONRes.Invalid[0].PartitionKey) } @@ -127,9 +130,10 @@ func TestNewTransformation_Multiple(t *testing.T) { enrichJSONRes := tranformMultiple(messages) for index, value := range enrichJSONRes.Result { - assert.Equal(expectedGood[index].Data, value.Data) + assert.JSONEq(string(expectedGood[index].Data), string(value.Data)) assert.Equal(expectedGood[index].PartitionKey, value.PartitionKey) assert.NotNil(expectedGood[index].TimeTransformed) + assert.NotNil(value.TimeTransformed) // assertions to ensure we don't accidentally modify the input assert.NotEqual(messages[index].Data, value.Data) @@ -141,7 +145,11 @@ func TestNewTransformation_Multiple(t *testing.T) { // Not matching equivalence of whole object because error stacktrace makes it unfeasible. Doing each component part instead. 
assert.Equal(1, len(enrichJSONRes.Invalid)) assert.Equal(int64(1), enrichJSONRes.InvalidCount) - assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + assert.NotNil(enrichJSONRes.Invalid[0].GetError()) + if enrichJSONRes.Invalid[0].GetError() != nil { + assert.Equal("Cannot parse tsv event - wrong number of fields provided: 4", enrichJSONRes.Invalid[0].GetError().Error()) + } + assert.Equal([]byte("not a snowplow event"), enrichJSONRes.Invalid[0].Data) assert.Equal("some-key4", enrichJSONRes.Invalid[0].PartitionKey) } diff --git a/pkg/transform/transformconfig/transform_config_test.go b/pkg/transform/transformconfig/transform_config_test.go index ae96c9c4..786336c4 100644 --- a/pkg/transform/transformconfig/transform_config_test.go +++ b/pkg/transform/transformconfig/transform_config_test.go @@ -132,10 +132,10 @@ func TestParseTransformations_InvalidMessage(t *testing.T) { parsed, err := parseTransformations(tt.Message) assert.Nil(parsed) - if err == nil { - t.Fatalf("expected error; got nil") + assert.NotNil(err) + if err != nil { + assert.Equal(tt.ExpError, err.Error()) } - assert.Equal(tt.ExpError, err.Error()) }) } } @@ -172,12 +172,14 @@ func TestGetTransformations_MissingLayerConfig(t *testing.T) { t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - assert.Equal(c.Data.Transform.Message, tt.TransMessage) + assert.Equal(tt.TransMessage, c.Data.Transform.Message) transformation, err := GetTransformations(c) assert.Nil(transformation) assert.NotNil(err) - assert.Equal(tt.ExpectedError, err.Error()) + if err != nil { + assert.Equal(tt.ExpectedError, err.Error()) + } }) } } @@ -239,7 +241,10 @@ func TestGetTransformations_Builtins(t *testing.T) { applyFun, err := GetTransformations(tt.Provider) if tt.ExpectedErr != nil { - assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.NotNil(err) + if err != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + } 
assert.Nil(applyFun) } else { assert.Nil(err) @@ -288,7 +293,10 @@ func TestGetTransformations_Custom(t *testing.T) { applyFun, err := GetTransformations(tt.Provider) if tt.ExpectedErr != nil { - assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.NotNil(err) + if err != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + } assert.Nil(applyFun) } else { assert.Nil(err) @@ -430,7 +438,10 @@ func TestMkEngineFunction(t *testing.T) { ) if tt.ExpectedErr != nil { - assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.NotNil(err) + if err != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + } assert.Nil(fun) } else { assert.Nil(err) diff --git a/third_party/snowplow/badrows/bad_row_test.go b/third_party/snowplow/badrows/bad_row_test.go index 8fa94dcc..127955e0 100644 --- a/third_party/snowplow/badrows/bad_row_test.go +++ b/third_party/snowplow/badrows/bad_row_test.go @@ -25,5 +25,8 @@ func TestNewBadRow_InvalidData(t *testing.T) { br, err := newBadRow(schema, data, []byte("Hello World!"), 5000) assert.NotNil(err) + if err != nil { + assert.Equal("Could not unmarshall bad-row data blob to JSON: json: unsupported type: map[bool]string", err.Error()) + } assert.Nil(br) } diff --git a/third_party/snowplow/badrows/size_violation_test.go b/third_party/snowplow/badrows/size_violation_test.go index c7a60e03..6e2be626 100644 --- a/third_party/snowplow/badrows/size_violation_test.go +++ b/third_party/snowplow/badrows/size_violation_test.go @@ -80,7 +80,9 @@ func TestNewSizeViolation_NotEnoughBytes(t *testing.T) { }, 10, ) - assert.NotNil(err) assert.Nil(sv) - assert.Equal("Failed to create bad-row as resultant payload will exceed the targets byte limit", err.Error()) + assert.NotNil(err) + if err != nil { + assert.Equal("Failed to create bad-row as resultant payload will exceed the targets byte limit", err.Error()) + } } diff --git a/third_party/snowplow/iglu/self_describing_data_test.go b/third_party/snowplow/iglu/self_describing_data_test.go index 
91f3a5f7..8d46121d 100644 --- a/third_party/snowplow/iglu/self_describing_data_test.go +++ b/third_party/snowplow/iglu/self_describing_data_test.go @@ -45,5 +45,8 @@ func TestNewSelfDescribingData_InvalidData(t *testing.T) { sddString, err := sdd.String() assert.NotNil(err) + if err != nil { + assert.Equal("json: unsupported type: map[bool]string", err.Error()) + } assert.Equal("", sddString) } From a4a50ad312077580c98295266dbe2ac5bcd0d321 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Tue, 28 Jun 2022 13:57:10 +0100 Subject: [PATCH 11/25] Fix bug in makefile that prevents integration-down from completing (closes #162) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 39e85f10..3307666d 100644 --- a/Makefile +++ b/Makefile @@ -119,7 +119,7 @@ http-up: http-down: (cd "$(integration_dir)/http/shutdown" && go run shutdownRequest.go) - killall ngrok + killall ngrok || true # ----------------------------------------------------------------------------- # RELEASE From fa02756c519075d876c92974255b9c5bd3092fe4 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Wed, 29 Jun 2022 16:26:17 +0100 Subject: [PATCH 12/25] Bump analytics SDK version to v0.3.0 (closes #131) --- go.mod | 6 +++--- go.sum | 11 ++++++----- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index f2942d97..ce461f9b 100644 --- a/go.mod +++ b/go.mod @@ -31,7 +31,7 @@ require ( github.com/smira/go-statsd v1.3.2 github.com/snowplow-devops/go-retry v0.0.0-20210106090855-8989bbdbae1c github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a - github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2 + github.com/snowplow/snowplow-golang-analytics-sdk v0.3.0 github.com/stretchr/testify v1.7.0 github.com/twinj/uuid v1.0.0 github.com/twitchscience/kinsumer v0.0.0-20210611163023-da24975e2c91 @@ -90,11 +90,11 @@ require ( github.com/jcmturner/gokrb5/v8 v8.4.2 // indirect github.com/jcmturner/rpc/v2 v2.0.3 // 
indirect github.com/jmespath/go-jmespath v0.4.0 // indirect - github.com/json-iterator/go v1.1.11 // indirect + github.com/json-iterator/go v1.1.12 // indirect github.com/mattn/go-sqlite3 v2.0.2+incompatible // indirect github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.1 // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect github.com/pierrec/lz4/v4 v4.1.14 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect diff --git a/go.sum b/go.sum index c1393f3d..a57788bd 100644 --- a/go.sum +++ b/go.sum @@ -388,8 +388,8 @@ github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2E github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= @@ -439,8 +439,9 @@ github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd 
h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= github.com/myesui/uuid v1.0.0 h1:xCBmH4l5KuvLYc5L7AS7SZg9/jKdIFubM7OVoLqaQUI= github.com/myesui/uuid v1.0.0/go.mod h1:2CDfNgU0LR8mIdO8vdWd8i9gWWxLlcoIGGpSNgafq84= @@ -492,8 +493,8 @@ github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a h1:9 github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a/go.mod h1:7/jMxl0yrvgiUlv5L37fw6pql71aNh55sKQc4kBFj5s= github.com/snowplow-devops/kinsumer v1.3.0 h1:uN8PPG8EffKjcfTcDqsHWnnsTFvYGMU39XlDPULIQcA= github.com/snowplow-devops/kinsumer v1.3.0/go.mod h1:SebvcasLweQnOygk9SOFkM/JjBtXFviUxoAq19CwrHQ= -github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2 h1:ehPNYJ4tOq+n4Lj8jtentKS4UzzvRv5iQ8vlESQj8qw= -github.com/snowplow/snowplow-golang-analytics-sdk v0.2.2/go.mod h1:Z8ZW805JGCYhnq1wnHe2PIiamUnvoNtAtXPWNyS0mV8= +github.com/snowplow/snowplow-golang-analytics-sdk v0.3.0 h1:lkWd2JDVG8+X8UPJYdru2EgRW4w/TVnWCmKhW5lPJvc= +github.com/snowplow/snowplow-golang-analytics-sdk v0.3.0/go.mod h1:KCL+i2+Uj9lvSdknXOA7lBQoBUWGW6ovJgTao7Fkdxk= github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1 h1:bp1MynC4BkywqTfpt4wddqZxtN4U7d3UUqxjalcGR1s= github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1/go.mod h1:/74pOlgs8xon7CAWihi1peUflolbKSSy2Fu/UDF4PgI= github.com/spaolacci/murmur3 
v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= From a7ec82c9116da4164fef17ee9e36bdb962d34b02 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Fri, 1 Jul 2022 10:44:36 +0100 Subject: [PATCH 13/25] Bump to Go 1.18 (closes #163) --- .github/workflows/cd.yml | 2 +- .github/workflows/ci.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index cb7e20d6..5f90ac0c 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -10,7 +10,7 @@ jobs: name: Release strategy: matrix: - go-version: [1.17.0] + go-version: [1.18] os: [ubuntu-latest] runs-on: ${{ matrix.os }} env: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 654ecdc5..e9134bd4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: name: Compile & Test strategy: matrix: - go-version: [1.17.0] + go-version: [1.18] os: [ubuntu-latest] runs-on: ${{ matrix.os }} env: From 30d8424719fe8c8d62a12fa52d2a6869bd35cae2 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Fri, 1 Jul 2022 10:52:44 +0100 Subject: [PATCH 14/25] Resolve CI caching errors (closes #164) --- .github/workflows/cd.yml | 18 ++++++------------ .github/workflows/ci.yml | 18 ++++++------------ .github/workflows/snyk.yml | 2 +- 3 files changed, 13 insertions(+), 25 deletions(-) diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index 5f90ac0c..accb7c67 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -18,21 +18,15 @@ jobs: DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Install Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: go-version: ${{ matrix.go-version }} - - - name: Checkout code - uses: actions/checkout@v2 - - - name: Cache go modules - uses: actions/cache@v2 - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - 
restore-keys: | - ${{ runner.os }}-go- + check-latest: true + cache: true - name: Run linter run: make lint diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e9134bd4..9fe33695 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -19,13 +19,15 @@ jobs: NGROK_TOKEN: ${{ secrets.NGROK_TOKEN }} steps: + - name: Checkout code + uses: actions/checkout@v3 + - name: Install Go - uses: actions/setup-go@v2 + uses: actions/setup-go@v3 with: go-version: ${{ matrix.go-version }} - - - name: Checkout code - uses: actions/checkout@v2 + check-latest: true + cache: true - name: Snyk Setup uses: snyk/actions/setup@master @@ -35,14 +37,6 @@ jobs: env: SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} - - name: Cache go modules - uses: actions/cache@v2 - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- - - name: Block Concurrent Executions of Integration Tests if: ${{ env.NGROK_TOKEN != '' }} uses: softprops/turnstyle@v1 diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index f42d728c..dea58637 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Run Snyk to check for vulnerabilities uses: snyk/actions/golang@master From a458616447353d56316413b8e7b600eb847d6331 Mon Sep 17 00:00:00 2001 From: TiganeteaRobert Date: Tue, 19 Jul 2022 12:19:38 +0300 Subject: [PATCH 15/25] =?UTF-8?q?Rationalise=20transformations=20and=20tra?= =?UTF-8?q?nsformation=20config=C2=A0(closes=20#169)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- config/config.go | 26 - config/config_test.go | 66 -- config/test-fixtures/invalids.hcl | 4 - .../transform-invalid-layer-js.hcl | 7 - .../transform-invalid-layer-lua.hcl | 7 - config/test-fixtures/transform-js-error.hcl | 7 + 
.../test-fixtures/transform-js-extended.hcl | 12 +- config/test-fixtures/transform-js-simple.hcl | 4 +- .../test-fixtures/transform-lua-extended.hcl | 7 +- config/test-fixtures/transform-lua-simple.hcl | 2 - .../test-fixtures/transform-mixed-error.hcl | 20 + .../transform-mixed-filtered.hcl | 23 + .../test-fixtures/transform-mixed-order.hcl | 20 + config/test-fixtures/transform-mixed.hcl | 17 + go.mod | 2 +- go.sum | 3 +- pkg/transform/{ => engine}/engine.go | 26 +- .../{ => engine}/engine_javascript.go | 125 +-- .../{ => engine}/engine_javascript_test.go | 863 +++++------------- pkg/transform/{ => engine}/engine_lua.go | 97 +- pkg/transform/{ => engine}/engine_lua_test.go | 686 ++++++-------- pkg/transform/engine/engine_test_variables.go | 33 + pkg/transform/snowplow_enriched_filter.go | 91 +- .../snowplow_enriched_filter_test.go | 96 +- pkg/transform/snowplow_enriched_set_pk.go | 2 +- pkg/transform/snowplow_enriched_to_json.go | 2 +- pkg/transform/snowplow_enriched_util.go | 6 +- pkg/transform/snowplow_enriched_util_test.go | 8 +- .../transformconfig/transform_config.go | 411 ++++----- .../transformconfig/transform_config_test.go | 794 ++++++++-------- 30 files changed, 1447 insertions(+), 2020 deletions(-) delete mode 100644 config/test-fixtures/transform-invalid-layer-js.hcl delete mode 100644 config/test-fixtures/transform-invalid-layer-lua.hcl create mode 100644 config/test-fixtures/transform-js-error.hcl create mode 100644 config/test-fixtures/transform-mixed-error.hcl create mode 100644 config/test-fixtures/transform-mixed-filtered.hcl create mode 100644 config/test-fixtures/transform-mixed-order.hcl create mode 100644 config/test-fixtures/transform-mixed.hcl rename pkg/transform/{ => engine}/engine.go (55%) rename pkg/transform/{ => engine}/engine_javascript.go (70%) rename pkg/transform/{ => engine}/engine_javascript_test.go (67%) rename pkg/transform/{ => engine}/engine_lua.go (82%) rename pkg/transform/{ => engine}/engine_lua_test.go (74%) create mode 
100644 pkg/transform/engine/engine_test_variables.go diff --git a/config/config.go b/config/config.go index 694da223..bff63540 100644 --- a/config/config.go +++ b/config/config.go @@ -17,7 +17,6 @@ import ( "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" "github.com/pkg/errors" - "github.com/snowplow-devops/stream-replicator/pkg/failure" "github.com/snowplow-devops/stream-replicator/pkg/failure/failureiface" "github.com/snowplow-devops/stream-replicator/pkg/observer" @@ -80,12 +79,6 @@ type statsConfig struct { BufferSec int `hcl:"buffer_sec,optional" env:"STATS_RECEIVER_BUFFER_SEC"` } -// TransformConfig holds configuration for tranformations. -type TransformConfig struct { - Message string `hcl:"message_transformation,optional" env:"MESSAGE_TRANSFORMATION"` - Layer *use `hcl:"use,block" envPrefix:"TRANSFORMATION_LAYER_"` -} - // defaultConfigData returns the initial main configuration target. func defaultConfigData() *configurationData { return &configurationData{ @@ -363,22 +356,3 @@ func (c *Config) getStatsReceiver(tags map[string]string) (statsreceiveriface.St return nil, errors.New(fmt.Sprintf("Invalid stats receiver found; expected one of 'statsd' and got '%s'", useReceiver.Name)) } } - -// ProvideTransformMessage implements transformconfig.configProvider -func (c *Config) ProvideTransformMessage() string { - return c.Data.Transform.Message -} - -// ProvideTransformLayerName implements transformconfig.configProvider -func (c *Config) ProvideTransformLayerName() string { - return c.Data.Transform.Layer.Name -} - -// ProvideTransformComponent implements transformconfig.configProvider -func (c *Config) ProvideTransformComponent(p Pluggable) (interface{}, error) { - decoderOpts := &DecoderOptions{ - Input: c.Data.Transform.Layer.Body, - } - - return c.CreateComponent(p, decoderOpts) -} diff --git a/config/config_test.go b/config/config_test.go index bbaf00cf..9817c05d 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -11,7 
+11,6 @@ import ( "path/filepath" "testing" - "github.com/snowplow-devops/stream-replicator/pkg/transform" "github.com/stretchr/testify/assert" ) @@ -27,7 +26,6 @@ func TestNewConfig(t *testing.T) { assert.Equal("info", c.Data.LogLevel) assert.Equal("stdout", c.Data.Target.Use.Name) - assert.Equal("none", c.Data.Transform.Message) assert.Equal("stdin", c.Data.Source.Use.Name) // Tests on sources moved to the source package. @@ -226,7 +224,6 @@ func TestNewConfig_Hcl_defaults(t *testing.T) { assert.Equal(false, c.Data.Sentry.Debug) assert.Equal(1, c.Data.StatsReceiver.TimeoutSec) assert.Equal(15, c.Data.StatsReceiver.BufferSec) - assert.Equal("none", c.Data.Transform.Message) assert.Equal("info", c.Data.LogLevel) } @@ -246,66 +243,3 @@ func TestNewConfig_Hcl_sentry(t *testing.T) { assert.Equal("{\"testKey\":\"testValue\"}", c.Data.Sentry.Tags) assert.Equal("testDsn", c.Data.Sentry.Dsn) } - -func TestDefaultTransformation(t *testing.T) { - assert := assert.New(t) - - t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") - t.Setenv("MESSAGE_TRANSFORMATION", "") - - c, err := NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - assert.Equal("none", c.Data.Transform.Message) - assert.Equal("none", c.ProvideTransformMessage()) - assert.Equal("", c.ProvideTransformLayerName()) -} - -func TestTransformationProviderImplementation(t *testing.T) { - testFixPath := "./test-fixtures" - testCases := []struct { - File string - Plug Pluggable - Message string - LayerName string - }{ - { - File: "transform-lua-simple.hcl", - Plug: transform.LuaLayer().(Pluggable), - Message: "lua:fun", - LayerName: "lua", - }, - { - File: "transform-js-simple.hcl", - Plug: transform.JSLayer().(Pluggable), - Message: "js:fun", - LayerName: "js", - }, - } - - for _, tt := range testCases { - t.Run(tt.File, func(t *testing.T) { - assert := assert.New(t) - - configFile := filepath.Join(testFixPath, tt.File) - 
t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", configFile) - - c, err := NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - assert.Equal(tt.Message, c.ProvideTransformMessage()) - assert.Equal(tt.LayerName, c.ProvideTransformLayerName()) - - component, err := c.ProvideTransformComponent(tt.Plug) - assert.Nil(err) - assert.NotNil(component) - - }) - } -} diff --git a/config/test-fixtures/invalids.hcl b/config/test-fixtures/invalids.hcl index 1944444e..5f308e8a 100644 --- a/config/test-fixtures/invalids.hcl +++ b/config/test-fixtures/invalids.hcl @@ -1,9 +1,5 @@ # configuration with various invalid options -transform { - message_transformation = "fakeHCL" -} - target { use "fakeHCL" {} } diff --git a/config/test-fixtures/transform-invalid-layer-js.hcl b/config/test-fixtures/transform-invalid-layer-js.hcl deleted file mode 100644 index fb4a2191..00000000 --- a/config/test-fixtures/transform-invalid-layer-js.hcl +++ /dev/null @@ -1,7 +0,0 @@ -# transform configuration - -transform { - message_transformation = "js:fun" - - use "fake" {} -} diff --git a/config/test-fixtures/transform-invalid-layer-lua.hcl b/config/test-fixtures/transform-invalid-layer-lua.hcl deleted file mode 100644 index f8c46865..00000000 --- a/config/test-fixtures/transform-invalid-layer-lua.hcl +++ /dev/null @@ -1,7 +0,0 @@ -# transform configuration - -transform { - message_transformation = "lua:fun" - - use "fake" {} -} diff --git a/config/test-fixtures/transform-js-error.hcl b/config/test-fixtures/transform-js-error.hcl new file mode 100644 index 00000000..f39d5764 --- /dev/null +++ b/config/test-fixtures/transform-js-error.hcl @@ -0,0 +1,7 @@ +# transform configuration - js - compile error + +transform { + use "js" { + source_b64 = 
"ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB3aG9vcHMgd2hhdCBpcyB0aGlzCiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbIndyb25nX2tleSJdID0gIndoeSBhcmUgeW91IGRlY29kaW5nIHRoaXMiOwoKICAgIHJldHVybiB7CiAgICAgICAgRGF0YTogSlNPTi5zdHJpbmdpZnkoanNvbk9iaikKICAgIH07Cn0=" + } +} diff --git a/config/test-fixtures/transform-js-extended.hcl b/config/test-fixtures/transform-js-extended.hcl index 6292e2fe..e6d2ea6c 100644 --- a/config/test-fixtures/transform-js-extended.hcl +++ b/config/test-fixtures/transform-js-extended.hcl @@ -1,12 +1,10 @@ # transform configuration - js - extended transform { - message_transformation = "js:fun" - use "js" { - source_b64 = "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ" - timeout_sec = 10 - disable_source_maps = false - snowplow_mode = true + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImNoYW5nZWQiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + timeout_sec = 20 + disable_source_maps = true + snowplow_mode = false } -} +} \ No newline at end of file diff --git a/config/test-fixtures/transform-js-simple.hcl b/config/test-fixtures/transform-js-simple.hcl index 3e766b10..3aeac3ec 100644 --- a/config/test-fixtures/transform-js-simple.hcl +++ b/config/test-fixtures/transform-js-simple.hcl @@ -1,9 +1,7 @@ # transform configuration - js - simple transform { - message_transformation = "js:fun" - use "js" { - source_b64 = "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ" + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICByZXR1cm4geDsKfQkgICAKCQ==" } } diff --git a/config/test-fixtures/transform-lua-extended.hcl b/config/test-fixtures/transform-lua-extended.hcl index f728b995..0fcc0db7 100644 --- a/config/test-fixtures/transform-lua-extended.hcl +++ b/config/test-fixtures/transform-lua-extended.hcl @@ -1,11 +1,10 @@ # transform configuration - lua - extended transform { - message_transformation = "lua:fun" - use "lua" { - 
source_b64 = "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ" + source_b64 = "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ" timeout_sec = 10 + snowplow_mode = false sandbox = false } -} +} \ No newline at end of file diff --git a/config/test-fixtures/transform-lua-simple.hcl b/config/test-fixtures/transform-lua-simple.hcl index 3f8c2ba0..8f3d33a9 100644 --- a/config/test-fixtures/transform-lua-simple.hcl +++ b/config/test-fixtures/transform-lua-simple.hcl @@ -1,8 +1,6 @@ # transform configuration - lua - simple transform { - message_transformation = "lua:fun" - use "lua" { source_b64 = "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ" } diff --git a/config/test-fixtures/transform-mixed-error.hcl b/config/test-fixtures/transform-mixed-error.hcl new file mode 100644 index 00000000..761e7e26 --- /dev/null +++ b/config/test-fixtures/transform-mixed-error.hcl @@ -0,0 +1,20 @@ +transform { + use "js" { + timeout_sec = 15 + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImNoYW5nZWQiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + use "js" { + timeout_sec = 15 + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB3aG9vcHMgd2hhdCBpcyB0aGlzCiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbIndyb25nX2tleSJdID0gIndoeSBhcmUgeW91IGRlY29kaW5nIHRoaXMiOwoKICAgIHJldHVybiB7CiAgICAgICAgRGF0YTogSlNPTi5zdHJpbmdpZnkoanNvbk9iaikKICAgIH07Cn0=" + } +} + +transform { + use "lua" { + timeout_sec = 15 + source_b64 = "ZnVuY3Rpb24gbWFpbih4KQogICB4LkRhdGEgPSAiSGVsbG86IiAuLiB4LkRhdGEKICAgcmV0dXJuIHgKZW5k" + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-mixed-filtered.hcl b/config/test-fixtures/transform-mixed-filtered.hcl new file mode 100644 index 00000000..9ba1d870 --- /dev/null +++ b/config/test-fixtures/transform-mixed-filtered.hcl @@ -0,0 +1,23 @@ +transform { + use 
"js" { + timeout_sec = 15 + // return x; + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICByZXR1cm4geDsKfQ==" + } +} + +transform { + use "spEnrichedFilter" { + field = "app_id" + regex = "wrong" + regex_timeout = 10 + } +} + +transform { + use "js" { + timeout_sec = 15 + // return x; + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICByZXR1cm4geDsKfQ==" + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-mixed-order.hcl b/config/test-fixtures/transform-mixed-order.hcl new file mode 100644 index 00000000..b64fdf05 --- /dev/null +++ b/config/test-fixtures/transform-mixed-order.hcl @@ -0,0 +1,20 @@ +transform { + use "js" { + // changes app_id to "1" + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gIjEiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + use "js" { + // if app_id == "1" it is changed to "2" + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGlmIChqc29uT2JqWyJhcHBfaWQiXSA9PSAiMSIpIHsKICAgICAgICBqc29uT2JqWyJhcHBfaWQiXSA9ICIyIgogICAgfQogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + use "js" { + // if app_id == "2" it is changed to "3" + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGlmIChqc29uT2JqWyJhcHBfaWQiXSA9PSAiMiIpIHsKICAgICAgICBqc29uT2JqWyJhcHBfaWQiXSA9ICIzIgogICAgfQogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} \ No newline at end of file diff --git a/config/test-fixtures/transform-mixed.hcl b/config/test-fixtures/transform-mixed.hcl new file mode 100644 index 00000000..aadf0393 --- /dev/null +++ b/config/test-fixtures/transform-mixed.hcl @@ -0,0 +1,17 @@ +transform { + use "js" { + source_b64 = 
"ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImNoYW5nZWQiOwogICAgcmV0dXJuIHsKICAgICAgICBEYXRhOiBKU09OLnN0cmluZ2lmeShqc29uT2JqKQogICAgfTsKfQ==" + } +} + +transform { + use "js" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KSB7CiAgICB2YXIganNvbk9iaiA9IEpTT04ucGFyc2UoeC5EYXRhKTsKICAgIGpzb25PYmpbImFwcF9pZCJdID0gImFnYWluIjsKICAgIHJldHVybiB7CiAgICAgICAgRGF0YTogSlNPTi5zdHJpbmdpZnkoanNvbk9iaikKICAgIH07Cn0=" + } +} + +transform { + use "lua" { + source_b64 = "ZnVuY3Rpb24gbWFpbih4KQogIHguRGF0YSA9ICJIZWxsbzoiIC4uIHguRGF0YQogIHJldHVybiB4CmVuZA==" + } +} \ No newline at end of file diff --git a/go.mod b/go.mod index ce461f9b..97fcd776 100644 --- a/go.mod +++ b/go.mod @@ -50,6 +50,7 @@ require ( require ( github.com/davecgh/go-spew v1.1.1 + github.com/dlclark/regexp2 v1.7.0 github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf github.com/goccy/go-json v0.9.7 github.com/hashicorp/hcl/v2 v2.11.1 @@ -72,7 +73,6 @@ require ( github.com/agext/levenshtein v1.2.1 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/devigned/tab v0.1.1 // indirect - github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 // indirect github.com/eapache/go-resiliency v1.2.0 // indirect github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect github.com/eapache/queue v1.1.0 // indirect diff --git a/go.sum b/go.sum index a57788bd..5001602d 100644 --- a/go.sum +++ b/go.sum @@ -182,8 +182,9 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dimchansky/utfbom v1.1.0 h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4= github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= -github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 h1:Izz0+t1Z5nI16/II7vuEo/nHjodOg0p7+OiDpjX5t1E= 
github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= +github.com/dlclark/regexp2 v1.7.0 h1:7lJfhqlPssTb1WQx4yvTHN0uElPEv52sbaECrAQxjAo= +github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf h1:Yt+4K30SdjOkRoRRm3vYNQgR+/ZIy0RmeUDZo7Y8zeQ= github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= diff --git a/pkg/transform/engine.go b/pkg/transform/engine/engine.go similarity index 55% rename from pkg/transform/engine.go rename to pkg/transform/engine/engine.go index 4b4af505..6b1cd84e 100644 --- a/pkg/transform/engine.go +++ b/pkg/transform/engine/engine.go @@ -4,31 +4,35 @@ // // Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. -package transform +package engine -// FunctionMaker is the interface that wraps the MakeFunction method -type FunctionMaker interface { +import ( + "github.com/snowplow-devops/stream-replicator/pkg/transform" +) + +// functionMaker is the interface that wraps the MakeFunction method +type functionMaker interface { // MakeFunction returns a TransformationFunction that runs // a given function in a runtime engine. - MakeFunction(funcName string) TransformationFunction + MakeFunction(funcName string) transform.TransformationFunction } -// SmokeTester is the interface that wraps the SmokeTest method. -type SmokeTester interface { +// smokeTester is the interface that wraps the SmokeTest method. +type smokeTester interface { // SmokeTest runs a test spin of the engine trying to get as close to // running the given function as possible. SmokeTest(funcName string) error } // Engine is the interface that groups -// FunctionMaker and SmokeTester. +// functionMaker and smokeTester. 
type Engine interface { - FunctionMaker - SmokeTester + functionMaker + smokeTester } -// EngineProtocol is the I/O type of an Engine. -type EngineProtocol struct { +// engineProtocol is the I/O type of Engine. +type engineProtocol struct { FilterOut bool PartitionKey string Data interface{} diff --git a/pkg/transform/engine_javascript.go b/pkg/transform/engine/engine_javascript.go similarity index 70% rename from pkg/transform/engine_javascript.go rename to pkg/transform/engine/engine_javascript.go index 5df77df1..0b32be03 100644 --- a/pkg/transform/engine_javascript.go +++ b/pkg/transform/engine/engine_javascript.go @@ -4,10 +4,11 @@ // // Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. -package transform +package engine import ( "encoding/base64" + "errors" "fmt" "time" @@ -17,90 +18,92 @@ import ( "github.com/mitchellh/mapstructure" "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform" ) -// jsEngineConfig configures the JavaScript Engine. -type jsEngineConfig struct { - SourceB64 string `hcl:"source_b64" env:"TRANSFORMATION_JS_SOURCE_B64"` - RunTimeout int `hcl:"timeout_sec,optional" env:"TRANSFORMATION_JS_TIMEOUT_SEC"` - DisableSourceMaps bool `hcl:"disable_source_maps,optional" env:"TRANSFORMATION_JS_DISABLE_SOURCE_MAPS"` - SpMode bool `hcl:"snowplow_mode,optional" env:"TRANSFORMATION_JS_SNOWPLOW_MODE"` +// JSEngineConfig configures the JavaScript Engine. +type JSEngineConfig struct { + SourceB64 string `hcl:"source_b64,optional"` + RunTimeout int `hcl:"timeout_sec,optional"` + DisableSourceMaps bool `hcl:"disable_source_maps,optional"` + SpMode bool `hcl:"snowplow_mode,optional"` } -// jsEngine handles the provision of a JavaScript runtime to run transformations. -type jsEngine struct { +// JSEngine handles the provision of a JavaScript runtime to run transformations. 
+type JSEngine struct { Code *goja.Program RunTimeout time.Duration SpMode bool } -// newJSEngine returns a JavaScript Engine from a jsEngineConfig. -func newJSEngine(c *jsEngineConfig) (*jsEngine, error) { - jsSrc, err := base64.StdEncoding.DecodeString(c.SourceB64) - if err != nil { - return nil, err - } - - compiledCode, err := compileJS(string(jsSrc), c.SourceB64, c.DisableSourceMaps) - if err != nil { - return nil, err - } +// The JSEngineAdapter type is an adapter for functions to be used as +// pluggable components for a JS Engine. It implements the Pluggable interface. +type JSEngineAdapter func(i interface{}) (interface{}, error) - eng := &jsEngine{ - Code: compiledCode, - RunTimeout: time.Duration(c.RunTimeout) * time.Second, - SpMode: c.SpMode, - } - - return eng, nil +// ProvideDefault returns a JSEngineConfig with default configuration values +func (f JSEngineAdapter) ProvideDefault() (interface{}, error) { + return &JSEngineConfig{ + RunTimeout: 15, + DisableSourceMaps: true, + }, nil } -// The jsEngineAdapter type is an adapter for functions to be used as -// pluggable components for JavaScript Engine. Implements the Pluggable interface. -type jsEngineAdapter func(i interface{}) (interface{}, error) - // Create implements the ComponentCreator interface. -func (f jsEngineAdapter) Create(i interface{}) (interface{}, error) { +func (f JSEngineAdapter) Create(i interface{}) (interface{}, error) { return f(i) } -// ProvideDefault implements the ComponentConfigurable interface. -func (f jsEngineAdapter) ProvideDefault() (interface{}, error) { - // Provide defaults for the optional parameters - // whose default is not their zero value. 
- cfg := &jsEngineConfig{ - RunTimeout: 5, - DisableSourceMaps: true, - } - - return cfg, nil +// JSEngineConfigFunction creates a JSEngine from a JSEngineConfig +func JSEngineConfigFunction(c *JSEngineConfig) (*JSEngine, error) { + return NewJSEngine(&JSEngineConfig{ + SourceB64: c.SourceB64, + RunTimeout: c.RunTimeout, + DisableSourceMaps: c.DisableSourceMaps, + SpMode: c.SpMode, + }) } -// adaptJSEngineFunc returns a jsEngineAdapter. -func adaptJSEngineFunc(f func(c *jsEngineConfig) (*jsEngine, error)) jsEngineAdapter { +// AdaptJSEngineFunc returns an JSEngineAdapter. +func AdaptJSEngineFunc(f func(c *JSEngineConfig) (*JSEngine, error)) JSEngineAdapter { return func(i interface{}) (interface{}, error) { - cfg, ok := i.(*jsEngineConfig) + cfg, ok := i.(*JSEngineConfig) if !ok { - return nil, fmt.Errorf("invalid input, expected jsEngineConfig") + return nil, errors.New("invalid input, expected JSEngineConfig") } return f(cfg) } } -// JSLayer returns the Pluggable transformation layer implemented in JavaScript. -func JSLayer() interface{} { - return adaptJSEngineFunc(newJSEngine) +// NewJSEngine returns a JSEngine from a JSEngineConfig. +func NewJSEngine(c *JSEngineConfig) (*JSEngine, error) { + jsSrc, err := base64.StdEncoding.DecodeString(c.SourceB64) + if err != nil { + return nil, err + } + + compiledCode, err := compileJS(string(jsSrc), c.SourceB64, c.DisableSourceMaps) + if err != nil { + return nil, err + } + + eng := &JSEngine{ + Code: compiledCode, + RunTimeout: time.Duration(c.RunTimeout) * time.Second, + SpMode: c.SpMode, + } + + return eng, nil } -// SmokeTest implements SmokeTester. -func (e *jsEngine) SmokeTest(funcName string) error { +// SmokeTest implements smokeTester. +func (e *JSEngine) SmokeTest(funcName string) error { _, _, err := initRuntime(e, funcName) return err } -// MakeFunction implements FunctionMaker. -func (e *jsEngine) MakeFunction(funcName string) TransformationFunction { +// MakeFunction implements functionMaker. 
+func (e *JSEngine) MakeFunction(funcName string) transform.TransformationFunction { return func(message *models.Message, interState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { // making input @@ -200,7 +203,7 @@ func compileJS(code, name string, disableSrcMaps bool) (*goja.Program, error) { } // initRuntime initializes and returns an instance of a JavaScript runtime. -func initRuntime(e *jsEngine, funcName string) (*goja.Runtime, goja.Callable, error) { +func initRuntime(e *JSEngine, funcName string) (*goja.Runtime, goja.Callable, error) { // goja.New returns *goja.Runtime vm := goja.New() timer := time.AfterFunc(e.RunTimeout, func() { @@ -222,14 +225,14 @@ func initRuntime(e *jsEngine, funcName string) (*goja.Runtime, goja.Callable, er // mkJSEngineInput describes the logic for constructing the input to JS engine. // No side effects. -func mkJSEngineInput(e *jsEngine, message *models.Message, interState interface{}) (*EngineProtocol, error) { +func mkJSEngineInput(e *JSEngine, message *models.Message, interState interface{}) (*engineProtocol, error) { if interState != nil { - if i, ok := interState.(*EngineProtocol); ok { + if i, ok := interState.(*engineProtocol); ok { return i, nil } } - candidate := &EngineProtocol{ + candidate := &engineProtocol{ Data: string(message.Data), } @@ -237,7 +240,7 @@ func mkJSEngineInput(e *jsEngine, message *models.Message, interState interface{ return candidate, nil } - parsedMessage, err := intermediateAsSpEnrichedParsed(interState, message) + parsedMessage, err := transform.IntermediateAsSpEnrichedParsed(interState, message) if err != nil { // if spMode, error for non Snowplow enriched event data return nil, err @@ -253,12 +256,12 @@ func mkJSEngineInput(e *jsEngine, message *models.Message, interState interface{ } // validateJSEngineOut validates the value returned by the js engine. 
-func validateJSEngineOut(output interface{}) (*EngineProtocol, error) { +func validateJSEngineOut(output interface{}) (*engineProtocol, error) { if output == nil { return nil, fmt.Errorf("invalid return type from JavaScript transformation; got null or undefined") } - if out, ok := output.(*EngineProtocol); ok { + if out, ok := output.(*engineProtocol); ok { return out, nil } @@ -267,7 +270,7 @@ func validateJSEngineOut(output interface{}) (*EngineProtocol, error) { return nil, fmt.Errorf("invalid return type from JavaScript transformation") } - result := &EngineProtocol{} + result := &engineProtocol{} err := mapstructure.Decode(outMap, result) if err != nil { return nil, fmt.Errorf("protocol violation in return value from JavaScript transformation") diff --git a/pkg/transform/engine_javascript_test.go b/pkg/transform/engine/engine_javascript_test.go similarity index 67% rename from pkg/transform/engine_javascript_test.go rename to pkg/transform/engine/engine_javascript_test.go index 9fbaeb3d..79b3fb40 100644 --- a/pkg/transform/engine_javascript_test.go +++ b/pkg/transform/engine/engine_javascript_test.go @@ -1,15 +1,14 @@ -// PROPRIETARY AND CONFIDENTIAL +//// PROPRIETARY AND CONFIDENTIAL +//// +//// Unauthorized copying of this file via any medium is strictly prohibited. +//// +//// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. // -// Unauthorized copying of this file via any medium is strictly prohibited. -// -// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
- -package transform +package engine import ( "encoding/base64" "fmt" - "path/filepath" "reflect" "strings" "testing" @@ -18,139 +17,28 @@ import ( "github.com/davecgh/go-spew/spew" "github.com/stretchr/testify/assert" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/models" ) -func TestJSEngineConfig_ENV(t *testing.T) { - testCases := []struct { - Name string - Plug config.Pluggable - Expected interface{} - }{ - { - Name: "transform-js-from-env", - Plug: testJSEngineAdapter(testJSEngineFunc), - Expected: &jsEngineConfig{ - SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", - RunTimeout: 10, - DisableSourceMaps: false, - SpMode: false, - }, - }, - } - - for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { - assert := assert.New(t) - - t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") - - t.Setenv("MESSAGE_TRANSFORMATION", "js") - t.Setenv("TRANSFORMATION_LAYER_NAME", "js") - - t.Setenv("TRANSFORMATION_JS_SOURCE_B64", "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ") - t.Setenv("TRANSFORMATION_JS_TIMEOUT_SEC", "10") - t.Setenv("TRANSFORMATION_JS_DISABLE_SOURCE_MAPS", "false") - t.Setenv("TRANSFORMATION_JS_SNOWPLOW_MODE", "false") - - c, err := config.NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - engine := c.Data.Transform.Layer - decoderOpts := &config.DecoderOptions{ - Input: engine.Body, - } - - result, err := c.CreateComponent(tt.Plug, decoderOpts) - assert.NotNil(result) - assert.Nil(err) - - if !reflect.DeepEqual(result, tt.Expected) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(result), - spew.Sdump(tt.Expected)) - } - }) - } -} - -func TestJSEngineConfig_HCL(t *testing.T) { - testFixPath := "../../config/test-fixtures" - testCases := []struct { - File string - Plug config.Pluggable - Expected interface{} - }{ - { - File: "transform-js-simple.hcl", - Plug: 
testJSEngineAdapter(testJSEngineFunc), - Expected: &jsEngineConfig{ - SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", - RunTimeout: 5, - DisableSourceMaps: true, - SpMode: false, - }, - }, - { - File: "transform-js-extended.hcl", - Plug: testJSEngineAdapter(testJSEngineFunc), - Expected: &jsEngineConfig{ - SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", - RunTimeout: 10, - DisableSourceMaps: false, - SpMode: true, - }, - }, - } - - for _, tt := range testCases { - t.Run(tt.File, func(t *testing.T) { - assert := assert.New(t) - - filename := filepath.Join(testFixPath, tt.File) - t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) - - c, err := config.NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - engine := c.Data.Transform.Layer - decoderOpts := &config.DecoderOptions{ - Input: engine.Body, - } - - result, err := c.CreateComponent(tt.Plug, decoderOpts) - assert.NotNil(result) - assert.Nil(err) - - if !reflect.DeepEqual(result, tt.Expected) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(result), - spew.Sdump(tt.Expected)) - } - }) - } -} - func TestJSLayer(t *testing.T) { - layer := JSLayer() - if _, ok := layer.(config.Pluggable); !ok { - t.Errorf("invalid interface returned from JSLayer") - } + assert := assert.New(t) + + jsEngine, err := NewJSEngine(&JSEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkgewoJICAgIHJldHVybiB4OwoJfQoJ", + RunTimeout: 15, + DisableSourceMaps: true, + SpMode: false, + }) + assert.NotNil(t, jsEngine) + assert.Nil(err) } func TestJSEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { var testInterState interface{} = nil - var testSpMode bool = false + var testSpMode = false testCases := []struct { Src string - FunName string + Scenario string DisableSourceMaps bool Input *models.Message Expected map[string]*models.Message @@ -159,11 +47,11 @@ func TestJSEngineMakeFunction_SpModeFalse_IntermediateNil(t 
*testing.T) { }{ { Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", + Scenario: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: []byte("asdf"), @@ -177,7 +65,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: "asdf", @@ -186,13 +74,13 @@ function identity(x) { }, { Src: ` -function concatHello(x) { - let newVal = "Hello:" + x.Data; - x.Data = newVal; - return x; +function main(x) { + let newVal = "Hello:" + x.Data; + x.Data = newVal; + return x; } `, - FunName: "concatHello", + Scenario: "concatHello", DisableSourceMaps: true, Input: &models.Message{ Data: []byte("asdf"), @@ -206,7 +94,7 @@ function concatHello(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: "Hello:asdf", @@ -215,12 +103,12 @@ function concatHello(x) { }, { Src: ` -function filterIn(x) { - x.FilterOut = false - return x; +function main(x) { + x.FilterOut = false + return x; } `, - FunName: "filterIn", + Scenario: "filterIn", DisableSourceMaps: true, Input: &models.Message{ Data: []byte("asdf"), @@ -234,7 +122,7 @@ function filterIn(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: "asdf", @@ -243,20 +131,20 @@ function filterIn(x) { }, { Src: ` -function filterOut(x) { - if (Object.prototype.toString.call(x.Data) === '[object String]') { - return { - FilterOut: true, - }; - } - - return { - FilterOut: false, - Data: x.Data - }; +function main(x) { + if (Object.prototype.toString.call(x.Data) === '[object String]') { + return { + FilterOut: true, + }; + } + + return { + FilterOut: false, + Data: x.Data + }; } `, - FunName: "filterOut", + Scenario: "filterOut", DisableSourceMaps: true, Input: &models.Message{ 
Data: []byte("asdf"), @@ -275,16 +163,16 @@ function filterOut(x) { }, { Src: ` -function jsonIdentity(x) { - var jsonObj = JSON.parse(x.Data); - var result = JSON.stringify(jsonObj); +function main(x) { + var jsonObj = JSON.parse(x.Data); + var result = JSON.stringify(jsonObj); - return { - Data: result - }; + return { + Data: result + }; } `, - FunName: "jsonIdentity", + Scenario: "jsonIdentity", DisableSourceMaps: false, Input: &models.Message{ Data: testJsJSON, @@ -298,7 +186,7 @@ function jsonIdentity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSON), @@ -307,17 +195,17 @@ function jsonIdentity(x) { }, { Src: ` -function jsonTransformFieldNameRegex(x) { - var jsonObj = JSON.parse(x.Data); +function main(x) { + var jsonObj = JSON.parse(x.Data); - if (jsonObj.hasOwnProperty("app_id")) { - x.Data = x.Data.replace(/app_id/, 'app_id_CHANGED'); - } + if (jsonObj.hasOwnProperty("app_id")) { + x.Data = x.Data.replace(/app_id/, 'app_id_CHANGED'); + } - return x; + return x; } `, - FunName: "jsonTransformFieldNameRegex", + Scenario: "jsonTransformFieldNameRegex", DisableSourceMaps: false, Input: &models.Message{ Data: testJsJSON, @@ -331,7 +219,7 @@ function jsonTransformFieldNameRegex(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSONChanged1), @@ -340,20 +228,20 @@ function jsonTransformFieldNameRegex(x) { }, { Src: ` -function jsonTransformFieldNameObj(x) { +function main(x) { - var jsonObj = JSON.parse(x.Data); + var jsonObj = JSON.parse(x.Data); - var descriptor = Object.getOwnPropertyDescriptor(jsonObj, "app_id"); - Object.defineProperty(jsonObj, "app_id_CHANGED", descriptor); - delete jsonObj["app_id"]; + var descriptor = Object.getOwnPropertyDescriptor(jsonObj, "app_id"); + Object.defineProperty(jsonObj, "app_id_CHANGED", 
descriptor); + delete jsonObj["app_id"]; - return { - Data: JSON.stringify(jsonObj) - }; + return { + Data: JSON.stringify(jsonObj) + }; } `, - FunName: "jsonTransformFieldNameObj", + Scenario: "jsonTransformFieldNameObj", DisableSourceMaps: false, Input: &models.Message{ Data: testJsJSON, @@ -367,7 +255,7 @@ function jsonTransformFieldNameObj(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSONChanged2), @@ -376,19 +264,19 @@ function jsonTransformFieldNameObj(x) { }, { Src: ` -function jsonFilterOut(x) { - var jsonObj = JSON.parse(x.Data); +function main(x) { + var jsonObj = JSON.parse(x.Data); - if (jsonObj.hasOwnProperty("app_id") && jsonObj["app_id"] === "filterMeOut") { - x.FilterOut = false; - } else { - x.FilterOut = true; - } + if (jsonObj.hasOwnProperty("app_id") && jsonObj["app_id"] === "filterMeOut") { + x.FilterOut = false; + } else { + x.FilterOut = true; + } - return x; + return x; } `, - FunName: "jsonFilterOut", + Scenario: "jsonFilterOut", DisableSourceMaps: false, Input: &models.Message{ Data: testJsJSON, @@ -407,11 +295,11 @@ function jsonFilterOut(x) { }, { Src: ` -function returnWrongType(x) { - return 0; +function main(x) { + return 0; } `, - FunName: "returnWrongType", + Scenario: "returnWrongType", DisableSourceMaps: true, Input: &models.Message{ Data: []byte("asdf"), @@ -430,9 +318,9 @@ function returnWrongType(x) { }, { Src: ` -function returnUndefined(x) {} +function main(x) {} `, - FunName: "returnUndefined", + Scenario: "returnUndefined", DisableSourceMaps: true, Input: &models.Message{ Data: []byte("asdf"), @@ -451,11 +339,11 @@ function returnUndefined(x) {} }, { Src: ` -function returnNull(x) { - return null; +function main(x) { + return null; } `, - FunName: "returnNull", + Scenario: "returnNull", DisableSourceMaps: true, Input: &models.Message{ Data: []byte("asdf"), @@ -474,11 +362,11 @@ function returnNull(x) { }, { 
Src: ` -function causeRuntimeError(x) { - return x.toExponential(2); +function main(x) { + return x.toExponential(2); } `, - FunName: "causeRuntimeError", + Scenario: "causeRuntimeError", DisableSourceMaps: true, Input: &models.Message{ Data: []byte("asdf"), @@ -493,15 +381,15 @@ function causeRuntimeError(x) { }, }, ExpInterState: nil, - Error: fmt.Errorf("error running JavaScript function \"causeRuntimeError\""), + Error: fmt.Errorf(`error running JavaScript function "main": "TypeError: Object has no member 'toExponential' at main`), }, { Src: ` -function callError(x) { - throw("Failed"); +function main(x) { + throw("Failed"); } `, - FunName: "callError", + Scenario: "callError", DisableSourceMaps: false, Input: &models.Message{ Data: []byte("asdf"), @@ -516,17 +404,17 @@ function callError(x) { }, }, ExpInterState: nil, - Error: fmt.Errorf("error running JavaScript function \"callError\""), + Error: fmt.Errorf(`error running JavaScript function "main": "Failed at main`), }, { Src: ` -function sleepTenSecs(x) { - var now = new Date().getTime(); - while(new Date().getTime() < now + 10000) { - } +function main(x) { + var now = new Date().getTime(); + while(new Date().getTime() < now + 10000) { + } } `, - FunName: "sleepTenSecs", + Scenario: "sleepTenSecs", DisableSourceMaps: false, Input: &models.Message{ Data: []byte("asdf"), @@ -546,28 +434,28 @@ function sleepTenSecs(x) { } for _, tt := range testCases { - t.Run(tt.FunName, func(t *testing.T) { + t.Run(tt.Scenario, func(t *testing.T) { assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, - RunTimeout: 1, + RunTimeout: 5, DisableSourceMaps: tt.DisableSourceMaps, SpMode: testSpMode, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) assert.NotNil(jsEngine) if err != nil { - t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + t.Fatalf("function NewJSEngine 
failed with error: %q", err.Error()) } - if err := jsEngine.SmokeTest(tt.FunName); err != nil { + if err := jsEngine.SmokeTest("main"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := jsEngine.MakeFunction(tt.FunName) + transFunction := jsEngine.MakeFunction("main") s, f, e, i := transFunction(tt.Input, testInterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -603,7 +491,6 @@ func TestJSEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string DisableSourceMaps bool Input *models.Message Expected map[string]*models.Message @@ -613,11 +500,10 @@ func TestJSEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { { Scenario: "identity", Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsTsv, @@ -631,7 +517,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testJSMap, @@ -641,18 +527,17 @@ function identity(x) { { Scenario: "filtering", Src: ` -function filterOut(input) { - // input is an object - var spData = input.Data; - if (spData["app_id"] === "myApp") { - return input; - } - return { - FilterOut: true - }; +function main(input) { + // input is an object + var spData = input.Data; + if (spData["app_id"] === "myApp") { + return input; + } + return { + FilterOut: true + }; } `, - FunName: "filterOut", DisableSourceMaps: true, Input: &models.Message{ Data: testJsTsv, @@ -672,15 +557,14 @@ function filterOut(input) { { Scenario: "filteringOut_ignoresData", Src: ` -function filterOutIgnores(x) { - return { - FilterOut: true, - Data: "shouldNotAppear", - PartitionKey: "notThis" - }; +function main(x) { + return { + FilterOut: true, + Data: "shouldNotAppear", + PartitionKey: "notThis" + }; } `, - FunName: 
"filterOutIgnores", DisableSourceMaps: true, Input: &models.Message{ Data: testJsTsv, @@ -700,11 +584,10 @@ function filterOutIgnores(x) { { Scenario: "non_Snowplow_enriched_to_failed", Src: ` -function willNotRun(x) { - return x; +function main(x) { + return x; } `, - FunName: "willNotRun", DisableSourceMaps: false, Input: &models.Message{ Data: []byte("nonSpEnrichedEvent"), @@ -724,11 +607,10 @@ function willNotRun(x) { { Scenario: "return_wrong_type", Src: ` -function returnWrongType(x) { - return 0; +function main(x) { + return 0; } `, - FunName: "returnWrongType", DisableSourceMaps: true, Input: &models.Message{ Data: testJsTsv, @@ -752,24 +634,24 @@ function returnWrongType(x) { assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, - RunTimeout: 1, + RunTimeout: 5, DisableSourceMaps: tt.DisableSourceMaps, SpMode: testSpMode, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) assert.NotNil(jsEngine) if err != nil { - t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) } - if err := jsEngine.SmokeTest(tt.FunName); err != nil { + if err := jsEngine.SmokeTest("main"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := jsEngine.MakeFunction(tt.FunName) + transFunction := jsEngine.MakeFunction("main") s, f, e, i := transFunction(tt.Input, testInterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -804,7 +686,6 @@ func TestJSEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string DisableSourceMaps bool Input *models.Message InterState interface{} @@ -815,17 +696,16 @@ func TestJSEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { { Scenario: "intermediateState_EngineProtocol_Map", Src: ` -function identity(x) { - 
return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsJSON, PartitionKey: "some-test-key", }, - InterState: &EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testJSMap, @@ -838,7 +718,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testJSMap, @@ -848,17 +728,16 @@ function identity(x) { { Scenario: "intermediateState_EngineProtocol_String", Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsJSON, PartitionKey: "some-test-key", }, - InterState: &EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSON), @@ -871,7 +750,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSON), @@ -881,11 +760,10 @@ function identity(x) { { Scenario: "intermediateState_not_EngineProtocol_spMode_true", Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsJSON, @@ -900,7 +778,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSON), @@ -910,11 +788,10 @@ function identity(x) { { Scenario: "intermediateState_not_EngineProtocol_spMode_false", Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsJSON, @@ -929,7 +806,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ 
+ ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSON), @@ -943,24 +820,24 @@ function identity(x) { assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, - RunTimeout: 1, + RunTimeout: 5, DisableSourceMaps: tt.DisableSourceMaps, SpMode: testSpMode, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) assert.NotNil(jsEngine) if err != nil { - t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) } - if err := jsEngine.SmokeTest(tt.FunName); err != nil { + if err := jsEngine.SmokeTest("main"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := jsEngine.MakeFunction(tt.FunName) + transFunction := jsEngine.MakeFunction("main") s, f, e, i := transFunction(tt.Input, tt.InterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -995,7 +872,6 @@ func TestJSEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string DisableSourceMaps bool Input *models.Message InterState interface{} @@ -1006,17 +882,16 @@ func TestJSEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { { Scenario: "intermediateState_EngineProtocol_Map", Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsJSON, PartitionKey: "some-test-key", }, - InterState: &EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testJSMap, @@ -1029,7 +904,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testJSMap, @@ -1039,17 +914,16 @@ function identity(x) { { Scenario: 
"intermediateState_EngineProtocol_String", Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsJSON, PartitionKey: "some-test-key", }, - InterState: &EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSON), @@ -1062,7 +936,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testJsJSON), @@ -1072,11 +946,10 @@ function identity(x) { { Scenario: "intermediateState_notEngineProtocol_notSpEnriched", Src: ` -function willNotRun(x) { - return x; +function main(x) { + return x; } `, - FunName: "willNotRun", DisableSourceMaps: true, Input: &models.Message{ Data: testJsJSON, @@ -1097,11 +970,10 @@ function willNotRun(x) { { Scenario: "intermediateState_notEngineProtocol_SpEnriched", Src: ` -function identity(x) { - return x; +function main(x) { + return x; } `, - FunName: "identity", DisableSourceMaps: true, Input: &models.Message{ Data: testJsTsv, @@ -1116,7 +988,7 @@ function identity(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testJSMap, @@ -1130,24 +1002,24 @@ function identity(x) { assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, - RunTimeout: 1, + RunTimeout: 5, DisableSourceMaps: tt.DisableSourceMaps, SpMode: testSpMode, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) assert.NotNil(jsEngine) if err != nil { - t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) } - if err := jsEngine.SmokeTest(tt.FunName); err != nil { + if err := 
jsEngine.SmokeTest("main"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := jsEngine.MakeFunction(tt.FunName) + transFunction := jsEngine.MakeFunction("main") s, f, e, i := transFunction(tt.Input, tt.InterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -1182,7 +1054,6 @@ func TestJSEngineMakeFunction_SetPK(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string DisableSourceMaps bool SpMode bool Input *models.Message @@ -1193,12 +1064,11 @@ func TestJSEngineMakeFunction_SetPK(t *testing.T) { { Scenario: "onlySetPk_spModeTrue", Src: ` -function onlySetPk(x) { - x.PartitionKey = "newPk"; - return x; +function main(x) { + x.PartitionKey = "newPk"; + return x; } `, - FunName: "onlySetPk", DisableSourceMaps: true, SpMode: true, Input: &models.Message{ @@ -1213,7 +1083,7 @@ function onlySetPk(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "newPk", Data: testJSMap, @@ -1223,12 +1093,11 @@ function onlySetPk(x) { { Scenario: "onlySetPk_spModeFalse", Src: ` -function onlySetPk(x) { - x.PartitionKey = "newPk"; - return x; +function main(x) { + x.PartitionKey = "newPk"; + return x; } `, - FunName: "onlySetPk", DisableSourceMaps: true, SpMode: false, Input: &models.Message{ @@ -1243,7 +1112,7 @@ function onlySetPk(x) { "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "newPk", Data: string(testJsTsv), @@ -1253,15 +1122,14 @@ function onlySetPk(x) { { Scenario: "filterOutIgnores", Src: ` -function filterOutIgnores(x) { - return { - FilterOut: true, - Data: "shouldNotAppear", - PartitionKey: "notThis" - }; +function main(x) { + return { + FilterOut: true, + Data: "shouldNotAppear", + PartitionKey: "notThis" + }; } `, - FunName: "filterOutIgnores", DisableSourceMaps: true, SpMode: true, Input: &models.Message{ @@ -1286,24 
+1154,24 @@ function filterOutIgnores(x) { assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, - RunTimeout: 1, + RunTimeout: 5, DisableSourceMaps: tt.DisableSourceMaps, SpMode: tt.SpMode, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) assert.NotNil(jsEngine) if err != nil { - t.Fatalf("function newJSEngine failed with error: %q", err.Error()) + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) } - if err := jsEngine.SmokeTest(tt.FunName); err != nil { + if err := jsEngine.SmokeTest("main"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := jsEngine.MakeFunction(tt.FunName) + transFunction := jsEngine.MakeFunction("main") s, f, e, i := transFunction(tt.Input, testInterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -1344,7 +1212,7 @@ func TestJSEngineSmokeTest(t *testing.T) { { Src: ` function identity(x) { - return x; + return x; } `, FunName: "identity", @@ -1354,8 +1222,8 @@ function identity(x) { }, { Src: ` -function notThisOne(x) { - return x; +function notMain(x) { + return x; } `, FunName: "notExists", @@ -1365,8 +1233,8 @@ function notThisOne(x) { }, { Src: ` -function syntaxError(x) { - loca y = 0; +function main(x) { + local y = 0; } `, FunName: "syntaxError", @@ -1381,21 +1249,21 @@ function syntaxError(x) { assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, - RunTimeout: 1, + RunTimeout: 5, DisableSourceMaps: tt.DisableSourceMaps, } - jsEngine, compileErr := newJSEngine(jsConfig) + jsEngine, compileErr := NewJSEngine(jsConfig) if compileErr != nil { if tt.CompileError == nil { - t.Fatalf("got unexpected error while creating newJSEngine: %s", compileErr.Error()) + t.Fatalf("got unexpected error while creating NewJSEngine: %s", 
compileErr.Error()) } if !strings.Contains(compileErr.Error(), tt.CompileError.Error()) { - t.Errorf("newJSEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", + t.Errorf("NewJSEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", compileErr.Error(), tt.CompileError.Error()) } @@ -1422,269 +1290,12 @@ function syntaxError(x) { } } -func TestJSEngineWithBuiltinsSpModeFalse(t *testing.T) { - srcCode := ` -function identity(x) { - return x; -} - -function setPk(x) { - x.PartitionKey = "testKey"; - return x; -} -` - // JS - src := base64.StdEncoding.EncodeToString([]byte(srcCode)) - jsConfig := &jsEngineConfig{ - SourceB64: src, - RunTimeout: 1, - SpMode: false, - } - - jsEngine, err := newJSEngine(jsConfig) - if err != nil { - t.Fatalf("newJSEngine failed with error: %q", err) - } - - if err := jsEngine.SmokeTest("identity"); err != nil { - t.Fatalf("smoke-test failed with error: %q", err.Error()) - } - if err := jsEngine.SmokeTest("setPk"); err != nil { - t.Fatalf("smoke-test failed with error: %q", err.Error()) - } - - jsFuncID := jsEngine.MakeFunction("identity") - jsFuncPk := jsEngine.MakeFunction("setPk") - - // Builtins - setPkToAppID := NewSpEnrichedSetPkFunction("app_id") - spEnrichedToJSON := SpEnrichedToJSON - - testCases := []struct { - Name string - Transformation TransformationApplyFunction - Input []*models.Message - ExpectedGood []*models.Message - }{ - { - Name: "identity0", - Input: messages, - Transformation: NewTransformation( - jsFuncID, - setPkToAppID, - spEnrichedToJSON, - ), - ExpectedGood: []*models.Message{ - { - Data: snowplowJSON1, - PartitionKey: "test-data1", - }, - { - Data: snowplowJSON2, - PartitionKey: "test-data2", - }, - { - Data: snowplowJSON3, - PartitionKey: "test-data3", - }, - }, - }, - { - Name: "identity2", - Input: messages, - Transformation: NewTransformation( - setPkToAppID, - spEnrichedToJSON, - jsFuncID, - ), - ExpectedGood: []*models.Message{ - { - Data: snowplowJSON1, 
- PartitionKey: "test-data1", - }, - { - Data: snowplowJSON2, - PartitionKey: "test-data2", - }, - { - Data: snowplowJSON3, - PartitionKey: "test-data3", - }, - }, - }, - { - Name: "setPk1", - Input: messages, - Transformation: NewTransformation( - setPkToAppID, - jsFuncPk, - spEnrichedToJSON, - ), - ExpectedGood: []*models.Message{ - { - Data: snowplowJSON1, - PartitionKey: "testKey", - }, - { - Data: snowplowJSON2, - PartitionKey: "testKey", - }, - { - Data: snowplowJSON3, - PartitionKey: "testKey", - }, - }, - }, - } - - for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { - assert := assert.New(t) - - result := tt.Transformation(tt.Input) - assert.NotNil(result) - assert.Equal(len(tt.ExpectedGood), len(result.Result)) - for i, res := range result.Result { - if i < len(tt.ExpectedGood) { - exp := tt.ExpectedGood[i] - assert.JSONEq(string(exp.Data), string(res.Data)) - assert.Equal(exp.PartitionKey, res.PartitionKey) - } - } - }) - } -} - -func TestJSEngineWithBuiltinsSpModeTrue(t *testing.T) { - srcCode := ` -function identity(x) { - return x; -} - -function setPk(x) { - x.PartitionKey = "testKey"; - return x; -} -` - // JS - src := base64.StdEncoding.EncodeToString([]byte(srcCode)) - jsConfig := &jsEngineConfig{ - SourceB64: src, - RunTimeout: 1, - SpMode: true, - } - - jsEngine, err := newJSEngine(jsConfig) - if err != nil { - t.Fatalf("newJSEngine failed with error: %q", err) - } - - if err := jsEngine.SmokeTest("identity"); err != nil { - t.Fatalf("smoke-test failed with error: %q", err.Error()) - } - if err := jsEngine.SmokeTest("setPk"); err != nil { - t.Fatalf("smoke-test failed with error: %q", err.Error()) - } - - jsFuncID := jsEngine.MakeFunction("identity") - jsFuncPk := jsEngine.MakeFunction("setPk") - - // Builtins - setPkToAppID := NewSpEnrichedSetPkFunction("app_id") - spEnrichedToJSON := SpEnrichedToJSON - - testCases := []struct { - Name string - Transformation TransformationApplyFunction - Input []*models.Message - ExpectedGood 
[]*models.Message - }{ - { - Name: "identity", - Input: []*models.Message{ - { - Data: testJsTsv, - PartitionKey: "prevKey", - }, - }, - Transformation: NewTransformation( - setPkToAppID, - spEnrichedToJSON, - jsFuncID, - ), - ExpectedGood: []*models.Message{ - { - Data: testJsJSON, - PartitionKey: "test-data<>", - }, - }, - }, - { - Name: "setPk", - Input: []*models.Message{ - { - Data: testJsTsv, - PartitionKey: "prevKey", - }, - }, - Transformation: NewTransformation( - setPkToAppID, - jsFuncPk, - ), - ExpectedGood: []*models.Message{ - { - Data: testJsJSON, - PartitionKey: "testKey", - }, - }, - }, - { - Name: "mix", - Input: []*models.Message{ - { - Data: testJsTsv, - PartitionKey: "prevKey", - }, - }, - Transformation: NewTransformation( - setPkToAppID, - jsFuncID, - jsFuncPk, - jsFuncID, - ), - ExpectedGood: []*models.Message{ - { - Data: testJsJSON, - PartitionKey: "testKey", - }, - }, - }, - } - - for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { - assert := assert.New(t) - - result := tt.Transformation(tt.Input) - assert.NotNil(result) - assert.Equal(len(tt.ExpectedGood), len(result.Result)) - for i, res := range result.Result { - if i < len(tt.ExpectedGood) { - exp := tt.ExpectedGood[i] - assert.JSONEq(string(exp.Data), string(res.Data)) - assert.Equal(exp.PartitionKey, res.PartitionKey) - } - } - }) - } -} - func Benchmark_JSEngine_Passthrough_DisabledSrcMaps(b *testing.B) { b.ReportAllocs() srcCode := ` -function identity(x) { - return x; +function main(x) { + return x; } ` src := base64.StdEncoding.EncodeToString([]byte(srcCode)) @@ -1693,15 +1304,15 @@ function identity(x) { PartitionKey: "some-test-key", } - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, RunTimeout: 5, DisableSourceMaps: true, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) if err != nil { - b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + b.Fatalf("function NewJSEngine failed 
with error: %q", err.Error()) } // not Smoke-Tested @@ -1716,8 +1327,8 @@ func Benchmark_JSEngine_Passthrough(b *testing.B) { b.ReportAllocs() srcCode := ` -function identity(x) { - return x; +function main(x) { + return x; } ` src := base64.StdEncoding.EncodeToString([]byte(srcCode)) @@ -1726,15 +1337,15 @@ function identity(x) { PartitionKey: "some-test-key", } - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, RunTimeout: 5, DisableSourceMaps: false, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) if err != nil { - b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + b.Fatalf("function NewJSEngine failed with error: %q", err.Error()) } // not Smoke-Tested @@ -1749,8 +1360,8 @@ func Benchmark_JSEngine_PassthroughSpMode(b *testing.B) { b.ReportAllocs() srcCode := ` -function identity(x) { - return x; +function main(x) { + return x; } ` src := base64.StdEncoding.EncodeToString([]byte(srcCode)) @@ -1759,15 +1370,15 @@ function identity(x) { PartitionKey: "some-test-key", } - jsConfig := &jsEngineConfig{ + jsConfig := &JSEngineConfig{ SourceB64: src, RunTimeout: 5, DisableSourceMaps: false, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) if err != nil { - b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + b.Fatalf("function NewJSEngine failed with error: %q", err.Error()) } // not Smoke-Tested @@ -1782,13 +1393,13 @@ func Benchmark_JSEngine_Passthrough_JsJson(b *testing.B) { b.ReportAllocs() srcCode := ` -function jsonIdentity(x) { - var jsonObj = JSON.parse(x.Data); - var result = JSON.stringify(jsonObj); +function main(x) { + var jsonObj = JSON.parse(x.Data); + var result = JSON.stringify(jsonObj); - return { - Data: result - }; + return { + Data: result + }; } ` src := base64.StdEncoding.EncodeToString([]byte(srcCode)) @@ -1797,15 +1408,15 @@ function jsonIdentity(x) { PartitionKey: "some-test-key", } - jsConfig := &jsEngineConfig{ 
+ jsConfig := &JSEngineConfig{ SourceB64: src, RunTimeout: 5, DisableSourceMaps: false, } - jsEngine, err := newJSEngine(jsConfig) + jsEngine, err := NewJSEngine(jsConfig) if err != nil { - b.Fatalf("function newJSEngine failed with error: %q", err.Error()) + b.Fatalf("function NewJSEngine failed with error: %q", err.Error()) } // not Smoke-Tested @@ -1816,20 +1427,7 @@ function jsonIdentity(x) { } } -// Test helpers -func testJSEngineAdapter(f func(c *jsEngineConfig) (*jsEngineConfig, error)) jsEngineAdapter { - return func(i interface{}) (interface{}, error) { - cfg, ok := i.(*jsEngineConfig) - if !ok { - return nil, fmt.Errorf("invalid input, expected jsEngineConfig") - } - - return f(cfg) - } - -} - -func testJSEngineFunc(c *jsEngineConfig) (*jsEngineConfig, error) { +func testJSEngineFunc(c *JSEngineConfig) (*JSEngineConfig, error) { return c, nil } @@ -1921,6 +1519,7 @@ var testJSMap = map[string]interface{}{ var testJsTsv = []byte(`test-data<> pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 1.2.3.4 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) +// // corresponding JSON to previous TSV var testJsJSON = []byte(`{"app_id":"test-data<>","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"1.2.3.4","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) diff --git a/pkg/transform/engine_lua.go b/pkg/transform/engine/engine_lua.go similarity index 82% rename from pkg/transform/engine_lua.go rename to pkg/transform/engine/engine_lua.go index bd1f5ba5..2b6e6eca 100644 --- 
a/pkg/transform/engine_lua.go +++ b/pkg/transform/engine/engine_lua.go @@ -4,7 +4,7 @@ // // Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. -package transform +package engine import ( "context" @@ -15,32 +15,34 @@ import ( gojson "github.com/goccy/go-json" "github.com/mitchellh/mapstructure" + "github.com/pkg/errors" "github.com/yuin/gluamapper" lua "github.com/yuin/gopher-lua" luaparse "github.com/yuin/gopher-lua/parse" luajson "layeh.com/gopher-json" "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform" ) -// luaEngineConfig configures the Lua Engine. -type luaEngineConfig struct { - SourceB64 string `hcl:"source_b64" env:"TRANSFORMATION_LUA_SOURCE_B64"` - RunTimeout int `hcl:"timeout_sec,optional" env:"TRANSFORMATION_LUA_TIMEOUT_SEC"` - Sandbox bool `hcl:"sandbox,optional" env:"TRANSFORMATION_LUA_SANDBOX"` - SpMode bool `hcl:"snowplow_mode,optional" env:"TRANSFORMATION_LUA_SNOWPLOW_MODE"` +// LuaEngineConfig configures the Lua Engine. +type LuaEngineConfig struct { + SourceB64 string `hcl:"source_b64"` + RunTimeout int `hcl:"timeout_sec,optional"` + Sandbox bool `hcl:"sandbox,optional"` + SpMode bool `hcl:"snowplow_mode,optional"` } -// luaEngine handles the provision of a Lua runtime to run transformations. -type luaEngine struct { +// LuaEngine handles the provision of a Lua runtime to run transformations. +type LuaEngine struct { Code *lua.FunctionProto RunTimeout time.Duration Options *lua.Options SpMode bool } -// newLuaEngine returns a Lua Engine from a luaEngineConfig. -func newLuaEngine(c *luaEngineConfig) (*luaEngine, error) { +// NewLuaEngine returns a Lua Engine from a LuaEngineConfig. 
+func NewLuaEngine(c *LuaEngineConfig) (*LuaEngine, error) { luaSrc, err := base64.StdEncoding.DecodeString(c.SourceB64) if err != nil { return nil, err @@ -51,7 +53,7 @@ func newLuaEngine(c *luaEngineConfig) (*luaEngine, error) { return nil, err } - eng := &luaEngine{ + eng := &LuaEngine{ Code: compiledCode, RunTimeout: time.Duration(c.RunTimeout) * time.Second, Options: &lua.Options{SkipOpenLibs: c.Sandbox}, @@ -61,20 +63,32 @@ func newLuaEngine(c *luaEngineConfig) (*luaEngine, error) { return eng, nil } -// The luaEngineAdapter type is an adapter for functions to be used as +// The LuaEngineAdapter type is an adapter for functions to be used as // pluggable components for Lua Engine. It implements the Pluggable interface. -type luaEngineAdapter func(i interface{}) (interface{}, error) +type LuaEngineAdapter func(i interface{}) (interface{}, error) + +// AdaptLuaEngineFunc returns a LuaEngineAdapter. +func AdaptLuaEngineFunc(f func(c *LuaEngineConfig) (*LuaEngine, error)) LuaEngineAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*LuaEngineConfig) + if !ok { + return nil, errors.New("invalid input, expected LuaEngineConfig") + } + + return f(cfg) + } +} // Create implements the ComponentCreator interface. -func (f luaEngineAdapter) Create(i interface{}) (interface{}, error) { +func (f LuaEngineAdapter) Create(i interface{}) (interface{}, error) { return f(i) } // ProvideDefault implements the ComponentConfigurable interface. -func (f luaEngineAdapter) ProvideDefault() (interface{}, error) { +func (f LuaEngineAdapter) ProvideDefault() (interface{}, error) { // Provide defaults for the optional parameters // whose default is not their zero value. - cfg := &luaEngineConfig{ + cfg := &LuaEngineConfig{ RunTimeout: 5, Sandbox: true, } @@ -82,25 +96,18 @@ func (f luaEngineAdapter) ProvideDefault() (interface{}, error) { return cfg, nil } -// adaptLuaEngineFunc returns a luaEngineAdapter. 
-func adaptLuaEngineFunc(f func(c *luaEngineConfig) (*luaEngine, error)) luaEngineAdapter { - return func(i interface{}) (interface{}, error) { - cfg, ok := i.(*luaEngineConfig) - if !ok { - return nil, fmt.Errorf("invalid input, expected luaEngineConfig") - } - - return f(cfg) - } -} - -// LuaLayer returns the Pluggable transformation layer implemented in Lua. -func LuaLayer() interface{} { - return adaptLuaEngineFunc(newLuaEngine) +// LuaEngineConfigFunction returns the Pluggable transformation layer implemented in Lua. +func LuaEngineConfigFunction(t *LuaEngineConfig) (*LuaEngine, error) { + return NewLuaEngine(&LuaEngineConfig{ + SourceB64: t.SourceB64, + RunTimeout: t.RunTimeout, + Sandbox: t.Sandbox, + SpMode: t.SpMode, + }) } -// SmokeTest implements SmokeTester. -func (e *luaEngine) SmokeTest(funcName string) error { +// SmokeTest implements smokeTester. +func (e *LuaEngine) SmokeTest(funcName string) error { // setup the Lua state L := lua.NewState(*e.Options) // L is ptr defer L.Close() @@ -113,8 +120,8 @@ func (e *luaEngine) SmokeTest(funcName string) error { return initVM(e, L, funcName) } -// MakeFunction implements FunctionMaker. -func (e *luaEngine) MakeFunction(funcName string) TransformationFunction { +// MakeFunction implements functionMaker. +func (e *LuaEngine) MakeFunction(funcName string) transform.TransformationFunction { return func(message *models.Message, interState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { // making input @@ -233,7 +240,7 @@ func loadLuaCode(ls *lua.LState, proto *lua.FunctionProto) error { } // initVM performs the initialization steps for a Lua state. 
-func initVM(e *luaEngine, L *lua.LState, funcName string) error { +func initVM(e *LuaEngine, L *lua.LState, funcName string) error { if e.Options.SkipOpenLibs == false { luajson.Preload(L) } @@ -252,14 +259,14 @@ func initVM(e *luaEngine, L *lua.LState, funcName string) error { // mkLuaEngineInput describes the process of constructing input to Lua engine. // No side effects. -func mkLuaEngineInput(e *luaEngine, message *models.Message, interState interface{}) (*lua.LTable, error) { +func mkLuaEngineInput(e *LuaEngine, message *models.Message, interState interface{}) (*lua.LTable, error) { if interState != nil { - if i, ok := interState.(*EngineProtocol); ok { + if i, ok := interState.(*engineProtocol); ok { return toLuaTable(i) } } - candidate := &EngineProtocol{ + candidate := &engineProtocol{ Data: string(message.Data), } @@ -267,7 +274,7 @@ func mkLuaEngineInput(e *luaEngine, message *models.Message, interState interfac return toLuaTable(candidate) } - parsedMessage, err := intermediateAsSpEnrichedParsed(interState, message) + parsedMessage, err := transform.IntermediateAsSpEnrichedParsed(interState, message) if err != nil { // if spMode, error for non Snowplow enriched event data return nil, err @@ -283,7 +290,7 @@ func mkLuaEngineInput(e *luaEngine, message *models.Message, interState interfac } // toLuaTable -func toLuaTable(p *EngineProtocol) (*lua.LTable, error) { +func toLuaTable(p *engineProtocol) (*lua.LTable, error) { var tmpMap map[string]interface{} err := mapstructure.Decode(p, &tmpMap) @@ -367,14 +374,14 @@ func mapToLTable(m map[string]interface{}) (*lua.LTable, error) { } // validateLuaEngineOut validates the value returned from the Lua engine is a -// Lua Table (lua.LTable) and that it maps to EngineProtocol. -func validateLuaEngineOut(output interface{}) (*EngineProtocol, error) { +// Lua Table (lua.LTable) and that it maps to engineProtocol. 
+func validateLuaEngineOut(output interface{}) (*engineProtocol, error) { if output == nil { return nil, fmt.Errorf("invalid return type from Lua transformation; got nil") } if luaTablePtr, ok := output.(*lua.LTable); ok { - result := &EngineProtocol{} + result := &engineProtocol{} luaMapper := gluamapper.NewMapper(gluamapper.Option{ NameFunc: gluamapper.Id, }) diff --git a/pkg/transform/engine_lua_test.go b/pkg/transform/engine/engine_lua_test.go similarity index 74% rename from pkg/transform/engine_lua_test.go rename to pkg/transform/engine/engine_lua_test.go index 6b0672ef..e7dfdd09 100644 --- a/pkg/transform/engine_lua_test.go +++ b/pkg/transform/engine/engine_lua_test.go @@ -4,12 +4,11 @@ // // Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. -package transform +package engine import ( "encoding/base64" "fmt" - "path/filepath" "reflect" "strings" "testing" @@ -17,127 +16,20 @@ import ( "github.com/davecgh/go-spew/spew" "github.com/stretchr/testify/assert" - config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/transform" ) -func TestLuaEngineConfig_ENV(t *testing.T) { - testCases := []struct { - Name string - Plug config.Pluggable - Expected interface{} - }{ - { - Name: "transform-lua-from-env", - Plug: testLuaEngineAdapter(testLuaEngineFunc), - Expected: &luaEngineConfig{ - SourceB64: "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", - RunTimeout: 10, - Sandbox: false, - }, - }, - } - - for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { - assert := assert.New(t) - - t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", "") - - t.Setenv("MESSAGE_TRANSFORMATION", "lua:fun") - t.Setenv("TRANSFORMATION_LAYER_NAME", "lua") - - t.Setenv("TRANSFORMATION_LUA_SOURCE_B64", "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ") - t.Setenv("TRANSFORMATION_LUA_TIMEOUT_SEC", "10") - 
t.Setenv("TRANSFORMATION_LUA_SANDBOX", "false") - - c, err := config.NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - engine := c.Data.Transform.Layer - decoderOpts := &config.DecoderOptions{ - Input: engine.Body, - } - - result, err := c.CreateComponent(tt.Plug, decoderOpts) - assert.NotNil(result) - assert.Nil(err) - - if !reflect.DeepEqual(result, tt.Expected) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(result), - spew.Sdump(tt.Expected)) - } - }) - } -} - -func TestLuaEngineConfig_HCL(t *testing.T) { - fixturesDir := "../../config/test-fixtures" - testCases := []struct { - File string - Plug config.Pluggable - Expected interface{} - }{ - { - File: "transform-lua-simple.hcl", - Plug: testLuaEngineAdapter(testLuaEngineFunc), - Expected: &luaEngineConfig{ - SourceB64: "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", - RunTimeout: 5, - Sandbox: true, - }, - }, - { - File: "transform-lua-extended.hcl", - Plug: testLuaEngineAdapter(testLuaEngineFunc), - Expected: &luaEngineConfig{ - SourceB64: "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", - RunTimeout: 10, - Sandbox: false, - }, - }, - } - - for _, tt := range testCases { - t.Run(tt.File, func(t *testing.T) { - assert := assert.New(t) - - filename := filepath.Join(fixturesDir, tt.File) - t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) - - c, err := config.NewConfig() - assert.NotNil(c) - if err != nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - engine := c.Data.Transform.Layer - decoderOpts := &config.DecoderOptions{ - Input: engine.Body, - } - - result, err := c.CreateComponent(tt.Plug, decoderOpts) - assert.NotNil(result) - assert.Nil(err) - - if !reflect.DeepEqual(result, tt.Expected) { - t.Errorf("GOT:\n%s\nEXPECTED:\n%s", - spew.Sdump(result), - spew.Sdump(tt.Expected)) - } - }) - } -} - func TestLuaLayer(t *testing.T) { - layer := LuaLayer() 
- if _, ok := layer.(config.Pluggable); !ok { - t.Errorf("invalid interface returned from LuaLayer") - } + assert := assert.New(t) + layer, err := LuaEngineConfigFunction(&LuaEngineConfig{ + SourceB64: "CglmdW5jdGlvbiBmb28oeCkKICAgICAgICAgICByZXR1cm4geAogICAgICAgIGVuZAoJ", + RunTimeout: 5, + Sandbox: false, + SpMode: false, + }) + assert.Nil(err) + assert.NotNil(layer) } func TestLuaEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { @@ -145,7 +37,7 @@ func TestLuaEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { var testSpMode bool = false testCases := []struct { Src string - FunName string + Scenario string Sandbox bool Input *models.Message Expected map[string]*models.Message @@ -154,12 +46,12 @@ func TestLuaEngineMakeFunction_SpModeFalse_IntermediateNil(t *testing.T) { }{ { Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", - Sandbox: true, + Scenario: "main", + Sandbox: true, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -172,7 +64,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: "asdf", @@ -181,13 +73,13 @@ end }, { Src: ` -function concatHello(x) - x.Data = "Hello:" .. x.Data - return x +function main(x) + x.Data = "Hello:" .. 
x.Data + return x end `, - FunName: "concatHello", - Sandbox: true, + Scenario: "main", + Sandbox: true, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -200,7 +92,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: "Hello:asdf", @@ -209,13 +101,13 @@ end }, { Src: ` -function filterIn(x) - x.FilterOut = false - return x +function main(x) + x.FilterOut = false + return x end `, - FunName: "filterIn", - Sandbox: true, + Scenario: "main", + Sandbox: true, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -228,7 +120,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: "asdf", @@ -237,15 +129,15 @@ end }, { Src: ` -function filterOut(x) - if type(x.Data) == "string" then - return { FilterOut = true } - end - return { FilterOut = false } +function main(x) + if type(x.Data) == "string" then + return { FilterOut = true } + end + return { FilterOut = false } end `, - FunName: "filterOut", - Sandbox: false, + Scenario: "main", + Sandbox: false, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -265,20 +157,20 @@ end Src: ` local json = require("json") -function jsonIdentity(x) - local dat = x["Data"] - local jsonObj, decodeErr = json.decode(dat) - if decodeErr then error(decodeErr) end +function main(x) + local dat = x["Data"] + local jsonObj, decodeErr = json.decode(dat) + if decodeErr then error(decodeErr) end - local result, encodeErr = json.encode(jsonObj) - if encodeErr then error(encodeErr) end + local result, encodeErr = json.encode(jsonObj) + if encodeErr then error(encodeErr) end - x.Data = result - return x + x.Data = result + return x end `, - FunName: "jsonIdentity", - Sandbox: false, + Scenario: "main", + Sandbox: false, Input: &models.Message{ Data: snowplowJSON1, 
PartitionKey: "some-test-key", @@ -291,7 +183,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(snowplowJSON1), @@ -302,22 +194,22 @@ end Src: ` local json = require("json") -function jsonTransformFieldName(x) - local data = x["Data"] - local jsonObj, decodeErr = json.decode(data) - if decodeErr then error(decodeErr) end +function main(x) + local data = x["Data"] + local jsonObj, decodeErr = json.decode(data) + if decodeErr then error(decodeErr) end - jsonObj["app_id_CHANGED"] = jsonObj["app_id"] - jsonObj["app_id"] = nil + jsonObj["app_id_CHANGED"] = jsonObj["app_id"] + jsonObj["app_id"] = nil - local result, encodeErr = json.encode(jsonObj) - if encodeErr then error(encodeErr) end + local result, encodeErr = json.encode(jsonObj) + if encodeErr then error(encodeErr) end - return { Data = result } + return { Data = result } end `, - FunName: "jsonTransformFieldName", - Sandbox: false, + Scenario: "main", + Sandbox: false, Input: &models.Message{ Data: snowplowJSON1, PartitionKey: "some-test-key", @@ -330,7 +222,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(snowplowJSON1ChangedLua), @@ -341,19 +233,19 @@ end Src: ` local json = require("json") -function jsonFilterOut(x) - local jsonObj, decodeErr = json.decode(x["Data"]) - if decodeErr then error(decodeErr) end +function main(x) + local jsonObj, decodeErr = json.decode(x["Data"]) + if decodeErr then error(decodeErr) end - if jsonObj["app_id"] == "filterMeOut" then - return { FilterOut = false, Data = x["Data"] } - else - return { FilterOut = true } - end + if jsonObj["app_id"] == "filterMeOut" then + return { FilterOut = false, Data = x["Data"] } + else + return { FilterOut = true } + end end `, - FunName: "jsonFilterOut", - Sandbox: false, + Scenario: "main", + Sandbox: false, Input: 
&models.Message{ Data: snowplowJSON1, PartitionKey: "some-test-key", @@ -371,12 +263,12 @@ end }, { Src: ` -function retWrongType(x) - return 0 +function main(x) + return 0 end `, - FunName: "retWrongType", - Sandbox: true, + Scenario: "main", + Sandbox: true, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -394,11 +286,11 @@ end }, { Src: ` -function noReturn(x) +function main(x) end `, - FunName: "noReturn", - Sandbox: true, + Scenario: "main", + Sandbox: true, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -416,12 +308,12 @@ end }, { Src: ` -function returnNil(x) - return nil +function main(x) + return nil end `, - FunName: "returnNil", - Sandbox: true, + Scenario: "main", + Sandbox: true, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -439,12 +331,12 @@ end }, { Src: ` -function causeRuntimeError(x) - return 2 * x +function main(x) + return 2 * x end `, - FunName: "causeRuntimeError", - Sandbox: true, + Scenario: "main", + Sandbox: true, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -458,16 +350,16 @@ end }, }, ExpInterState: nil, - Error: fmt.Errorf("error running Lua function \"causeRuntimeError\""), + Error: fmt.Errorf("error running Lua function \"main\""), }, { Src: ` -function callError(x) - error("Failed") +function main(x) + error("Failed") end `, - FunName: "callError", - Sandbox: false, + Scenario: "main", + Sandbox: false, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -481,19 +373,19 @@ end }, }, ExpInterState: nil, - Error: fmt.Errorf("error running Lua function \"callError\""), + Error: fmt.Errorf("error running Lua function \"main\""), }, { Src: ` local clock = os.clock -function sleepTenSecs(x) - local t0 = clock() - while clock() - t0 <= 10 do end +function main(x) + local t0 = clock() + while clock() - t0 <= 10 do end end `, - FunName: "sleepTenSecs", - Sandbox: false, + Scenario: 
"main", + Sandbox: false, Input: &models.Message{ Data: []byte("asdf"), PartitionKey: "some-test-key", @@ -512,28 +404,28 @@ end } for _, tt := range testCases { - t.Run(tt.FunName, func(t *testing.T) { + t.Run(tt.Scenario, func(t *testing.T) { assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: tt.Sandbox, SpMode: testSpMode, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) assert.NotNil(luaEngine) if err != nil { - t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } - if err := luaEngine.SmokeTest(tt.FunName); err != nil { + if err := luaEngine.SmokeTest(tt.Scenario); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := luaEngine.MakeFunction(tt.FunName) + transFunction := luaEngine.MakeFunction(tt.Scenario) s, f, e, i := transFunction(tt.Input, testInterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -569,7 +461,6 @@ func TestLuaEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string Sandbox bool Input *models.Message Expected map[string]*models.Message @@ -577,13 +468,12 @@ func TestLuaEngineMakeFunction_SpModeTrue_IntermediateNil(t *testing.T) { Error error }{ { - Scenario: "identity", + Scenario: "main", Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", Sandbox: false, Input: &models.Message{ Data: testLuaTsv, @@ -597,7 +487,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testLuaMap, @@ -607,16 +497,15 @@ end { Scenario: "filtering", Src: ` -function filterOut(input) - -- input is a lua table - local spData = input["Data"] - if 
spData["app_id"] == "myApp" then - return input; - end - return { FilterOut = true } +function main(input) + -- input is a lua table + local spData = input["Data"] + if spData["app_id"] == "myApp" then + return input; + end + return { FilterOut = true } end `, - FunName: "filterOut", Sandbox: false, Input: &models.Message{ Data: testLuaTsv, @@ -636,16 +525,15 @@ end { Scenario: "filteringOut_ignoresData", Src: ` -function filterOutIgnores(x) - local ret = { - FilterOut = true, - Data = "shouldNotAppear", - PartitionKey = "notThis" - } - return ret +function main(x) + local ret = { + FilterOut = true, + Data = "shouldNotAppear", + PartitionKey = "notThis" + } + return ret end `, - FunName: "filterOutIgnores", Sandbox: false, Input: &models.Message{ Data: testLuaTsv, @@ -665,11 +553,10 @@ end { Scenario: "non_Snowplow_enriched_to_failed", Src: ` -function willNotRun(x) - return x +function main(x) + return x end `, - FunName: "willNotRun", Sandbox: false, Input: &models.Message{ Data: []byte("nonSpEnrichedEvent"), @@ -689,11 +576,10 @@ end { Scenario: "return_wrong_type", Src: ` -function returnWrongType(x) - return 0 +function main(x) + return 0 end `, - FunName: "returnWrongType", Sandbox: true, Input: &models.Message{ Data: testLuaTsv, @@ -717,24 +603,24 @@ end assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: tt.Sandbox, SpMode: testSpMode, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) assert.NotNil(luaEngine) if err != nil { - t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } - if err := luaEngine.SmokeTest(tt.FunName); err != nil { + if err := luaEngine.SmokeTest(`main`); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := 
luaEngine.MakeFunction(tt.FunName) + transFunction := luaEngine.MakeFunction(`main`) s, f, e, i := transFunction(tt.Input, testInterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -769,7 +655,6 @@ func TestLuaEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string Sandbox bool Input *models.Message InterState interface{} @@ -780,17 +665,16 @@ func TestLuaEngineMakeFunction_IntermediateState_SpModeFalse(t *testing.T) { { Scenario: "intermediateState_EngineProtocol_Map", Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", Sandbox: true, Input: &models.Message{ Data: testLuaJSON, PartitionKey: "some-test-key", }, - InterState: &EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testLuaMap, @@ -803,7 +687,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testLuaMap, @@ -813,17 +697,16 @@ end { Scenario: "intermediateState_EngineProtocol_String", Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", Sandbox: true, Input: &models.Message{ Data: testLuaJSON, PartitionKey: "some-test-key", }, - InterState: &EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testLuaJSON), @@ -836,7 +719,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testLuaJSON), @@ -846,11 +729,10 @@ end { Scenario: "intermediateState_not_EngineProtocol_nonSpEnriched", Src: ` -function identity(x) - return x; +function main(x) + return x; end `, - FunName: "identity", Sandbox: true, Input: &models.Message{ Data: testLuaJSON, @@ -865,7 +747,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: 
&engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testLuaJSON), @@ -875,11 +757,10 @@ end { Scenario: "intermediateState_not_EngineProtocol_SpEnriched", Src: ` -function identity(x) - return x; +function main(x) + return x; end `, - FunName: "identity", Sandbox: true, Input: &models.Message{ Data: testLuaTsv, @@ -894,7 +775,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testLuaTsv), @@ -908,24 +789,24 @@ end assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: tt.Sandbox, SpMode: testSpMode, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) assert.NotNil(luaEngine) if err != nil { - t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } - if err := luaEngine.SmokeTest(tt.FunName); err != nil { + if err := luaEngine.SmokeTest(`main`); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := luaEngine.MakeFunction(tt.FunName) + transFunction := luaEngine.MakeFunction(`main`) s, f, e, i := transFunction(tt.Input, tt.InterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -961,7 +842,6 @@ func TestLuaEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string Sandbox bool Input *models.Message InterState interface{} @@ -972,17 +852,16 @@ func TestLuaEngineMakeFunction_IntermediateState_SpModeTrue(t *testing.T) { { Scenario: "intermediateState_EngineProtocol_Map", Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", Sandbox: true, Input: &models.Message{ Data: testLuaJSON, PartitionKey: "some-test-key", }, - InterState: 
&EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testLuaMap, @@ -995,7 +874,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testLuaMap, @@ -1005,17 +884,16 @@ end { Scenario: "intermediateState_EngineProtocol_String", Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", Sandbox: true, Input: &models.Message{ Data: testLuaJSON, PartitionKey: "some-test-key", }, - InterState: &EngineProtocol{ + InterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testLuaJSON), @@ -1028,7 +906,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: string(testLuaJSON), @@ -1038,11 +916,10 @@ end { Scenario: "intermediateState_notEngineProtocol_notSpEnriched", Src: ` -function willNotRun(x) - return x +function main(x) + return x end `, - FunName: "willNotRun", Sandbox: true, Input: &models.Message{ Data: testLuaJSON, @@ -1063,11 +940,10 @@ end { Scenario: "intermediateState_notEngineProtocol_SpEnriched", Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", Sandbox: true, Input: &models.Message{ Data: testLuaTsv, @@ -1082,7 +958,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "", Data: testLuaMap, @@ -1096,24 +972,24 @@ end assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: tt.Sandbox, SpMode: testSpMode, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) assert.NotNil(luaEngine) if err != nil { - t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) 
+ t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } - if err := luaEngine.SmokeTest(tt.FunName); err != nil { + if err := luaEngine.SmokeTest(`main`); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := luaEngine.MakeFunction(tt.FunName) + transFunction := luaEngine.MakeFunction(`main`) s, f, e, i := transFunction(tt.Input, tt.InterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -1148,7 +1024,6 @@ func TestLuaEngineMakeFunction_SetPK(t *testing.T) { testCases := []struct { Scenario string Src string - FunName string Sandbox bool SpMode bool Input *models.Message @@ -1159,12 +1034,11 @@ func TestLuaEngineMakeFunction_SetPK(t *testing.T) { { Scenario: "onlySetPk_spModeTrue", Src: ` -function onlySetPk(x) - x["PartitionKey"] = "newPk" - return x +function main(x) + x["PartitionKey"] = "newPk" + return x end `, - FunName: "onlySetPk", Sandbox: true, SpMode: true, Input: &models.Message{ @@ -1179,7 +1053,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "newPk", Data: testLuaMap, @@ -1189,12 +1063,11 @@ end { Scenario: "onlySetPk_spModeFalse", Src: ` -function onlySetPk(x) - x["PartitionKey"] = "newPk" - return x +function main(x) + x["PartitionKey"] = "newPk" + return x end `, - FunName: "onlySetPk", Sandbox: true, SpMode: false, Input: &models.Message{ @@ -1209,7 +1082,7 @@ end "filtered": nil, "failed": nil, }, - ExpInterState: &EngineProtocol{ + ExpInterState: &engineProtocol{ FilterOut: false, PartitionKey: "newPk", Data: string(testLuaTsv), @@ -1219,16 +1092,15 @@ end { Scenario: "filterOutIgnores", Src: ` -function filterOutIgnores(x) - local ret = { - FilterOut = true, - Data = "shouldNotAppear", - PartitionKey = "notThis" - } - return ret +function main(x) + local ret = { + FilterOut = true, + Data = "shouldNotAppear", + PartitionKey = "notThis" + } + return ret end `, - FunName: "filterOutIgnores", 
Sandbox: true, SpMode: true, Input: &models.Message{ @@ -1253,24 +1125,24 @@ end assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: tt.Sandbox, SpMode: tt.SpMode, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) assert.NotNil(luaEngine) if err != nil { - t.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } - if err := luaEngine.SmokeTest(tt.FunName); err != nil { + if err := luaEngine.SmokeTest(`main`); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - transFunction := luaEngine.MakeFunction(tt.FunName) + transFunction := luaEngine.MakeFunction(`main`) s, f, e, i := transFunction(tt.Input, testInterState) if !reflect.DeepEqual(i, tt.ExpInterState) { @@ -1310,25 +1182,25 @@ func TestLuaEngineSmokeTest(t *testing.T) { }{ { Src: ` -function identity(x) - return x +function main(x) + return x end `, - FunName: "identity", + FunName: "main", Sandbox: true, CompileError: nil, SmokeError: nil, }, { Src: ` -function notThisOne(x) - return "something" +function wrong_name(x) + return "something" end `, - FunName: "notExists", + FunName: "main", Sandbox: true, CompileError: nil, - SmokeError: fmt.Errorf("global Lua function not found"), + SmokeError: fmt.Errorf("global Lua function not found: \"main\""), }, { Src: ` @@ -1342,8 +1214,8 @@ local clock = os.clock }, { Src: ` -function syntaxError(x) - loca y = 0 +function main(x) + loca y = 0 end `, FunName: "syntaxError", @@ -1358,21 +1230,21 @@ end assert := assert.New(t) src := base64.StdEncoding.EncodeToString([]byte(tt.Src)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: tt.Sandbox, } - luaEngine, compileErr := newLuaEngine(luaConfig) + luaEngine, compileErr := 
NewLuaEngine(luaConfig) if compileErr != nil { if tt.CompileError == nil { - t.Fatalf("got unexpected error while creating newLuaEngine: %s", compileErr.Error()) + t.Fatalf("got unexpected error while creating NewLuaEngine: %s", compileErr.Error()) } if !strings.Contains(compileErr.Error(), tt.CompileError.Error()) { - t.Errorf("newLuaEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", + t.Errorf("NewLuaEngine error mismatch\nGOT_ERROR:\n%q\n does not contain\nEXPECTED_ERROR:\n%q", compileErr.Error(), tt.CompileError.Error()) } @@ -1416,38 +1288,37 @@ func TestLuaEngineWithBuiltins(t *testing.T) { } srcCode := ` -function identity(x) - return x +function main(x) + return x end ` - funcName := "identity" + funcname := "main" src := base64.StdEncoding.EncodeToString([]byte(srcCode)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: true, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) if err != nil { - t.Fatalf("newLuaEngine failed with error: %q", err) + t.Fatalf("NewLuaEngine failed with error: %q", err) } - if err := luaEngine.SmokeTest(funcName); err != nil { + if err := luaEngine.SmokeTest(funcname); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - luaFunc := luaEngine.MakeFunction(funcName) - setPkToAppID := NewSpEnrichedSetPkFunction("app_id") - spEnrichedToJSON := SpEnrichedToJSON + luaFunc := luaEngine.MakeFunction(funcname) + setPkToAppID := transform.NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := transform.SpEnrichedToJSON testCases := []struct { Name string - Transformation TransformationApplyFunction + Transformation transform.TransformationApplyFunction }{ { - Name: "first", - Transformation: NewTransformation( + Transformation: transform.NewTransformation( setPkToAppID, spEnrichedToJSON, luaFunc, @@ -1464,8 +1335,8 @@ end assert.NotNil(result) for i, res := range result.Result { exp := 
expectedGood[i] - assert.JSONEq(string(exp.Data), string(res.Data)) - assert.Equal(exp.PartitionKey, res.PartitionKey) + assert.JSONEq(string(res.Data), string(exp.Data)) + assert.Equal(res.PartitionKey, exp.PartitionKey) } }) @@ -1475,53 +1346,49 @@ end func TestLuaEngineWithBuiltinsSpModeFalse(t *testing.T) { srcCode := ` -function identity(x) - return x +function main(x) + return x end function setPk(x) - x["PartitionKey"] = "testKey" - return x + x["PartitionKey"] = "testKey" + return x end ` // Lua src := base64.StdEncoding.EncodeToString([]byte(srcCode)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: true, SpMode: false, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) if err != nil { - t.Fatalf("newLuaEngine failed with error: %q", err) + t.Fatalf("NewLuaEngine failed with error: %q", err) } - if err := luaEngine.SmokeTest("identity"); err != nil { - t.Fatalf("smoke-test failed with error: %q", err.Error()) - } - if err := luaEngine.SmokeTest("setPk"); err != nil { + if err := luaEngine.SmokeTest("main"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - luaFuncID := luaEngine.MakeFunction("identity") + luaFuncID := luaEngine.MakeFunction("main") luaFuncPk := luaEngine.MakeFunction("setPk") // Builtins - setPkToAppID := NewSpEnrichedSetPkFunction("app_id") - spEnrichedToJSON := SpEnrichedToJSON + setPkToAppID := transform.NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := transform.SpEnrichedToJSON testCases := []struct { Name string - Transformation TransformationApplyFunction + Transformation transform.TransformationApplyFunction Input []*models.Message ExpectedGood []*models.Message }{ { - Name: "identity0", Input: messages, - Transformation: NewTransformation( + Transformation: transform.NewTransformation( luaFuncID, setPkToAppID, spEnrichedToJSON, @@ -1542,9 +1409,8 @@ end }, }, { - Name: "identity2", Input: messages, - 
Transformation: NewTransformation( + Transformation: transform.NewTransformation( setPkToAppID, spEnrichedToJSON, luaFuncID, @@ -1565,9 +1431,8 @@ end }, }, { - Name: "setPk1", Input: messages, - Transformation: NewTransformation( + Transformation: transform.NewTransformation( setPkToAppID, luaFuncPk, spEnrichedToJSON, @@ -1599,8 +1464,8 @@ end for i, res := range result.Result { if i < len(tt.ExpectedGood) { exp := tt.ExpectedGood[i] - assert.JSONEq(string(exp.Data), string(res.Data)) - assert.Equal(exp.PartitionKey, res.PartitionKey) + assert.JSONEq(string(res.Data), string(exp.Data)) + assert.Equal(res.PartitionKey, exp.PartitionKey) } } }) @@ -1609,58 +1474,57 @@ end func TestLuaEngineWithBuiltinsSpModeTrue(t *testing.T) { srcCode := ` -function identity(x) - return x +function main(x) + return x end function setPk(x) - x["PartitionKey"] = "testKey" - return x + x["PartitionKey"] = "testKey" + return x end ` // Lua src := base64.StdEncoding.EncodeToString([]byte(srcCode)) - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 1, Sandbox: true, SpMode: true, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) if err != nil { - t.Fatalf("newLuaEngine failed with error: %q", err) + t.Fatalf("NewLuaEngine failed with error: %q", err) } - if err := luaEngine.SmokeTest("identity"); err != nil { + if err := luaEngine.SmokeTest("main"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } if err := luaEngine.SmokeTest("setPk"); err != nil { t.Fatalf("smoke-test failed with error: %q", err.Error()) } - luaFuncID := luaEngine.MakeFunction("identity") + luaFuncID := luaEngine.MakeFunction("main") luaFuncPk := luaEngine.MakeFunction("setPk") // Builtins - setPkToAppID := NewSpEnrichedSetPkFunction("app_id") - spEnrichedToJSON := SpEnrichedToJSON + setPkToAppID := transform.NewSpEnrichedSetPkFunction("app_id") + spEnrichedToJSON := transform.SpEnrichedToJSON testCases := 
[]struct { Name string - Transformation TransformationApplyFunction + Transformation transform.TransformationApplyFunction Input []*models.Message ExpectedGood []*models.Message }{ { - Name: "identity", Input: []*models.Message{ { Data: testLuaTsv, PartitionKey: "prevKey", }, }, - Transformation: NewTransformation( + Transformation: transform.NewTransformation( setPkToAppID, spEnrichedToJSON, luaFuncID, @@ -1673,14 +1537,13 @@ end }, }, { - Name: "setPk", Input: []*models.Message{ { Data: testLuaTsv, PartitionKey: "prevKey", }, }, - Transformation: NewTransformation( + Transformation: transform.NewTransformation( setPkToAppID, luaFuncPk, ), @@ -1692,14 +1555,13 @@ end }, }, { - Name: "mix", Input: []*models.Message{ { Data: testLuaTsv, PartitionKey: "prevKey", }, }, - Transformation: NewTransformation( + Transformation: transform.NewTransformation( setPkToAppID, luaFuncID, luaFuncPk, @@ -1724,8 +1586,8 @@ end for i, res := range result.Result { if i < len(tt.ExpectedGood) { exp := tt.ExpectedGood[i] - assert.JSONEq(string(exp.Data), string(res.Data)) - assert.Equal(exp.PartitionKey, res.PartitionKey) + assert.JSONEq(string(res.Data), string(exp.Data)) + assert.Equal(res.PartitionKey, exp.PartitionKey) } } }) @@ -1736,8 +1598,8 @@ func Benchmark_LuaEngine_Passthrough_Sandboxed(b *testing.B) { b.ReportAllocs() srcCode := ` -function identity(x) - return x +function main(x) + return x end ` src := base64.StdEncoding.EncodeToString([]byte(srcCode)) @@ -1746,18 +1608,18 @@ end Data: snowplowJSON1, PartitionKey: "some-test-key", } - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 5, Sandbox: true, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) if err != nil { - b.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + b.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } - transFunction := luaEngine.MakeFunction("identity") + transFunction := 
luaEngine.MakeFunction("main") for n := 0; n < b.N; n++ { transFunction(inputMsg, nil) @@ -1768,8 +1630,8 @@ func Benchmark_LuaEngine_Passthrough(b *testing.B) { b.ReportAllocs() srcCode := ` -function identity(x) - return x +function main(x) + return x end ` src := base64.StdEncoding.EncodeToString([]byte(srcCode)) @@ -1778,18 +1640,18 @@ end Data: snowplowJSON1, PartitionKey: "some-test-key", } - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 5, Sandbox: false, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) if err != nil { - b.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + b.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } - transFunction := luaEngine.MakeFunction("identity") + transFunction := luaEngine.MakeFunction("main") for n := 0; n < b.N; n++ { transFunction(inputMsg, nil) @@ -1800,11 +1662,11 @@ func Benchmark_LuaEngine_Passthrough_Json(b *testing.B) { b.ReportAllocs() srcCode := ` -function jsonIdentity(x) - local jsonObj, _ = json.decode(x) - local result, _ = json.encode(jsonObj) +function main(x) + local jsonObj, _ = json.decode(x) + local result, _ = json.encode(jsonObj) - return result + return result end ` src := base64.StdEncoding.EncodeToString([]byte(srcCode)) @@ -1813,15 +1675,15 @@ end Data: snowplowJSON1, PartitionKey: "some-test-key", } - luaConfig := &luaEngineConfig{ + luaConfig := &LuaEngineConfig{ SourceB64: src, RunTimeout: 5, Sandbox: false, } - luaEngine, err := newLuaEngine(luaConfig) + luaEngine, err := NewLuaEngine(luaConfig) if err != nil { - b.Fatalf("function newLuaEngine failed with error: %q", err.Error()) + b.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) } transFunction := luaEngine.MakeFunction("jsonIdentity") @@ -1831,24 +1693,6 @@ end } } -// Test helpers -func testLuaEngineAdapter(f func(c *luaEngineConfig) (*luaEngineConfig, error)) luaEngineAdapter { - return func(i 
interface{}) (interface{}, error) { - cfg, ok := i.(*luaEngineConfig) - if !ok { - return nil, fmt.Errorf("invalid input, expected luaEngineConfig") - } - - return f(cfg) - } - -} - -func testLuaEngineFunc(c *luaEngineConfig) (*luaEngineConfig, error) { - - return c, nil -} - // Helper function to compare messages and avoid using reflect.DeepEqual // on errors. Compares all but the error field of messages. func assertMessagesCompareLua(t *testing.T, act, exp *models.Message) { diff --git a/pkg/transform/engine/engine_test_variables.go b/pkg/transform/engine/engine_test_variables.go new file mode 100644 index 00000000..147623e9 --- /dev/null +++ b/pkg/transform/engine/engine_test_variables.go @@ -0,0 +1,33 @@ +package engine + +import ( + "github.com/snowplow-devops/stream-replicator/pkg/models" +) + +var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 
2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) +var snowplowJSON1 = []byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv2 = []byte(`test-data2 pc 2019-05-10 14:40:32.392 2019-05-10 14:40:31.105 2019-05-10 14:40:30.218 transaction_item 5071169f-3050-473f-b03f-9748319b1ef2 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 68220ade-307b-4898-8e25-c4c8ac92f1d7 transaction item58 35.87 1 python-requests/2.21.0 2019-05-10 14:40:30.000 
{"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:31.105 com.snowplowanalytics.snowplow transaction_item jsonschema 1-0-0 `) +var snowplowJSON2 = []byte(`{"app_id":"test-data2","collector_tstamp":"2019-05-10T14:40:31.105Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:31.105Z","dvce_created_tstamp":"2019-05-10T14:40:30.218Z","dvce_sent_tstamp":"2019-05-10T14:40:30Z","etl_tstamp":"2019-05-10T14:40:32.392Z","event":"transaction_item","event_format":"jsonschema","event_id":"5071169f-3050-473f-b03f-9748319b1ef2","event_name":"transaction_item","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"68220ade-307b-4898-8e25-c4c8ac92f1d7","platform":"pc","ti_orderid":"transaction\u003cbuilt-in function input\u003e","ti_price":35.87,"ti_quantity":1,"ti_sku":"item58","user_id":"user\u003cbuilt-in function 
input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) +var snowplowJSON3 = []byte(`{"app_id":"test-data3","collector_tstamp":"2019-05-10T14:40:29.576Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}],"derived_tstamp":"2019-05-10T14:40:29.576Z","dvce_created_tstamp":"2019-05-10T14:40:29.204Z","dvce_sent_tstamp":"2019-05-10T14:40:29Z","etl_tstamp":"2019-05-10T14:40:30.836Z","event":"page_view","event_format":"jsonschema","event_id":"e8aef68d-8533-45c6-a672-26a0f01be9bd","event_name":"page_view","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"b66c4a12-8584-4c7a-9a5d-7c96f59e2556","page_title":"landing-page","page_url":"www.demo-site.com/campaign-landing-page","page_urlpath":"www.demo-site.com/campaign-landing-page","page_urlport":80,"platform":"pc","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) + +var nonSnowplowString = []byte(`not a snowplow event`) + +var messages = []*models.Message{ + { + Data: snowplowTsv1, + PartitionKey: "some-key", + }, + { + Data: snowplowTsv2, + PartitionKey: "some-key1", + }, + { + Data: snowplowTsv3, + PartitionKey: "some-key2", + }, + { + Data: nonSnowplowString, + PartitionKey: "some-key4", + }, +} diff --git a/pkg/transform/snowplow_enriched_filter.go b/pkg/transform/snowplow_enriched_filter.go index b52a5bd5..0efcbe21 100644 --- a/pkg/transform/snowplow_enriched_filter.go +++ b/pkg/transform/snowplow_enriched_filter.go @@ -7,10 +7,14 @@ package transform import ( - "errors" "fmt" + "github.com/dlclark/regexp2" + "log" "regexp" "strings" + "time" + + "github.com/pkg/errors" "github.com/snowplow-devops/stream-replicator/pkg/models" 
"github.com/snowplow/snowplow-golang-analytics-sdk/analytics" @@ -45,57 +49,30 @@ func findSpEnrichedFilterValue(queriedField, parsedEventName, eventVer, field st return valueFound, nil } -func evaluateSpEnrichedFilter(valuesToMatch string, valuesFound []interface{}, isNegationFilter, shouldKeepMessage *bool) { - for _, valueToMatch := range strings.Split(valuesToMatch, "|") { - for _, v := range valuesFound { - if fmt.Sprintf("%v", v) == valueToMatch { - // Once config value is matched once, change shouldKeepMessage, and stop looking for matches - if *isNegationFilter { - *shouldKeepMessage = false - } else { - *shouldKeepMessage = true - } - return - - } +func evaluateSpEnrichedFilter(valuesFound []interface{}, regex string, regexTimeout int) bool { + re, err := regexp2.Compile(regex, 0) + re.MatchTimeout = time.Duration(regexTimeout) * time.Second + if err != nil { + log.Fatal(errors.Wrap(err, `error compiling regex for filter`)) + } + for _, v := range valuesFound { + if ok, _ := re.MatchString(fmt.Sprintf("%v", v)); ok { + return true } } + return false } -// createSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. -// The filterconfig should describe the conditions for including a message. -// For example "aid=abc|def" includes all events with app IDs of abc or def, and filters out the rest. -// aid!=abc|def includes all events whose app IDs do not match abc or def, and filters out the rest. 
-func createSpEnrichedFilterFunction(filterConfig string, isUnstructEvent bool, isContext bool) (TransformationFunction, error) { - // This regex prevents whitespace characters in the value provided - regex := `\S+(!=|==)[^\s\|]+((?:\|[^\s|]+)*)$` - re := regexp.MustCompile(regex) - - if !(re.MatchString(filterConfig)) { - // If invalid, return an error which will be returned by the main function - return nil, errors.New("invalid filter function config, must be of the format {field name}=={value}[|{value}|...] or {field name}!={value}[|{value}|...]") - } - - // Check for a negation condition first - keyValues := strings.SplitN(filterConfig, "!=", 2) - - // isNegationFilter determines whether a match sets shouldKeepMessage to true or false, and consequently whether message is kept or filtered - var isNegationFilter bool - if len(keyValues) > 1 { - // If negation condition is found, default to keep the message, and change this when match found - isNegationFilter = true - } else { - // Otherwise, look for affirmation condition, default to drop the message and change when match found - keyValues = strings.SplitN(filterConfig, "==", 2) - isNegationFilter = false - } - +// createSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event +// and a regex declared by the user. 
+func createSpEnrichedFilterFunction(field, regex string, regexTimeout int, isUnstructEvent bool) (TransformationFunction, error) { return func(message *models.Message, intermediateState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { - // Start by resetting shouldKeepMessage to isNegationFilter - shouldKeepMessage := isNegationFilter - + if regexTimeout == 0 { + // default timeout for regex is 10 seconds + regexTimeout = 10 + } // Evaluate intermediateState to parsedEvent - parsedMessage, parseErr := intermediateAsSpEnrichedParsed(intermediateState, message) + parsedMessage, parseErr := IntermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { message.SetError(parseErr) return nil, nil, message, nil @@ -103,11 +80,11 @@ func createSpEnrichedFilterFunction(filterConfig string, isUnstructEvent bool, i // This regex retrieves the path fields // (e.g. field1.field2[0].field3 -> [field1, field2, 0, field3]) - regex = `\w+` - re = regexp.MustCompile(regex) + regexWords := `\w+` + re := regexp.MustCompile(regexWords) // separate the path string into words using regex - path := re.FindAllString(keyValues[0], -1) + path := re.FindAllString(field, -1) separatedPath := make([]string, len(path)-1) for idx, pathField := range path[1:] { separatedPath[idx] = pathField @@ -145,7 +122,7 @@ func createSpEnrichedFilterFunction(filterConfig string, isUnstructEvent bool, i path[0], parsedEventName, eventMajorVer, - keyValues[0], + field, parsedMessage, convertPathToInterfaces(separatedPath), ) @@ -155,7 +132,7 @@ func createSpEnrichedFilterFunction(filterConfig string, isUnstructEvent bool, i } // evaluate whether the found value passes the filter, determining if the message should be kept - evaluateSpEnrichedFilter(keyValues[1], valueFound, &isNegationFilter, &shouldKeepMessage) + shouldKeepMessage := evaluateSpEnrichedFilter(valueFound, regex, regexTimeout) // if message is not to be kept, return it as a filtered message to be 
acked in the main function if !shouldKeepMessage { @@ -168,16 +145,16 @@ func createSpEnrichedFilterFunction(filterConfig string, isUnstructEvent bool, i } // NewSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. -func NewSpEnrichedFilterFunction(filterConfig string) (TransformationFunction, error) { - return createSpEnrichedFilterFunction(filterConfig, false, false) +func NewSpEnrichedFilterFunction(field, regex string, regexTimeout int) (TransformationFunction, error) { + return createSpEnrichedFilterFunction(field, regex, regexTimeout, false) } // NewSpEnrichedFilterFunctionContext returns a TransformationFunction for filtering a context -func NewSpEnrichedFilterFunctionContext(filterConfig string) (TransformationFunction, error) { - return createSpEnrichedFilterFunction(filterConfig, false, true) +func NewSpEnrichedFilterFunctionContext(field, regex string, regexTimeout int) (TransformationFunction, error) { + return createSpEnrichedFilterFunction(field, regex, regexTimeout, false) } // NewSpEnrichedFilterFunctionUnstructEvent returns a TransformationFunction for filtering an unstruct_event -func NewSpEnrichedFilterFunctionUnstructEvent(filterConfig string) (TransformationFunction, error) { - return createSpEnrichedFilterFunction(filterConfig, true, false) +func NewSpEnrichedFilterFunctionUnstructEvent(field, regex string, regexTimeout int) (TransformationFunction, error) { + return createSpEnrichedFilterFunction(field, regex, regexTimeout, true) } diff --git a/pkg/transform/snowplow_enriched_filter_test.go b/pkg/transform/snowplow_enriched_filter_test.go index 034c29bc..2e679286 100644 --- a/pkg/transform/snowplow_enriched_filter_test.go +++ b/pkg/transform/snowplow_enriched_filter_test.go @@ -32,7 +32,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { } // Single value cases - aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id==test-data3") + aidFilterFuncKeep, _ := 
NewSpEnrichedFilterFunction("app_id", "test-data3", 0) aidKeepIn, aidKeepOut, fail, _ := aidFilterFuncKeep(&messageGood, nil) @@ -40,7 +40,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(aidKeepOut) assert.Nil(fail) - aidFilterFuncDiscard, _ := NewSpEnrichedFilterFunction("app_id==failThis") + aidFilterFuncDiscard, _ := NewSpEnrichedFilterFunction("app_id", "failThis", 10) aidDiscardIn, aidDiscardOut, fail2, _ := aidFilterFuncDiscard(&messageGood, nil) @@ -49,7 +49,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail2) // int value - urlPrtFilterFuncKeep, _ := NewSpEnrichedFilterFunction("page_urlport==80") + urlPrtFilterFuncKeep, _ := NewSpEnrichedFilterFunction("page_urlport", "80", 10) urlPrtKeepIn, urlPrtKeepOut, fail, _ := urlPrtFilterFuncKeep(&messageGood, nil) @@ -58,7 +58,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail) // Multiple value cases - aidFilterFuncKeepWithMultiple, _ := NewSpEnrichedFilterFunction("app_id==someotherValue|test-data3") + aidFilterFuncKeepWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "someotherValue|test-data3", 10) aidMultipleNegationFailedIn, aidMultipleKeepOut, fail3, _ := aidFilterFuncKeepWithMultiple(&messageGood, nil) @@ -66,7 +66,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(aidMultipleKeepOut) assert.Nil(fail3) - aidFilterFuncDiscardWithMultiple, _ := NewSpEnrichedFilterFunction("app_id==someotherValue|failThis") + aidFilterFuncDiscardWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "someotherValue|failThis", 10) aidNegationMultipleIn, aidMultipleDiscardOut, fail3, _ := aidFilterFuncDiscardWithMultiple(&messageGood, nil) @@ -76,7 +76,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { // Single value negation cases - aidFilterFuncNegationDiscard, _ := NewSpEnrichedFilterFunction("app_id!=test-data3") + aidFilterFuncNegationDiscard, _ := NewSpEnrichedFilterFunction("app_id", "^((?!test-data3).)*$", 10) 
aidNegationIn, aidNegationOut, fail4, _ := aidFilterFuncNegationDiscard(&messageGood, nil) @@ -84,7 +84,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Equal(snowplowTsv3, aidNegationOut.Data) assert.Nil(fail4) - aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id!=failThis") + aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id", "^((?!failThis).)*$", 10) aidNegationFailedIn, aidNegationFailedOut, fail5, _ := aidFilterFuncNegationKeep(&messageGood, nil) @@ -93,7 +93,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail5) // Multiple value negation cases - aidFilterFuncNegationDiscardMultiple, _ := NewSpEnrichedFilterFunction("app_id!=someotherValue|test-data1|test-data2|test-data3") + aidFilterFuncNegationDiscardMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^((?!someotherValue|test-data1|test-data2|test-data3).)*$", 10) aidNegationMultipleIn, aidNegationMultipleOut, fail6, _ := aidFilterFuncNegationDiscardMultiple(&messageGood, nil) @@ -101,7 +101,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Equal(snowplowTsv3, aidNegationMultipleOut.Data) assert.Nil(fail6) - aidFilterFuncNegationKeptMultiple, _ := NewSpEnrichedFilterFunction("app_id!=someotherValue|failThis") + aidFilterFuncNegationKeptMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^((?!someotherValue|failThis).)*$", 10) aidMultipleNegationFailedIn, aidMultipleNegationFailedOut, fail7, _ := aidFilterFuncNegationKeptMultiple(&messageGood, nil) @@ -110,7 +110,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail7) // Filters on a nil field - txnFilterFunctionAffirmation, _ := NewSpEnrichedFilterFunction("txn_id==something") + txnFilterFunctionAffirmation, _ := NewSpEnrichedFilterFunction("txn_id", "something", 10) nilAffirmationIn, nilAffirmationOut, fail8, _ := txnFilterFunctionAffirmation(&messageGood, nil) @@ -118,16 +118,16 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { 
assert.Equal(snowplowTsv3, nilAffirmationOut.Data) assert.Nil(fail8) - txnFilterFunctionNegation, _ := NewSpEnrichedFilterFunction("txn_id!=something") + txnFilterFunctionNegation, _ := NewSpEnrichedFilterFunction("txn_id", "^((?!something).)*$", 10) nilNegationIn, nilNegationOut, fail8, _ := txnFilterFunctionNegation(&messageGood, nil) - assert.Equal(snowplowTsv3, nilNegationIn.Data) - assert.Nil(nilNegationOut) + assert.Nil(nilNegationIn) + assert.Equal(snowplowTsv3, nilNegationOut.Data) assert.Nil(fail8) // context filter success - contextFuncKeep, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3==testValue") + contextFuncKeep, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3", "testValue", 10) contextKeepIn, contextKeepOut, fail9, _ := contextFuncKeep(&messageGood, nil) @@ -136,7 +136,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail9) // context filter success (integer value) - contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3==1") + contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3", "1", 10) contextKeepIn, contextKeepOut, fail9, _ = contextFuncKeep(&messageGoodInt, nil) @@ -145,7 +145,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail9) // context filter failure - contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_2.test1.test2[0].test3==testValue") + contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_2.test1.test2[0].test3", "testValue", 10) contextKeepIn, contextKeepOut, fail9, _ = contextFuncKeep(&messageGood, nil) @@ -154,7 +154,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail9) // event filter success, filtered event name - eventFilterFunCkeep, _ := 
NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart_1.sku==item41") + eventFilterFunCkeep, _ := NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart_1.sku", "item41", 10) eventKeepIn, eventKeepOut, fail10, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) @@ -163,7 +163,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail10) // event filter success, filtered event name, no event ver - eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.sku==item41") + eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.sku", "item41", 10) eventKeepIn, eventKeepOut, fail10, _ = eventFilterFunCkeep(&messageWithUnstructEvent, nil) @@ -172,7 +172,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail10) // event filter failure, wrong event name - eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_wrong_name.sku==item41") + eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_wrong_name.sku", "item41", 10) eventKeepIn, eventKeepOut, fail11, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) @@ -181,7 +181,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail11) // event filter failure, field not found - eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.ska==item41") + eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.ska", "item41", 10) eventNoFieldIn, eventNoFieldOut, fail12, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) @@ -190,46 +190,6 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.NotNil(fail12) } -func TestNewSpEnrichedFilterFunction_Error(t *testing.T) { - filterError := `invalid filter function config, must be of the format {field name}=={value}[|{value}|...] 
or {field name}!={value}[|{value}|...]` - - testCases := []struct { - Name string - Arg string - }{ - { - Name: "incompatible_arg", - Arg: "incompatibleArg", - }, - { - Name: "empty_arg", - Arg: "", - }, - { - Name: "wrong_arg_pipe", - Arg: "app_id==abc|", - }, - { - Name: "wrong_arg_syntax", - Arg: "!=abc", - }, - } - - for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { - assert := assert.New(t) - - filterFunc, err := NewSpEnrichedFilterFunction(tt.Arg) - - assert.Nil(filterFunc) - assert.NotNil(err) - if err != nil { - assert.Equal(filterError, err.Error()) - } - }) - } -} - func TestSpEnrichedFilterFunction_Slice(t *testing.T) { assert := assert.New(t) @@ -251,7 +211,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc, _ := NewSpEnrichedFilterFunction("app_id==test-data1") + filterFunc, _ := NewSpEnrichedFilterFunction("app_id", "test-data1", 10) filter1 := NewTransformation(filterFunc) filter1Res := filter1(messages) @@ -279,7 +239,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc2, _ := NewSpEnrichedFilterFunction("app_id==test-data1|test-data2") + filterFunc2, _ := NewSpEnrichedFilterFunction("app_id", "test-data1|test-data2", 10) filter2 := NewTransformation(filterFunc2) filter2Res := filter2(messages) @@ -295,7 +255,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc3, _ := NewSpEnrichedFilterFunction("app_id!=test-data1|test-data2") + filterFunc3, _ := NewSpEnrichedFilterFunction("app_id", "^((?!test-data1|test-data2).)*$", 10) filter3 := NewTransformation(filterFunc3) filter3Res := filter3(messages) @@ -304,3 +264,13 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { assert.Equal(1, len(filter3Res.Invalid)) } + +func TestEvaluateSpEnrichedFilter(t *testing.T) { + assert := assert.New(t) + + valuesFound := []interface{}{"NO", "maybe", "yes"} + assert.True(evaluateSpEnrichedFilter(valuesFound, "yes", 10)) + + valuesFound = []interface{}{"NO", 
"maybe", "nope"} + assert.False(evaluateSpEnrichedFilter(valuesFound, "yes", 10)) +} diff --git a/pkg/transform/snowplow_enriched_set_pk.go b/pkg/transform/snowplow_enriched_set_pk.go index 0a3323e8..a928554c 100644 --- a/pkg/transform/snowplow_enriched_set_pk.go +++ b/pkg/transform/snowplow_enriched_set_pk.go @@ -16,7 +16,7 @@ import ( func NewSpEnrichedSetPkFunction(pkField string) TransformationFunction { return func(message *models.Message, intermediateState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { // Evalute intermediateState to parsedEvent - parsedMessage, parseErr := intermediateAsSpEnrichedParsed(intermediateState, message) + parsedMessage, parseErr := IntermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { message.SetError(parseErr) return nil, nil, message, nil diff --git a/pkg/transform/snowplow_enriched_to_json.go b/pkg/transform/snowplow_enriched_to_json.go index b0fce26c..020b2884 100644 --- a/pkg/transform/snowplow_enriched_to_json.go +++ b/pkg/transform/snowplow_enriched_to_json.go @@ -13,7 +13,7 @@ import ( // SpEnrichedToJSON is a specific transformation implementation to transform good enriched data within a message to Json func SpEnrichedToJSON(message *models.Message, intermediateState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { // Evalute intermediateState to parsedEvent - parsedMessage, parseErr := intermediateAsSpEnrichedParsed(intermediateState, message) + parsedMessage, parseErr := IntermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { message.SetError(parseErr) return nil, nil, message, nil diff --git a/pkg/transform/snowplow_enriched_util.go b/pkg/transform/snowplow_enriched_util.go index 79bbb2a4..6c6468fd 100644 --- a/pkg/transform/snowplow_enriched_util.go +++ b/pkg/transform/snowplow_enriched_util.go @@ -13,9 +13,9 @@ import ( "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" ) -// 
intermediateAsSpEnrichedParsed checks whether we have a Snowplow Analytics SDK ParsedEvent in the intermediateState -// If we do, we return it. If it don't, we parse the message.Data and return it the result. -func intermediateAsSpEnrichedParsed(intermediateState interface{}, message *models.Message) (analytics.ParsedEvent, error) { +// IntermediateAsSpEnrichedParsed returns the intermediate state as a ParsedEvent if valid or parses +// the message as an event +func IntermediateAsSpEnrichedParsed(intermediateState interface{}, message *models.Message) (analytics.ParsedEvent, error) { var parsedMessage, ok = intermediateState.(analytics.ParsedEvent) var parseErr error if ok { diff --git a/pkg/transform/snowplow_enriched_util_test.go b/pkg/transform/snowplow_enriched_util_test.go index 276f73d6..cc27bb88 100644 --- a/pkg/transform/snowplow_enriched_util_test.go +++ b/pkg/transform/snowplow_enriched_util_test.go @@ -19,25 +19,25 @@ func TestIntermediateAsSpEnrichedParsed(t *testing.T) { assert := assert.New(t) // case 1: no intermediate state - res1, err1 := intermediateAsSpEnrichedParsed(nil, &models.Message{Data: snowplowTsv1}) + res1, err1 := IntermediateAsSpEnrichedParsed(nil, &models.Message{Data: snowplowTsv1}) assert.Equal(spTsv1Parsed, res1) assert.Nil(err1) // case 2: intermediate state provided as ParsedEvent - res2, err2 := intermediateAsSpEnrichedParsed(spTsv2Parsed, &models.Message{Data: snowplowTsv2}) + res2, err2 := IntermediateAsSpEnrichedParsed(spTsv2Parsed, &models.Message{Data: snowplowTsv2}) assert.Equal(spTsv2Parsed, res2) assert.Nil(err2) // case 3: intermediate state provided as some other type - res3, err3 := intermediateAsSpEnrichedParsed("not a ParsedEvent", &models.Message{Data: snowplowTsv3}) + res3, err3 := IntermediateAsSpEnrichedParsed("not a ParsedEvent", &models.Message{Data: snowplowTsv3}) assert.Equal(spTsv3Parsed, res3) assert.Nil(err3) // case 4: message not parseable - res4, err4 := intermediateAsSpEnrichedParsed(nil, 
&models.Message{Data: []byte("Not a snowplow event")}) + res4, err4 := IntermediateAsSpEnrichedParsed(nil, &models.Message{Data: []byte("Not a snowplow event")}) assert.Nil(res4) assert.NotNil(err4) diff --git a/pkg/transform/transformconfig/transform_config.go b/pkg/transform/transformconfig/transform_config.go index ac2fca1c..362f153f 100644 --- a/pkg/transform/transformconfig/transform_config.go +++ b/pkg/transform/transformconfig/transform_config.go @@ -8,265 +8,256 @@ package transformconfig import ( "fmt" - "strings" + "regexp" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/transform" + "github.com/snowplow-devops/stream-replicator/pkg/transform/engine" ) -// GetTransformations builds and returns transformationApplyFunction -// from the transformations configured. -func GetTransformations(c configProvider) (transform.TransformationApplyFunction, error) { - registry, err := getLayerRegistry() - if err != nil { - return nil, err - } - - transMessage := c.ProvideTransformMessage() - transUnits, err := parseTransformations(transMessage) - if err != nil { - return nil, err - } +// Transformation represents a transformation's configuration +type Transformation struct { + Description string `hcl:"description,optional"` + Field string `hcl:"field,optional"` + Regex string `hcl:"regex,optional"` + RegexTimeout int `hcl:"regex_timeout,optional"` + // for JS and Lua transformations + SourceB64 string `hcl:"source_b64,optional"` + TimeoutSec int `hcl:"timeout_sec,optional"` + Sandbox bool `hcl:"sandbox,optional"` + SpMode bool `hcl:"snowplow_mode,optional"` + DisableSourceMaps bool `hcl:"disable_source_maps,optional"` + + Engine engine.Engine + Name string +} - funcs := make([]transform.TransformationFunction, 0, len(transUnits)) - for _, trans := range transUnits { - switch trans.name { - // Builtin transformations - case "spEnrichedToJson": - funcs = 
append(funcs, transform.SpEnrichedToJSON) - case "spEnrichedSetPk": - funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(trans.option)) - case "spEnrichedFilter": - filterFunc, err := transform.NewSpEnrichedFilterFunction(trans.option) - if err != nil { - return nil, err - } - funcs = append(funcs, filterFunc) - case "spEnrichedFilterContext": - filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(trans.option) - if err != nil { - return nil, err - } - funcs = append(funcs, filterFunc) - case "spEnrichedFilterUnstructEvent": - filterFunc, err := transform.NewSpEnrichedFilterFunctionUnstructEvent(trans.option) - if err != nil { - return nil, err - } - funcs = append(funcs, filterFunc) - // Custom transformations - case "lua": - luaFunc, err := mkEngineFunction(c, trans, registry) - if err != nil { - return nil, err - } - funcs = append(funcs, luaFunc) - case "js": - jsFunc, err := mkEngineFunction(c, trans, registry) - if err != nil { - return nil, err - } - funcs = append(funcs, jsFunc) +// TransformationAdapter is an adapter for transformations to be used +// as pluggable components. It implements the Pluggable interface. 
+type TransformationAdapter func(i interface{}) (interface{}, error) - // we don't need `case 'none'` or `default` - // (see parseTransformations) - } - } - return transform.NewTransformation(funcs...), nil +// ProvideDefault returns an empty Transformation to be used as default +func (t TransformationAdapter) ProvideDefault() (interface{}, error) { + return &Transformation{}, nil } -// configProvider is the interface a config must implement to configure the -// stream-replicator transformations -type configProvider interface { - ProvideTransformMessage() string - ProvideTransformLayerName() string - ProvideTransformComponent(p config.Pluggable) (interface{}, error) +// Create implements the ComponentCreator interface +func (t TransformationAdapter) Create(i interface{}) (interface{}, error) { + return t(i) } -// transformationUnit is a helper struct type for transformations according to -// the transformation message that is being used to configure the sequence of -// transformations. It denotes the distinction we use when we split by ':', -// e.g. 'spEnrichedSetPk:{option}' -type transformationUnit struct { - name string - option string +// TransformationConfigFunction creates a Transformation from a TransformationConfig +func TransformationConfigFunction(c *Transformation) (*Transformation, error) { + return c, nil } -// layerRegistry is a helper type to map names to the supported Pluggable custom -// transformation layer engines. -type layerRegistry map[string]config.Pluggable - -// getLayerRegistry returns the registry of supported Pluggable transform layers. -func getLayerRegistry() (layerRegistry, error) { - luaLayerPlug, ok := transform.LuaLayer().(config.Pluggable) - if !ok { - return nil, fmt.Errorf("non pluggable lua transformation layer") - } +// AdaptTransformationsFunc returns an TransformationsAdapter. 
+func AdaptTransformationsFunc(f func(c *Transformation) (*Transformation, error)) TransformationAdapter { + return func(i interface{}) (interface{}, error) { + cfg, ok := i.(*Transformation) + if !ok { + return nil, errors.New("invalid input, expected Transformation") + } - jsLayerPlug, ok := transform.JSLayer().(config.Pluggable) - if !ok { - return nil, fmt.Errorf("non pluggable js transformation layer") + return f(cfg) } - - return map[string](config.Pluggable){ - "lua": luaLayerPlug, - "js": jsLayerPlug, - }, nil } -// parseTransformations validates the message_transformation according to rules. +// ValidateTransformations validates the transformation according to rules. // The reason for this function is to make the validation part explicit and // separate it from GetTransformations. -func parseTransformations(input string) ([]*transformationUnit, error) { - if input == "" { - return nil, fmt.Errorf("invalid message transformation found; empty string") - } - - transformations := strings.Split(input, ",") - out := make([]*transformationUnit, 0, len(transformations)) - for _, trans := range transformations { - splitTrans := strings.Split(trans, ":") - name := splitTrans[0] // safe - - switch name { +func ValidateTransformations(transformations []*Transformation) []error { + var validationErrors []error + for idx, transformation := range transformations { + switch transformation.Name { case "spEnrichedToJson": - // option rules - if len(splitTrans) > 1 { - return nil, fmt.Errorf("invalid message transformation found; unexpected colon after %q", name) - } - - out = append(out, &transformationUnit{name: name}) + continue case "spEnrichedSetPk": - // option rules - if len(splitTrans) != 2 { - return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedSetPk:{option}' but got %q", trans) - } - - if splitTrans[1] == "" { - return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedSetPk'") + if transformation.Field 
== `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedSetPk, empty field`, idx)) + continue } - - out = append(out, &transformationUnit{ - name: name, - option: splitTrans[1], - }) case "spEnrichedFilter": - // option rules - if len(splitTrans) != 2 { - return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedFilter:{option}' but got %q", trans) + if transformation.Field != `` && transformation.Regex != `` { + _, err := regexp.Compile(transformation.Regex) + if err != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, regex does not compile. error: %v`, idx, err)) + continue + } + continue } - - if splitTrans[1] == "" { - return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedFilter'") + if transformation.Field == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty field`, idx)) + } + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty regex`, idx)) } - - out = append(out, &transformationUnit{ - name: name, - option: splitTrans[1], - }) case "spEnrichedFilterContext": - // option rules - if len(splitTrans) != 2 { - return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedFilterContext:{option}' but got %q", trans) + if transformation.Field != `` && transformation.Regex != `` { + _, err := regexp.Compile(transformation.Regex) + if err != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, regex does not compile. 
error: %v`, idx, err)) + continue + } + continue } - - if splitTrans[1] == "" { - return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedFilterContext'") + if transformation.Field == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty field`, idx)) + } + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty regex`, idx)) } - - out = append(out, &transformationUnit{ - name: name, - option: splitTrans[1], - }) case "spEnrichedFilterUnstructEvent": - // option rules - if len(splitTrans) != 2 { - return nil, fmt.Errorf("invalid message transformation found; expected 'spEnrichedFilterUnstructEvent:{option}' but got %q", trans) + if transformation.Field != `` && transformation.Regex != `` { + _, err := regexp.Compile(transformation.Regex) + if err != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, regex does not compile. 
error: %v`, idx, err)) + continue + } + continue } - - if splitTrans[1] == "" { - return nil, fmt.Errorf("invalid message transformation found; empty option for 'spEnrichedFilterUnstructEvent'") + if transformation.Field == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty field`, idx)) } - - out = append(out, &transformationUnit{ - name: name, - option: splitTrans[1], - }) - case "lua": - // option rules - if len(splitTrans) != 2 { - return nil, fmt.Errorf("invalid message transformation found; expected 'lua:{option}' but got %q", trans) + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty regex`, idx)) } - - if splitTrans[1] == "" { - return nil, fmt.Errorf("invalid message transformation found; empty option for 'lua'") + case "lua": + if transformation.Engine.SmokeTest(`main`) != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error in lua transformation #%d, main() smoke test failed`, idx)) + continue } - - out = append(out, &transformationUnit{ - name: name, - option: splitTrans[1], - }) case "js": - // option rules - if len(splitTrans) != 2 { - return nil, fmt.Errorf("invalid message transformation found; expected 'js:{option}' but got %q", trans) + if transformation.Engine.SmokeTest(`main`) != nil { + validationErrors = append(validationErrors, fmt.Errorf(`validation error in js transformation #%d, main() smoke test failed`, idx)) + continue } - - if splitTrans[1] == "" { - return nil, fmt.Errorf("invalid message transformation found; empty option for 'js'") - } - - out = append(out, &transformationUnit{ - name: name, - option: splitTrans[1], - }) - case "none": - // option rule - if len(splitTrans) > 1 { - return nil, fmt.Errorf("invalid message transformation found; unexpected colon after %q", name) - } - // none is treated like identity, so ignoring - case "": - 
// this could be caused by some trailing/excessive comma - // differentiating from default in order to generate a - // more helpful error message - return nil, fmt.Errorf("empty transformation found; please check the message transformation syntax") default: - return nil, fmt.Errorf("invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk', 'spEnrichedFilter', 'spEnrichedFilterContext', 'spEnrichedFilterUnstructEvent', 'lua', 'js' or 'none' but got %q", name) + validationErrors = append(validationErrors, fmt.Errorf(`invalid transformation name: %s`, transformation.Name)) } } - - return out, nil + return validationErrors } -// mkEngineFunction is a helper method used in GetTransformations +// MkEngineFunction is a helper method used in GetTransformations // It creates, smoke-tests and returns a custom transformation function. -func mkEngineFunction(c configProvider, trans *transformationUnit, registry layerRegistry) (transform.TransformationFunction, error) { - useLayerName := c.ProvideTransformLayerName() - - // validate that the expected layer is specified in the configuration - if useLayerName != trans.name { - return nil, fmt.Errorf("missing configuration for the custom transformation layer specified: %q", trans.name) +func MkEngineFunction(trans *Transformation) (transform.TransformationFunction, error) { + if trans.Engine != nil { + return trans.Engine.MakeFunction(`main`), nil } - plug, ok := registry[trans.name] - if !ok { - return nil, fmt.Errorf("unknown transformation layer specified") - } + return nil, errors.New(`could not find engine for transformation`) +} - component, err := c.ProvideTransformComponent(plug) - if err != nil { - return nil, err - } +// GetTransformations builds and returns transformationApplyFunction +// from the transformations configured. 
+func GetTransformations(c *config.Config) (transform.TransformationApplyFunction, error) { + transformations := make([]*Transformation, len(c.Data.Transformations)) + for idx, transformation := range c.Data.Transformations { + var enginePlug config.Pluggable + var eng engine.Engine + decoderOpts := &config.DecoderOptions{ + Input: transformation.Use.Body, + } + if transformation.Use.Name == `lua` { + enginePlug = engine.AdaptLuaEngineFunc(engine.LuaEngineConfigFunction) + component, err := c.CreateComponent(enginePlug, decoderOpts) + if err != nil { + return nil, err + } + + engine, ok := component.(engine.Engine) + if !ok { + return nil, errors.New("cannot create lua engine") + } + eng = engine + } + if transformation.Use.Name == `js` { + enginePlug = engine.AdaptJSEngineFunc(engine.JSEngineConfigFunction) + component, err := c.CreateComponent(enginePlug, decoderOpts) + if err != nil { + return nil, err + } + + engine, ok := component.(engine.Engine) + if !ok { + return nil, errors.New("cannot create js engine") + } + eng = engine + } - if engine, ok := component.(transform.Engine); ok { - err := engine.SmokeTest(trans.option) + plug := AdaptTransformationsFunc(TransformationConfigFunction) + + component, err := c.CreateComponent(plug, &config.DecoderOptions{ + Input: transformation.Use.Body, + }) if err != nil { return nil, err } - return engine.MakeFunction(trans.option), nil + trans, ok := component.(*Transformation) + if !ok { + return nil, fmt.Errorf(`error parsing transformation: %s`, transformation.Use.Name) + } + if eng != nil { + trans.Engine = eng + } + trans.Name = transformation.Use.Name + transformations[idx] = trans + } + + validationErrors := ValidateTransformations(transformations) + if validationErrors != nil { + for _, err := range validationErrors { + log.Errorf("validation error: %v", err) + } + return nil, errors.New(`transformations validation returned errors`) + } + + funcs := make([]transform.TransformationFunction, 0, 
len(transformations)) + for _, transformation := range transformations { + switch transformation.Name { + // Builtin transformations + case "spEnrichedToJson": + funcs = append(funcs, transform.SpEnrichedToJSON) + case "spEnrichedSetPk": + funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(transformation.Field)) + case "spEnrichedFilter": + filterFunc, err := transform.NewSpEnrichedFilterFunction(transformation.Field, transformation.Regex, transformation.RegexTimeout) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + case "spEnrichedFilterContext": + filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(transformation.Field, transformation.Regex, transformation.RegexTimeout) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + case "spEnrichedFilterUnstructEvent": + filterFunc, err := transform.NewSpEnrichedFilterFunctionUnstructEvent(transformation.Field, transformation.Regex, transformation.RegexTimeout) + if err != nil { + return nil, err + } + funcs = append(funcs, filterFunc) + // Custom transformations + case "lua": + luaFunc, err := MkEngineFunction(transformation) + if err != nil { + return nil, err + } + funcs = append(funcs, luaFunc) + case "js": + jsFunc, err := MkEngineFunction(transformation) + if err != nil { + return nil, err + } + funcs = append(funcs, jsFunc) + } } - return nil, fmt.Errorf("could not interpret custom transformation configuration") + return transform.NewTransformation(funcs...), nil } diff --git a/pkg/transform/transformconfig/transform_config_test.go b/pkg/transform/transformconfig/transform_config_test.go index 786336c4..b442a9dd 100644 --- a/pkg/transform/transformconfig/transform_config_test.go +++ b/pkg/transform/transformconfig/transform_config_test.go @@ -7,282 +7,323 @@ package transformconfig import ( + "encoding/base64" + "errors" "fmt" "path/filepath" + "strings" "testing" "github.com/stretchr/testify/assert" 
"github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/models" - "github.com/snowplow-devops/stream-replicator/pkg/transform" + "github.com/snowplow-devops/stream-replicator/pkg/transform/engine" ) -func TestParseTransformations_InvalidMessage(t *testing.T) { +func TestMkEngineFunction(t *testing.T) { + var eng engine.Engine + eng = &engine.JSEngine{ + Code: nil, + RunTimeout: 15, + SpMode: false, + } testCases := []struct { - Name string - Message string - ExpError string + Name string + Engines []engine.Engine + Transformation *Transformation + ExpectedErr error }{ { - Name: "message_empty", - Message: "", - ExpError: "invalid message transformation found; empty string", - }, - { - Name: "message_not_found", - Message: "fake", - ExpError: "invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk', 'spEnrichedFilter', 'spEnrichedFilterContext', 'spEnrichedFilterUnstructEvent', 'lua', 'js' or 'none' but got \"fake\"", - }, - { - Name: "message_option_none_a", - Message: "none:wrong", - ExpError: "invalid message transformation found; unexpected colon after \"none\"", - }, - { - Name: "message_option_none_b", - Message: "none:", - ExpError: "invalid message transformation found; unexpected colon after \"none\"", - }, - { - Name: "message_option_spEnrichedToJson", - Message: "spEnrichedToJson:wrong", - ExpError: "invalid message transformation found; unexpected colon after \"spEnrichedToJson\"", + Name: "no engine", + Engines: nil, + Transformation: &Transformation{ + Name: "js", + }, + ExpectedErr: fmt.Errorf("could not find engine for transformation"), }, { - Name: "message_no_option_spEnrichedSetPk", - Message: "spEnrichedSetPk", - ExpError: "invalid message transformation found; expected 'spEnrichedSetPk:{option}' but got \"spEnrichedSetPk\"", + Name: "success", + Engines: []engine.Engine{eng}, + Transformation: &Transformation{ + Name: "js", + Engine: eng, + }, }, + } + + for _, tt := 
range testCases { + t.Run(tt.Name, func(t *testing.T) { + assert := assert.New(t) + + fun, err := MkEngineFunction(tt.Transformation) + + if tt.ExpectedErr != nil { + assert.Equal(tt.ExpectedErr.Error(), err.Error()) + assert.Nil(fun) + } else { + assert.Nil(err) + assert.NotNil(fun) + } + }) + } +} + +func TestValidateTransformations(t *testing.T) { + srcCode := ` +function main(x) + local jsonObj, _ = json.decode(x) + local result, _ = json.encode(jsonObj) + + return result +end +` + src := base64.StdEncoding.EncodeToString([]byte(srcCode)) + + luaConfig := &engine.LuaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + luaEngine, err := engine.NewLuaEngine(luaConfig) + assert.NotNil(t, luaEngine) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + srcCode = ` +function notMain(x) + return x +end +` + src = base64.StdEncoding.EncodeToString([]byte(srcCode)) + + luaConfig = &engine.LuaEngineConfig{ + SourceB64: src, + RunTimeout: 5, + Sandbox: false, + } + + luaEngineNoMain, err := engine.NewLuaEngine(luaConfig) + assert.NotNil(t, luaEngineNoMain) + if err != nil { + t.Fatalf("function NewLuaEngine failed with error: %q", err.Error()) + } + + srcCode = ` +function main(x) { + return x; +} +` + src = base64.StdEncoding.EncodeToString([]byte(srcCode)) + jsConfig := &engine.JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + } + + jsEngine, err := engine.NewJSEngine(jsConfig) + assert.NotNil(t, jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + srcCode = ` +function notMain(x) { + return x; +} +` + src = base64.StdEncoding.EncodeToString([]byte(srcCode)) + jsConfig = &engine.JSEngineConfig{ + SourceB64: src, + RunTimeout: 5, + } + + jsEngineNoMain, err := engine.NewJSEngine(jsConfig) + assert.NotNil(t, jsEngine) + if err != nil { + t.Fatalf("function NewJSEngine failed with error: %q", err.Error()) + } + + testCases := []struct { + Name string + 
Transformations []*Transformation + ExpectedErrs []error + }{ { - Name: "message_empty_option_spEnrichedSetPk", - Message: "spEnrichedSetPk:", - ExpError: "invalid message transformation found; empty option for 'spEnrichedSetPk'", + Name: "invalid name", + Transformations: []*Transformation{{ + Name: "wrongName", + }}, + ExpectedErrs: []error{fmt.Errorf("invalid transformation name: wrongName")}, }, { - Name: "message_no_option_spEnrichedFilter", - Message: "spEnrichedFilter:too:wrong", - ExpError: "invalid message transformation found; expected 'spEnrichedFilter:{option}' but got \"spEnrichedFilter:too:wrong\"", + Name: "spEnrichedSetPk success", + Transformations: []*Transformation{{ + Name: "spEnrichedSetPk", + Field: `app_id`, + }}, }, { - Name: "message_empty_option_spEnrichedFilter", - Message: "spEnrichedFilter:", - ExpError: "invalid message transformation found; empty option for 'spEnrichedFilter'", + Name: "spEnrichedSetPk no field", + Transformations: []*Transformation{{ + Name: "spEnrichedSetPk", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedSetPk, empty field")}, }, { - Name: "message_no_option_spEnrichedFilterContext", - Message: "spEnrichedFilterContext:too:wrong", - ExpError: "invalid message transformation found; expected 'spEnrichedFilterContext:{option}' but got \"spEnrichedFilterContext:too:wrong\"", + Name: "spEnrichedFilter success", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + Field: "app_id", + Regex: "test.+", + }}, }, { - Name: "message_empty_option_spEnrichedFilterContext", - Message: "spEnrichedFilterContext:", - ExpError: "invalid message transformation found; empty option for 'spEnrichedFilterContext'", + Name: "spEnrichedFilter regexp does not compile", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + Field: "app_id", + Regex: "?(?=-)", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, }, { - Name: "message_no_option_spEnrichedFilterUnstructEvent", - Message: "spEnrichedFilterUnstructEvent:too:wrong", - ExpError: "invalid message transformation found; expected 'spEnrichedFilterUnstructEvent:{option}' but got \"spEnrichedFilterUnstructEvent:too:wrong\"", + Name: "spEnrichedFilter empty field", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + Regex: "test.+", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, empty field")}, }, { - Name: "message_empty_option_spEnrichedFilterUnstructEvent", - Message: "spEnrichedFilterUnstructEvent:", - ExpError: "invalid message transformation found; empty option for 'spEnrichedFilterUnstructEvent'", + Name: "spEnrichedFilter empty regex", + Transformations: []*Transformation{{ + Name: "spEnrichedFilter", + Field: "app_id", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, empty regex")}, }, { - Name: "message_no_option_lua", - Message: "lua", - ExpError: "invalid message transformation found; expected 'lua:{option}' but got \"lua\"", + Name: "spEnrichedFilterContext success", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + Field: "contexts_nl_basjes_yauaa_context_1.test1.test2[0]", + Regex: "test.+", + }}, }, { - Name: "message_empty_option_lua", - Message: "lua:", - ExpError: "invalid message transformation found; empty option for 'lua'", + Name: "spEnrichedFilterContext regexp does not compile", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + Field: "contexts_nl_basjes_yauaa_context_1.test1.test2[0]", + Regex: "?(?=-)", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, }, { - Name: "message_no_option_js", - Message: "js", - ExpError: "invalid message transformation found; expected 'js:{option}' but got \"js\"", + Name: "spEnrichedFilterContext empty field", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + Regex: "test.+", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, empty field")}, }, { - Name: "message_empty_option_js", - Message: "js:", - ExpError: "invalid message transformation found; empty option for 'js'", + Name: "spEnrichedFilterContext empty regex", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterContext", + Field: "contexts_nl_basjes_yauaa_context_1.test1.test2[0]", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, empty regex")}, }, { - Name: "invalid_transformation_syntax_a", - Message: "spEnrichedToJson,", - ExpError: "empty transformation found; please check the message transformation syntax", + Name: "spEnrichedFilterUnstructEvent success", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + Field: "unstruct_event_add_to_cart_1.sku", + Regex: "test.+", + }}, }, { - Name: "invalid_transformation_syntax_b", - Message: ":", - ExpError: "empty transformation found; please check the message transformation syntax", + Name: "spEnrichedFilterUnstructEvent regexp does not compile", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + Field: "unstruct_event_add_to_cart_1.sku", + Regex: "?(?=-)", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, }, { - Name: "invalid_transformation_syntax_c", - Message: ",", - ExpError: "empty transformation found; please check the message transformation syntax", + Name: "spEnrichedFilterUnstructEvent empty field", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + Regex: "test.+", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty field")}, }, - } - - for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { - assert := assert.New(t) - - parsed, err := parseTransformations(tt.Message) - assert.Nil(parsed) - assert.NotNil(err) - if err != nil { - assert.Equal(tt.ExpError, err.Error()) - } - }) - } -} - -func TestGetTransformations_MissingLayerConfig(t *testing.T) { - fixturesDir := "../../../config/test-fixtures" - testCases := []struct { - Filename string - TransMessage string - ExpectedError string - }{ { - Filename: "transform-invalid-layer-lua.hcl", - TransMessage: "lua:fun", - ExpectedError: "missing configuration for the custom transformation layer specified: \"lua\"", + Name: "spEnrichedFilterUnstructEvent empty regex", + Transformations: []*Transformation{{ + Name: "spEnrichedFilterUnstructEvent", + Field: "unstruct_event_add_to_cart_1.sku", + }}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty regex")}, }, { - Filename: "transform-invalid-layer-js.hcl", - TransMessage: "js:fun", - ExpectedError: "missing configuration for the custom transformation layer specified: \"js\"", + Name: "lua success", + Transformations: []*Transformation{{ + Name: "lua", + Engine: luaEngine, + }}, }, - } - - for _, tt := range testCases { - t.Run(tt.Filename, func(t *testing.T) { - assert := assert.New(t) - - filename := filepath.Join(fixturesDir, tt.Filename) - t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) - - c, err := config.NewConfig() - assert.NotNil(c) - if err != 
nil { - t.Fatalf("function NewConfig failed with error: %q", err.Error()) - } - - assert.Equal(tt.TransMessage, c.Data.Transform.Message) - - transformation, err := GetTransformations(c) - assert.Nil(transformation) - assert.NotNil(err) - if err != nil { - assert.Equal(tt.ExpectedError, err.Error()) - } - }) - } -} - -func TestGetTransformations_Builtins(t *testing.T) { - testCases := []struct { - Name string - Provider configProvider - ExpectedErr error - }{ { - Name: "invalid_transform_message", - Provider: &testConfigProvider{ - message: "tooWrong", - }, - ExpectedErr: fmt.Errorf("invalid transformation found; expected one of 'spEnrichedToJson', 'spEnrichedSetPk', 'spEnrichedFilter', 'spEnrichedFilterContext', 'spEnrichedFilterUnstructEvent', 'lua', 'js' or 'none' but got \"tooWrong\""), + Name: "lua main() smoke test failed", + Transformations: []*Transformation{{ + Name: "lua", + Engine: luaEngineNoMain, + }}, + ExpectedErrs: []error{fmt.Errorf("validation error in lua transformation #0, main() smoke test failed")}, }, { - Name: "spEnrichedToJson", - Provider: &testConfigProvider{ - message: "spEnrichedToJson", - }, - ExpectedErr: nil, + Name: "js success", + Transformations: []*Transformation{{ + Name: "js", + Engine: jsEngine, + }}, }, { - Name: "spEnrichedSetPk", - Provider: &testConfigProvider{ - message: "spEnrichedSetPk:app_id", - }, - ExpectedErr: nil, + Name: "js main() smoke test failed", + Transformations: []*Transformation{{ + Name: "js", + Engine: jsEngineNoMain, + }}, + ExpectedErrs: []error{fmt.Errorf("validation error in js transformation #0, main() smoke test failed")}, }, { - Name: "spEnrichedFilter", - Provider: &testConfigProvider{ - message: "spEnrichedFilter:app_id==xyz", - }, - ExpectedErr: nil, - }, - { - Name: "spEnrichedFilterContext", - Provider: &testConfigProvider{ - message: "spEnrichedFilterContext:contexts_x_x_x_1.yz==xyz", - }, - ExpectedErr: nil, - }, - { - Name: "spEnrichedFilterUnstructEvent", - Provider: &testConfigProvider{ 
- message: "spEnrichedFilterUnstructEvent:unstruct_event_x_x_x_1.yz==xyz", - }, - ExpectedErr: nil, - }, - } - - for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { - assert := assert.New(t) - - applyFun, err := GetTransformations(tt.Provider) - - if tt.ExpectedErr != nil { - assert.NotNil(err) - if err != nil { - assert.Equal(tt.ExpectedErr.Error(), err.Error()) - } - assert.Nil(applyFun) - } else { - assert.Nil(err) - assert.NotNil(applyFun) - } - }) - } -} - -func TestGetTransformations_Custom(t *testing.T) { - testCases := []struct { - Name string - Provider configProvider - ExpectedErr error - }{ - { - Name: "lua", - Provider: &testConfigProvider{ - message: "lua:fun", - layerName: "lua", - component: &testEngine{ - smokeTestErr: nil, - mkFunction: testTransformationFunction, + Name: "multiple validation errors", + Transformations: []*Transformation{ + { + Name: "js", + Engine: jsEngineNoMain, }, - }, - ExpectedErr: nil, - }, - { - Name: "js", - Provider: &testConfigProvider{ - message: "js:fun", - layerName: "js", - component: &testEngine{ - smokeTestErr: nil, - mkFunction: testTransformationFunction, + { + Name: "spEnrichedFilter", + Regex: "test.+", + }, + // a successful transformation mixed in to test transformation counter + { + Name: "spEnrichedToJson", + }, + { + Name: "spEnrichedSetPk", }, }, - ExpectedErr: nil, + ExpectedErrs: []error{ + fmt.Errorf("validation error in js transformation #0, main() smoke test failed"), + fmt.Errorf("validation error #1 spEnrichedFilter, empty field"), + fmt.Errorf("validation error #3 spEnrichedSetPk, empty field"), + }, }, } @@ -290,213 +331,200 @@ func TestGetTransformations_Custom(t *testing.T) { t.Run(tt.Name, func(t *testing.T) { assert := assert.New(t) - applyFun, err := GetTransformations(tt.Provider) + valErrs := ValidateTransformations(tt.Transformations) - if tt.ExpectedErr != nil { - assert.NotNil(err) - if err != nil { - assert.Equal(tt.ExpectedErr.Error(), err.Error()) + if 
tt.ExpectedErrs != nil { + for idx, valErr := range valErrs { + assert.Equal(valErr.Error(), tt.ExpectedErrs[idx].Error()) } - assert.Nil(applyFun) } else { - assert.Nil(err) - assert.NotNil(applyFun) + assert.Nil(valErrs) } }) } } -func TestLayerRegistry(t *testing.T) { - assert := assert.New(t) - - registry, err := getLayerRegistry() - assert.Nil(err) - - _, okLua := registry["lua"] - assert.True(okLua) - - _, okJs := registry["js"] - assert.True(okJs) -} +func TestEnginesAndTransformations(t *testing.T) { + var messageJSCompileErr = &models.Message{ + Data: snowplowTsv1, + PartitionKey: "some-key", + } + messageJSCompileErr.SetError(errors.New(`failed initializing JavaScript runtime: "could not assert as function: \"main\""`)) -func TestMkEngineFunction(t *testing.T) { + testFixPath := "../../../config/test-fixtures" testCases := []struct { - Name string - Provider *testConfigProvider - Unit *transformationUnit - Registry layerRegistry - ExpectedErr error + Description string + File string + ExpectedTransforms []Transformation + ExpectedMessages expectedMessages + CompileErr string }{ { - Name: "missing_layer_config", - Provider: &testConfigProvider{ - layerName: "test", - component: "irrelevant", - err: nil, - }, - Unit: &transformationUnit{ - name: "noTest", - option: "testFun", - }, - Registry: map[string]config.Pluggable{}, - ExpectedErr: fmt.Errorf("missing configuration for the custom transformation layer specified: \"noTest\""), - }, - { - Name: "unknown_layer", - Provider: &testConfigProvider{ - layerName: "test", - component: "irrelevant", - err: nil, + Description: "simple transform success", + File: "transform-js-simple.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, }, - Unit: &transformationUnit{ - name: "test", - option: "testFun", - }, - Registry: map[string]config.Pluggable{}, - 
ExpectedErr: fmt.Errorf("unknown transformation layer specified"), }, { - Name: "provider_error", - Provider: &testConfigProvider{ - layerName: "test", - component: nil, - err: fmt.Errorf("some error"), - }, - Unit: &transformationUnit{ - name: "test", - option: "testFun", + Description: "simple transform with js compile error", + File: "transform-js-error.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{messageJSCompileErr}, }, - Registry: map[string]config.Pluggable{ - "test": &testPluggable{}, + CompileErr: `SyntaxError`, + }, + { + Description: `mixed success`, + File: "transform-mixed.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowJSON1Mixed, + PartitionKey: "some-key", + }}, }, - ExpectedErr: fmt.Errorf("some error"), }, { - Name: "no_engine_component", - Provider: &testConfigProvider{ - layerName: "test", - component: "notAnEngine", - err: nil, - }, - Unit: &transformationUnit{ - name: "test", - option: "testFun", + Description: `mixed success, order test`, + File: "transform-mixed-order.hcl", + // initial app_id should be changed to 1, then if the app_id is 1, it should be changed to 2, then 3 + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowJSON1Order, + PartitionKey: "some-key", + }}, }, - Registry: map[string]config.Pluggable{ - "test": &testPluggable{}, - }, - ExpectedErr: fmt.Errorf("could not interpret custom transformation configuration"), }, { - Name: "engine_smoke_test_error", - Provider: &testConfigProvider{ - layerName: "test", - component: &testEngine{ - smokeTestErr: fmt.Errorf("smoke error"), - mkFunction: testTransformationFunction, - }, - err: nil, - }, - Unit: &transformationUnit{ - name: 
"test", - option: "testFun", - }, - Registry: map[string]config.Pluggable{ - "test": &testPluggable{}, - }, - ExpectedErr: fmt.Errorf("smoke error"), - }, - { - Name: "happy_path", - Provider: &testConfigProvider{ - layerName: "test", - component: &testEngine{ - smokeTestErr: nil, - mkFunction: testTransformationFunction, - }, - err: nil, + Description: `mixed with error`, + File: "transform-mixed-error.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowJSON1, + PartitionKey: "some-key", + }}, + After: []*models.Message{messageJSCompileErr}, }, - Unit: &transformationUnit{ - name: "test", - option: "testFun", + CompileErr: `SyntaxError`, + }, + { + Description: `mixed with filter success`, + File: "transform-mixed-filtered.hcl", + ExpectedMessages: expectedMessages{ + Before: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, + After: []*models.Message{{ + Data: snowplowTsv1, + PartitionKey: "some-key", + }}, }, - Registry: map[string]config.Pluggable{ - "test": &testPluggable{}, - }, - ExpectedErr: nil, }, } for _, tt := range testCases { - t.Run(tt.Name, func(t *testing.T) { + t.Run(tt.Description, func(t *testing.T) { assert := assert.New(t) - fun, err := mkEngineFunction( - tt.Provider, - tt.Unit, - tt.Registry, - ) + filename := filepath.Join(testFixPath, tt.File) + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) - if tt.ExpectedErr != nil { - assert.NotNil(err) - if err != nil { - assert.Equal(tt.ExpectedErr.Error(), err.Error()) - } - assert.Nil(fun) - } else { - assert.Nil(err) - assert.NotNil(fun) + c, err := config.NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) } - }) - } -} -// Helpers -type testConfigProvider struct { - message string - layerName string - component interface{} - err error -} - -// *testConfigProvider implements configProvider -func (tc *testConfigProvider) ProvideTransformMessage() string { - return 
tc.message -} + // get transformations, and run the transformations on the expected messages + tr, err := GetTransformations(c) + if tt.CompileErr != `` { + assert.True(strings.HasPrefix(err.Error(), tt.CompileErr)) + assert.Nil(tr) + return + } -func (tc *testConfigProvider) ProvideTransformLayerName() string { - return tc.layerName -} + if err != nil { + t.Fatalf(err.Error()) + } -func (tc *testConfigProvider) ProvideTransformComponent(p config.Pluggable) (interface{}, error) { - return tc.component, tc.err -} + result := tr(tt.ExpectedMessages.Before) + assert.NotNil(result) + assert.Equal(int(result.ResultCount+result.FilteredCount+result.InvalidCount), len(tt.ExpectedMessages.After)) -type testPluggable struct{} + // check result for successfully transformed messages + for idx, resultMessage := range result.Result { + assert.Equal(resultMessage.Data, tt.ExpectedMessages.After[idx].Data) + } -// *testPluggable implements config.Pluggable -func (tp *testPluggable) ProvideDefault() (interface{}, error) { - return "placeholder", nil -} + // check errors for invalid messages + for idx, resultMessage := range result.Invalid { + assert.Equal(resultMessage.GetError(), tt.ExpectedMessages.After[idx].GetError()) + } -func (tp *testPluggable) Create(i interface{}) (interface{}, error) { - return "placeholder", nil + // check result for transformed messages in case of filtered results + if result.FilteredCount != 0 { + assert.NotNil(result.Filtered) + for idx, resultMessage := range result.Filtered { + assert.Equal(resultMessage.Data, tt.ExpectedMessages.After[idx].Data) + } + } + }) + } } -type testEngine struct { - smokeTestErr error - mkFunction transform.TransformationFunction +type expectedMessages struct { + Before []*models.Message + After []*models.Message } -// *testEngine implements transform.Engine -func (te *testEngine) SmokeTest(funName string) error { - return te.smokeTestErr +var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 
14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) +var snowplowJSON1 = []byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowTsv2 = []byte(`test-data2 pc 2019-05-10 14:40:32.392 2019-05-10 14:40:31.105 2019-05-10 14:40:30.218 transaction_item 5071169f-3050-473f-b03f-9748319b1ef2 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 68220ade-307b-4898-8e25-c4c8ac92f1d7 transaction item58 35.87 1 python-requests/2.21.0 2019-05-10 14:40:30.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:31.105 com.snowplowanalytics.snowplow transaction_item jsonschema 1-0-0 `) +var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) + +var nonSnowplowString = []byte(`not a snowplow event`) + +var messages = []*models.Message{ + { + Data: snowplowTsv1, + PartitionKey: "some-key", + }, + { + Data: snowplowTsv2, + PartitionKey: "some-key1", + }, + { + Data: snowplowTsv3, + PartitionKey: "some-key2", + }, + { + Data: nonSnowplowString, + PartitionKey: "some-key4", + }, } -func (te *testEngine) MakeFunction(funName string) transform.TransformationFunction { - return te.mkFunction -} 
+// snowplowJSON1 with 3 transformations applied +var snowplowJSON1Mixed = []byte(`Hello:{"app_id":"again","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) -func testTransformationFunction(*models.Message, interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { - return nil, nil, nil, nil -} +// snowplowJSON1 with 3 transformations applied, for order test +var snowplowJSON1Order = []byte(`{"app_id":"3","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) From 3f486f2c2ffe96aefb695cdb9434cb5b9dd262f8 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Mon, 4 Jul 2022 13:55:42 +0100 Subject: [PATCH 16/25] Fix latency reporting with no transformations (closes #108) --- pkg/models/observer_buffer_test.go | 12 +++--------- pkg/models/target_write_result.go | 5 ++++- pkg/models/target_write_result_test.go | 4 ---- 3 files changed, 7 insertions(+), 14 deletions(-) diff --git a/pkg/models/observer_buffer_test.go b/pkg/models/observer_buffer_test.go index 59c3f483..aed0f979 100644 --- a/pkg/models/observer_buffer_test.go +++ b/pkg/models/observer_buffer_test.go @@ -173,11 +173,8 @@ func TestObserverBuffer_Basic(t *testing.T) { 
assert.Equal("TargetResults:1,MsgFiltered:0,MsgSent:1,MsgFailed:0,OversizedTargetResults:0,OversizedMsgSent:0,OversizedMsgFailed:0,InvalidTargetResults:0,InvalidMsgSent:0,InvalidMsgFailed:0,MaxProcLatency:240000,MaxMsgLatency:3000000,MaxFilterLatency:0,MaxTransformLatency:120000,SumTransformLatency:120000,SumProcLatency:240000,SumMsgLatency:3000000", b.String()) } -// TestObserverBuffer_Basic is a basic version of the above test, stripping away all but one event -// It was created in order to demonstrate an issue with misreporting of transformLatency when we don't have transformations. -// https://github.com/snowplow-devops/stream-replicator/issues/108 -// Commented out but should serve as illustration of and unit test for that bug. -/* +// TestObserverBuffer_Basic is a basic version of the above test, stripping away all but one event. +// It exists purely to simplify reasoning through bugs. func TestObserverBuffer_BasicNoTransform(t *testing.T) { assert := assert.New(t) @@ -199,12 +196,10 @@ func TestObserverBuffer_BasicNoTransform(t *testing.T) { b.AppendWrite(r) b.AppendWrite(nil) - // b.AppendWriteOversized(r) b.AppendWriteOversized(nil) - // b.AppendWriteInvalid(r) b.AppendWriteInvalid(nil) - fr := NewFilterResultWithTime(nil, timeNow) + fr := newFilterResultWithTime(nil, timeNow) b.AppendFiltered(fr) @@ -241,4 +236,3 @@ func TestObserverBuffer_BasicNoTransform(t *testing.T) { assert.Equal("TargetResults:1,MsgFiltered:0,MsgSent:1,MsgFailed:0,OversizedTargetResults:0,OversizedMsgSent:0,OversizedMsgFailed:0,InvalidTargetResults:0,InvalidMsgSent:0,InvalidMsgFailed:0,MaxProcLatency:240000,MaxMsgLatency:3000000,MaxFilterLatency:0,MaxTransformLatency:0,SumTransformLatency:0,SumProcLatency:240000,SumMsgLatency:3000000", b.String()) } -*/ diff --git a/pkg/models/target_write_result.go b/pkg/models/target_write_result.go index 08ab9604..ebfd0182 100644 --- a/pkg/models/target_write_result.go +++ b/pkg/models/target_write_result.go @@ -98,7 +98,10 @@ func 
NewTargetWriteResultWithTime(sent []*Message, failed []*Message, oversized } sumMessageLatency += messageLatency - transformLatency := msg.TimeTransformed.Sub(msg.TimePulled) + var transformLatency time.Duration + if !msg.TimeTransformed.IsZero() { + transformLatency = msg.TimeTransformed.Sub(msg.TimePulled) + } if r.MaxTransformLatency < transformLatency { r.MaxTransformLatency = transformLatency } diff --git a/pkg/models/target_write_result_test.go b/pkg/models/target_write_result_test.go index e563b91c..1853a9f0 100644 --- a/pkg/models/target_write_result_test.go +++ b/pkg/models/target_write_result_test.go @@ -164,9 +164,6 @@ func TestNewTargetWriteResult_WithMessages(t *testing.T) { } // TestNewTargetWriteResult_NoTransformation tests that reporting of statistics is as it should be when we don't have a timeTransformed -// At time of writing there is a bug whereby these will report negative transformLatency stats: https://github.com/snowplow-devops/stream-replicator/issues/108 -// Commenting this test out for the time being, it can serve as an illustration of the problem and unit test for fixing that bug -/* func TestNewTargetWriteResult_NoTransformation(t *testing.T) { assert := assert.New(t) @@ -211,4 +208,3 @@ func TestNewTargetWriteResult_NoTransformation(t *testing.T) { assert.Equal(time.Duration(0), r.MinTransformLatency) assert.Equal(time.Duration(0), r.AvgTransformLatency) } -*/ From 5d9f0ed6371211e5f61418bc9b0ecd75f1aa7c16 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Mon, 4 Jul 2022 17:39:02 +0100 Subject: [PATCH 17/25] Make setting of EventHub Partition Key configurable (closes #148) --- config/component_test.go | 2 ++ config/test-fixtures/target-eventhub-extended.hcl | 1 + pkg/target/eventhub.go | 8 +++++++- pkg/target/eventhub_test.go | 8 +++----- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/config/component_test.go b/config/component_test.go index 511cef66..d70548f7 100644 --- a/config/component_test.go +++ 
b/config/component_test.go @@ -46,6 +46,7 @@ func TestCreateTargetComponentHCL(t *testing.T) { ChunkMessageLimit: 500, ContextTimeoutInSeconds: 20, BatchByteLimit: 1048576, + SetEHPartitionKey: true, }, }, { @@ -60,6 +61,7 @@ func TestCreateTargetComponentHCL(t *testing.T) { ChunkMessageLimit: 501, ContextTimeoutInSeconds: 21, BatchByteLimit: 1000000, + SetEHPartitionKey: false, }, }, { diff --git a/config/test-fixtures/target-eventhub-extended.hcl b/config/test-fixtures/target-eventhub-extended.hcl index 7dd4fd1c..d8b2bcfe 100644 --- a/config/test-fixtures/target-eventhub-extended.hcl +++ b/config/test-fixtures/target-eventhub-extended.hcl @@ -10,5 +10,6 @@ target { chunk_message_limit = 501 context_timeout_in_seconds = 21 batch_byte_limit = 1000000 + set_eh_partition_key = false } } diff --git a/pkg/target/eventhub.go b/pkg/target/eventhub.go index a69d665b..81e25d44 100644 --- a/pkg/target/eventhub.go +++ b/pkg/target/eventhub.go @@ -29,6 +29,7 @@ type EventHubConfig struct { ChunkMessageLimit int `hcl:"chunk_message_limit,optional" env:"TARGET_EVENTHUB_CHUNK_MESSAGE_LIMIT"` ContextTimeoutInSeconds int `hcl:"context_timeout_in_seconds,optional" env:"TARGET_EVENTHUB_CONTEXT_TIMEOUT_SECONDS"` BatchByteLimit int `hcl:"batch_byte_limit,optional" env:"TARGET_EVENTHUB_BATCH_BYTE_LIMIT"` + SetEHPartitionKey bool `hcl:"set_eh_partition_key,optional" env:"TARGET_EVENTHUB_SET_EH_PK"` } // EventHubTarget holds a new client for writing messages to Azure EventHub @@ -41,6 +42,7 @@ type EventHubTarget struct { chunkMessageLimit int contextTimeoutInSeconds int batchByteLimit int + setEHPartitionKey bool log *log.Entry } @@ -62,6 +64,7 @@ func newEventHubTargetWithInterfaces(client clientIface, cfg *EventHubConfig) *E chunkMessageLimit: cfg.ChunkMessageLimit, contextTimeoutInSeconds: cfg.ContextTimeoutInSeconds, batchByteLimit: cfg.BatchByteLimit, + setEHPartitionKey: cfg.SetEHPartitionKey, log: log.WithFields(log.Fields{"target": "eventhub", "cloud": "Azure", "namespace": 
cfg.EventHubNamespace, "eventhub": cfg.EventHubName}), } @@ -120,6 +123,7 @@ func (f EventHubTargetAdapter) ProvideDefault() (interface{}, error) { ChunkMessageLimit: 500, ContextTimeoutInSeconds: 20, BatchByteLimit: 1048576, + SetEHPartitionKey: true, } return cfg, nil @@ -177,7 +181,9 @@ func (eht *EventHubTarget) process(messages []*models.Message) (*models.TargetWr ehBatch := make([]*eventhub.Event, messageCount) for i, msg := range messages { ehEvent := eventhub.NewEvent(msg.Data) - ehEvent.PartitionKey = &msg.PartitionKey + if eht.setEHPartitionKey { + ehEvent.PartitionKey = &msg.PartitionKey + } ehBatch[i] = ehEvent } diff --git a/pkg/target/eventhub_test.go b/pkg/target/eventhub_test.go index 44318820..e4c21aff 100644 --- a/pkg/target/eventhub_test.go +++ b/pkg/target/eventhub_test.go @@ -32,6 +32,7 @@ var cfg = EventHubConfig{ ChunkMessageLimit: 500, ContextTimeoutInSeconds: 20, BatchByteLimit: 1048576, + SetEHPartitionKey: true, } var errMock = errors.New("Mock Failure Path") @@ -183,10 +184,6 @@ func TestProcessFailure(t *testing.T) { } // TestProcessWithNoPartitionKey tests the process() function happy path when we don't set a partition key. -// Note that at time of writing, we actually cannot do this. However it illustrates the behaviour of the EH client well, -// and can serve as the basis for developing a solution to https://github.com/snowplow-devops/stream-replicator/issues/148 -// (To see it run successfully before we fix that behaviour, comment out `ehEvent.PartitionKey = &msg.PartitionKey` in the process function.) 
-/* func TestProcessWithNoPartitionKey(t *testing.T) { assert := assert.New(t) @@ -195,6 +192,7 @@ func TestProcessWithNoPartitionKey(t *testing.T) { results: make(chan *eventhub.EventBatch), } tgt := newEventHubTargetWithInterfaces(m, &cfg) + tgt.setEHPartitionKey = false // Mechanism for counting acks var ackOps int64 @@ -223,8 +221,8 @@ func TestProcessWithNoPartitionKey(t *testing.T) { assert.Nil(twres.Oversized) assert.Nil(twres.Invalid) } -*/ +// TestProcessBatchingByPartitionKey tests that the process function batches per partition key as expected. func TestProcessBatchingByPartitionKey(t *testing.T) { assert := assert.New(t) From 82113a4c4ad66a8c3b1d7d1aa8b54e1709df5506 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Tue, 5 Jul 2022 11:05:15 +0100 Subject: [PATCH 18/25] Cleanup Makefile (closes #112) --- Makefile | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/Makefile b/Makefile index 3307666d..e4f9c6ff 100644 --- a/Makefile +++ b/Makefile @@ -35,7 +35,7 @@ gcp_container_name = snowplow/stream-replicator-gcp all: cli container gox: - GO111MODULE=on go install github.com/mitchellh/gox@latest + go install github.com/mitchellh/gox@latest mkdir -p $(compiled_dir) cli: gox cli-linux cli-darwin cli-windows @@ -53,16 +53,16 @@ cli: gox cli-linux cli-darwin cli-windows mv $(windows_out_dir)/gcp/cli/staging.zip $(compiled_dir)/gcp_cli_stream_replicator_$(version)_windows_amd64.zip cli-linux: gox - GO111MODULE=on CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ - GO111MODULE=on CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ + CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ + CGO_ENABLED=0 gox -osarch=linux/amd64 -output=$(linux_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ cli-darwin: gox - GO111MODULE=on CGO_ENABLED=0 gox 
-osarch=darwin/amd64 -output=$(darwin_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ - GO111MODULE=on CGO_ENABLED=0 gox -osarch=darwin/amd64 -output=$(darwin_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ + CGO_ENABLED=0 gox -osarch=darwin/amd64 -output=$(darwin_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ + CGO_ENABLED=0 gox -osarch=darwin/amd64 -output=$(darwin_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ cli-windows: gox - GO111MODULE=on CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ - GO111MODULE=on CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ + CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/aws/cli/stream-replicator ./cmd/aws/cli/ + CGO_ENABLED=0 gox -osarch=windows/amd64 -output=$(windows_out_dir)/gcp/cli/stream-replicator ./cmd/gcp/cli/ container: cli-linux docker build -t $(aws_container_name):$(version) -f Dockerfile.aws . @@ -73,15 +73,15 @@ container: cli-linux # ----------------------------------------------------------------------------- format: - GO111MODULE=on go fmt $(go_dirs) - GO111MODULE=on gofmt -s -w . + go fmt $(go_dirs) + gofmt -s -w . 
lint: - GO111MODULE=on go install golang.org/x/lint/golint@latest + go install golang.org/x/lint/golint@latest LINTRESULT=$$(golint $(go_dirs)); echo "$$LINTRESULT"; [ -z "$$LINTRESULT" ]; tidy: - GO111MODULE=on go mod tidy + go mod tidy # ----------------------------------------------------------------------------- # TESTING @@ -89,17 +89,17 @@ tidy: test-setup: mkdir -p $(coverage_dir) - GO111MODULE=on go install golang.org/x/tools/cmd/cover@latest + go install golang.org/x/tools/cmd/cover@latest test: test-setup - GO111MODULE=on go test $(go_dirs) -v -short -covermode=count -coverprofile=$(coverage_out) - GO111MODULE=on go tool cover -html=$(coverage_out) -o $(coverage_html) - GO111MODULE=on go tool cover -func=$(coverage_out) + go test $(go_dirs) -v -short -covermode=count -coverprofile=$(coverage_out) + go tool cover -html=$(coverage_out) -o $(coverage_html) + go tool cover -func=$(coverage_out) integration-test: test-setup - GO111MODULE=on go test $(go_dirs) -v -covermode=count -coverprofile=$(coverage_out) - GO111MODULE=on go tool cover -html=$(coverage_out) -o $(coverage_html) - GO111MODULE=on go tool cover -func=$(coverage_out) + go test $(go_dirs) -v -covermode=count -coverprofile=$(coverage_out) + go tool cover -html=$(coverage_out) -o $(coverage_html) + go tool cover -func=$(coverage_out) integration-reset: integration-down integration-up From 0bf19ab4404266350338e327a0b4746c65dbafbd Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Tue, 5 Jul 2022 17:24:07 +0100 Subject: [PATCH 19/25] Fix typo in statsd reporting (closes #158) --- pkg/statsreceiver/statsd.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/statsreceiver/statsd.go b/pkg/statsreceiver/statsd.go index 3af27046..bf75e676 100644 --- a/pkg/statsreceiver/statsd.go +++ b/pkg/statsreceiver/statsd.go @@ -113,6 +113,6 @@ func (s *statsDStatsReceiver) Send(b *models.ObserverBuffer) { s.client.Incr("oversized_message_failed", b.OversizedMsgFailed) 
s.client.Incr("invalid_message_sent", b.InvalidMsgSent) s.client.Incr("invalid_message_failed", b.InvalidMsgFailed) - s.client.PrecisionTiming("latency_proccesing_max", b.MaxProcLatency) + s.client.PrecisionTiming("latency_processing_max", b.MaxProcLatency) s.client.PrecisionTiming("latency_message_max", b.MaxMsgLatency) } From 9a38dbca4a16f1c278d63f918412a09b6db9fb90 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Thu, 21 Jul 2022 16:57:20 +0100 Subject: [PATCH 20/25] Allow env var configuration of updated transformation config (closes #174) --- config/config.go | 17 +++++ config/config_test.go | 71 +++++++++++++++++++ .../test-fixtures/transform-mocked-order.hcl | 24 +++++++ 3 files changed, 112 insertions(+) create mode 100644 config/test-fixtures/transform-mocked-order.hcl diff --git a/config/config.go b/config/config.go index bff63540..a63ff7a8 100644 --- a/config/config.go +++ b/config/config.go @@ -17,6 +17,7 @@ import ( "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclparse" "github.com/pkg/errors" + "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/failure" "github.com/snowplow-devops/stream-replicator/pkg/failure/failureiface" "github.com/snowplow-devops/stream-replicator/pkg/observer" @@ -136,6 +137,22 @@ func newEnvConfig() (*Config, error) { Decoder: envDecoder, } + // If the TRANSFORM_CONFIG_B64 env var is set, parse it, and use the Transformations in our mainConfig. 
+ b64Transformations := os.Getenv("TRANSFORM_CONFIG_B64") + if b64Transformations != "" { + err := common.DecodeB64ToFile(b64Transformations, "tmp_replicator/transform.hcl") + if err != nil { + return nil, errors.Wrap(err, "Error decoding transformation config base64 from env") + } + + confFromFile, err := newHclConfig("tmp_replicator/transform.hcl") + if err != nil { + return nil, errors.Wrap(err, "Error parsing transformation config from env") + } + + mainConfig.Data.Transformations = confFromFile.Data.Transformations + } + return &mainConfig, nil } diff --git a/config/config_test.go b/config/config_test.go index 9817c05d..b653507b 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -50,6 +50,7 @@ func TestNewConfig_FromEnv(t *testing.T) { t.Setenv("LOG_LEVEL", "debug") t.Setenv("TARGET_NAME", "kinesis") t.Setenv("SOURCE_NAME", "kinesis") + t.Setenv("TRANSFORM_CONFIG_B64", `dHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBjaGFuZ2VzIGFwcF9pZCB0byAiMSIKICAgIHNvdXJjZV9iNjQgPSAiWm5WdVkzUnBiMjRnYldGcGJpaDRLU0I3Q2lBZ0lDQjJZWElnYW5OdmJrOWlhaUE5SUVwVFQwNHVjR0Z5YzJVb2VDNUVZWFJoS1RzS0lDQWdJR3B6YjI1UFltcGJJbUZ3Y0Y5cFpDSmRJRDBnSWpFaU93b2dJQ0FnY21WMGRYSnVJSHNLSUNBZ0lDQWdJQ0JFWVhSaE9pQktVMDlPTG5OMGNtbHVaMmxtZVNocWMyOXVUMkpxS1FvZ0lDQWdmVHNLZlE9PSIKICB9Cn0KCnRyYW5zZm9ybSB7CiAgdXNlICJqcyIgewogICAgLy8gaWYgYXBwX2lkID09ICIxIiBpdCBpcyBjaGFuZ2VkIHRvICIyIgogICAgc291cmNlX2I2NCA9ICJablZ1WTNScGIyNGdiV0ZwYmloNEtTQjdDaUFnSUNCMllYSWdhbk52Yms5aWFpQTlJRXBUVDA0dWNHRnljMlVvZUM1RVlYUmhLVHNLSUNBZ0lHbG1JQ2hxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5UFNBaU1TSXBJSHNLSUNBZ0lDQWdJQ0JxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5SUNJeUlnb2dJQ0FnZlFvZ0lDQWdjbVYwZFhKdUlIc0tJQ0FnSUNBZ0lDQkVZWFJoT2lCS1UwOU9Mbk4wY21sdVoybG1lU2hxYzI5dVQySnFLUW9nSUNBZ2ZUc0tmUT09IgogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBpZiBhcHBfaWQgPT0gIjIiIGl0IGlzIGNoYW5nZWQgdG8gIjMiCiAgICBzb3VyY2VfYjY0ID0gIlpuVnVZM1JwYjI0Z2JXRnBiaWg0S1NCN0NpQWdJQ0IyWVhJZ2FuTnZiazlpYWlBOUlFcFRUMDR1Y0dGeWMyVW9lQzVFWVhSaEtUc0tJQ0FnSUdsbUlDaHFjMjl1VDJKcVd5SmhjSEJmY
VdRaVhTQTlQU0FpTWlJcElIc0tJQ0FnSUNBZ0lDQnFjMjl1VDJKcVd5SmhjSEJmYVdRaVhTQTlJQ0l6SWdvZ0lDQWdmUW9nSUNBZ2NtVjBkWEp1SUhzS0lDQWdJQ0FnSUNCRVlYUmhPaUJLVTA5T0xuTjBjbWx1WjJsbWVTaHFjMjl1VDJKcUtRb2dJQ0FnZlRzS2ZRPT0iCiAgfQp9`) c, err := NewConfig() assert.NotNil(c) @@ -60,6 +61,11 @@ func TestNewConfig_FromEnv(t *testing.T) { assert.Equal("debug", c.Data.LogLevel) assert.Equal("kinesis", c.Data.Target.Use.Name) assert.Equal("kinesis", c.Data.Source.Use.Name) + assert.Equal(3, len(c.Data.Transformations)) + for _, transf := range c.Data.Transformations { + assert.Equal("js", transf.Use.Name) + + } } func TestNewConfig_FromEnvInvalid(t *testing.T) { @@ -151,6 +157,33 @@ func TestNewConfig_InvalidStatsReceiver(t *testing.T) { } } +func TestNewConfig_InvalidTransformationB64(t *testing.T) { + assert := assert.New(t) + + t.Setenv("TRANSFORM_CONFIG_B64", `fdssdnpfdspnm`) + + c, err := NewConfig() + assert.Nil(c) + assert.NotNil(err) + if err != nil { + assert.Equal("Error decoding transformation config base64 from env: Failed to Base64 decode for creating file tmp_replicator/transform.hcl: illegal base64 data at input byte 12", err.Error()) + } + +} + +func TestNewConfig_UnparseableTransformationB64(t *testing.T) { + assert := assert.New(t) + + t.Setenv("TRANSFORM_CONFIG_B64", 
`dHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBjaGFuZ2VzIGFwcF9pZCB0byAiMSIKICAgIHNvdXJjZV9iNjQgPSAiWm5WdVkzUnBiMjRnYldGcGJpaDRLU0I3Q2lBZ0lDQjJZWElnYW5OdmJrOWlhaUE5SUVwVFQwNHVjR0Z5YzJVb2VDNUVZWFJoS1RzS0lDQWdJR3B6YjI1UFltcGJJbUZ3Y0Y5cFpDSmRJRDBnSWpFaU93b2dJQ0FnY21WMGRYSnVJSHNLSUNBZ0lDQWdJQ0JFWVhSaE9pQktVMDlPTG5OMGNtbHVaMmxtZVNocWMyOXVUMkpxS1FvZ0lDQWdmVHNLZlE9PSIKICB9Cn0KCnRyYW5zZm9ybSB7CiAgdXNlICJqcyIgewogICAgLy8gaWYgYXBwX2lkID09ICIxIiBpdCBpcyBjaGFuZ2VkIHRvICIyIgogICAgc291cmNlX2I2NCA9ICJablZ1WTNScGIyNGdiV0ZwYmloNEtTQjdDaUFnSUNCMllYSWdhbk52Yms5aWFpQTlJRXBUVDA0dWNHRnljMlVvZUM1RVlYUmhLVHNLSUNBZ0lHbG1JQ2hxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5UFNBaU1TSXBJSHNLSUNBZ0lDQWdJQ0JxYzI5dVQySnFXeUpoY0hCZmFXUWlYU0E5SUNJeUlnb2dJQ0FnZlFvZ0lDQWdjbVYwZFhKdUlIc0tJQ0FnSUNBZ0lDQkVZWFJoT2lCS1UwOU9Mbk4wY21sdVoybG1lU2hxYzI5dVQySnFLUW9nSUNBZ2ZUc0tmUT09IgoKfQoKdHJhbnNmb3JtIHsKICB1c2UgImpzIiB7CiAgICAvLyBpZiBhcHBfaWQgPT0gIjIiIGl0IGlzIGNoYW5nZWQgdG8gIjMiCiAgICBzb3VyY2VfYjY0ID0gIlpuVnVZM1JwYjI0Z2JXRnBiaWg0S1NCN0NpQWdJQ0IyWVhJZ2FuTnZiazlpYWlBOUlFcFRUMDR1Y0dGeWMyVW9lQzVFWVhSaEtUc0tJQ0FnSUdsbUlDaHFjMjl1VDJKcVd5SmhjSEJmYVdRaVhTQTlQU0FpTWlJcElIc0tJQ0FnSUNBZ0lDQnFjMjl1VDJKcVd5SmhjSEJmYVdRaVhTQTlJQ0l6SWdvZ0lDQWdmUW9nSUNBZ2NtVjBkWEp1SUhzS0lDQWdJQ0FnSUNCRVlYUmhPaUJLVTA5T0xuTjBjbWx1WjJsbWVTaHFjMjl1VDJKcUtRb2dJQ0FnZlRzS2ZRPT0iCiAgfQp9`) + + c, err := NewConfig() + assert.Nil(c) + assert.NotNil(err) + if err != nil { + assert.Equal("Error parsing transformation config from env: tmp_replicator/transform.hcl:8,11-12: Unclosed configuration block; There is no closing brace for this block before the end of the file. 
This may be caused by incorrect brace nesting elsewhere in this file.", err.Error()) + } +} + func TestNewConfig_GetTags(t *testing.T) { assert := assert.New(t) @@ -243,3 +276,41 @@ func TestNewConfig_Hcl_sentry(t *testing.T) { assert.Equal("{\"testKey\":\"testValue\"}", c.Data.Sentry.Tags) assert.Equal("testDsn", c.Data.Sentry.Dsn) } + +func TestNewConfig_HclTransformationOrder(t *testing.T) { + assert := assert.New(t) + + filename := filepath.Join("test-fixtures", "transform-mocked-order.hcl") + t.Setenv("STREAM_REPLICATOR_CONFIG_FILE", filename) + + c, err := NewConfig() + assert.NotNil(c) + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal(5, len(c.Data.Transformations)) + assert.Equal("one", c.Data.Transformations[0].Use.Name) + assert.Equal("two", c.Data.Transformations[1].Use.Name) + assert.Equal("three", c.Data.Transformations[2].Use.Name) + assert.Equal("four", c.Data.Transformations[3].Use.Name) + assert.Equal("five", c.Data.Transformations[4].Use.Name) +} + +func TestNewConfig_B64TransformationOrder(t *testing.T) { + assert := assert.New(t) + + t.Setenv("TRANSFORM_CONFIG_B64", `dHJhbnNmb3JtIHsKICB1c2UgIm9uZSIgewogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgInR3byIgewogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgInRocmVlIiB7CiAgfQp9Cgp0cmFuc2Zvcm0gewogIHVzZSAiZm91ciIgewogIH0KfQoKdHJhbnNmb3JtIHsKICB1c2UgImZpdmUiIHsKICB9Cn0=`) + + c, err := NewConfig() + if err != nil { + t.Fatalf("function NewConfig failed with error: %q", err.Error()) + } + + assert.Equal(5, len(c.Data.Transformations)) + assert.Equal("one", c.Data.Transformations[0].Use.Name) + assert.Equal("two", c.Data.Transformations[1].Use.Name) + assert.Equal("three", c.Data.Transformations[2].Use.Name) + assert.Equal("four", c.Data.Transformations[3].Use.Name) + assert.Equal("five", c.Data.Transformations[4].Use.Name) +} diff --git a/config/test-fixtures/transform-mocked-order.hcl b/config/test-fixtures/transform-mocked-order.hcl new file mode 100644 index 
00000000..74ebc8fa --- /dev/null +++ b/config/test-fixtures/transform-mocked-order.hcl @@ -0,0 +1,24 @@ +transform { + use "one" { + } +} + +transform { + use "two" { + } +} + +transform { + use "three" { + } +} + +transform { + use "four" { + } +} + +transform { + use "five" { + } +} \ No newline at end of file From d89a8ffeeb315af1846d3e81d0c0259bc15319b1 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Fri, 22 Jul 2022 13:55:01 +0100 Subject: [PATCH 21/25] Fix import order (closes #175) --- pkg/source/sourceconfig/source_config_test.go | 3 ++- pkg/target/eventhub.go | 1 + pkg/target/eventhub_test.go | 5 +++-- pkg/target/http.go | 9 ++++----- pkg/target/http_test.go | 3 ++- pkg/target/kafka.go | 3 +-- pkg/target/kafka_test.go | 1 + pkg/transform/engine/engine_javascript_test.go | 10 +++++----- pkg/transform/snowplow_enriched_filter.go | 5 +++-- pkg/transform/snowplow_enriched_filter_test.go | 3 ++- pkg/transform/snowplow_enriched_set_pk_test.go | 3 ++- pkg/transform/snowplow_enriched_to_json_test.go | 3 ++- pkg/transform/snowplow_enriched_util.go | 3 ++- pkg/transform/snowplow_enriched_util_test.go | 1 + pkg/transform/transform_test.go | 3 ++- pkg/transform/transform_test_variables.go | 3 ++- 16 files changed, 35 insertions(+), 24 deletions(-) diff --git a/pkg/source/sourceconfig/source_config_test.go b/pkg/source/sourceconfig/source_config_test.go index ad915ec4..f82e1808 100644 --- a/pkg/source/sourceconfig/source_config_test.go +++ b/pkg/source/sourceconfig/source_config_test.go @@ -10,9 +10,10 @@ import ( "os" "testing" + "github.com/stretchr/testify/assert" + config "github.com/snowplow-devops/stream-replicator/config" "github.com/snowplow-devops/stream-replicator/pkg/source/sourceiface" - "github.com/stretchr/testify/assert" ) func TestMain(m *testing.M) { diff --git a/pkg/target/eventhub.go b/pkg/target/eventhub.go index 81e25d44..62a31abf 100644 --- a/pkg/target/eventhub.go +++ b/pkg/target/eventhub.go @@ -16,6 +16,7 @@ import ( 
"github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/snowplow-devops/stream-replicator/pkg/models" ) diff --git a/pkg/target/eventhub_test.go b/pkg/target/eventhub_test.go index e4c21aff..611c5c18 100644 --- a/pkg/target/eventhub_test.go +++ b/pkg/target/eventhub_test.go @@ -17,10 +17,11 @@ import ( eventhub "github.com/Azure/azure-event-hubs-go/v3" "github.com/pkg/errors" - "github.com/snowplow-devops/stream-replicator/pkg/models" - "github.com/snowplow-devops/stream-replicator/pkg/testutil" "github.com/stretchr/testify/assert" "github.com/twinj/uuid" + + "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/snowplow-devops/stream-replicator/pkg/testutil" ) var cfg = EventHubConfig{ diff --git a/pkg/target/http.go b/pkg/target/http.go index b2ca0bb6..28f8d9a3 100644 --- a/pkg/target/http.go +++ b/pkg/target/http.go @@ -15,21 +15,20 @@ import ( "net/url" "time" - "github.com/snowplow-devops/stream-replicator/pkg/common" - "github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/models" ) // HTTPTargetConfig configures the destination for records consumed type HTTPTargetConfig struct { HTTPURL string `hcl:"url" env:"TARGET_HTTP_URL"` - ByteLimit int `hcl:"byte_limit,optional" env:"TARGET_HTTP_BYTE_LIMIT"` // defBytes - RequestTimeoutInSeconds int `hcl:"request_timeout_in_seconds,optional" env:"TARGET_HTTP_TIMEOUT_IN_SECONDS"` // def ???? 
Request timeout in seconds - ContentType string `hcl:"content_type,optional" env:"TARGET_HTTP_CONTENT_TYPE"` // application/json + ByteLimit int `hcl:"byte_limit,optional" env:"TARGET_HTTP_BYTE_LIMIT"` + RequestTimeoutInSeconds int `hcl:"request_timeout_in_seconds,optional" env:"TARGET_HTTP_TIMEOUT_IN_SECONDS"` + ContentType string `hcl:"content_type,optional" env:"TARGET_HTTP_CONTENT_TYPE"` Headers string `hcl:"headers,optional" env:"TARGET_HTTP_HEADERS" ` BasicAuthUsername string `hcl:"basic_auth_username,optional" env:"TARGET_HTTP_BASICAUTH_USERNAME"` BasicAuthPassword string `hcl:"basic_auth_password,optional" env:"TARGET_HTTP_BASICAUTH_PASSWORD"` diff --git a/pkg/target/http_test.go b/pkg/target/http_test.go index c43a2e7c..7ab3eb86 100644 --- a/pkg/target/http_test.go +++ b/pkg/target/http_test.go @@ -17,9 +17,10 @@ import ( "sync/atomic" "testing" + "github.com/stretchr/testify/assert" + "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow-devops/stream-replicator/pkg/testutil" - "github.com/stretchr/testify/assert" ) func createTestServer(results *[][]byte, waitgroup *sync.WaitGroup) *httptest.Server { diff --git a/pkg/target/kafka.go b/pkg/target/kafka.go index 3b4a2f0b..f9bbc8ad 100644 --- a/pkg/target/kafka.go +++ b/pkg/target/kafka.go @@ -14,14 +14,13 @@ import ( "strings" "time" - "github.com/snowplow-devops/stream-replicator/pkg/common" - "github.com/Shopify/sarama" "github.com/hashicorp/go-multierror" "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/xdg/scram" + "github.com/snowplow-devops/stream-replicator/pkg/common" "github.com/snowplow-devops/stream-replicator/pkg/models" ) diff --git a/pkg/target/kafka_test.go b/pkg/target/kafka_test.go index 28b1a168..9de02a2b 100644 --- a/pkg/target/kafka_test.go +++ b/pkg/target/kafka_test.go @@ -13,6 +13,7 @@ import ( "github.com/Shopify/sarama" "github.com/Shopify/sarama/mocks" log "github.com/sirupsen/logrus" + 
"github.com/snowplow-devops/stream-replicator/pkg/testutil" "github.com/stretchr/testify/assert" ) diff --git a/pkg/transform/engine/engine_javascript_test.go b/pkg/transform/engine/engine_javascript_test.go index 79b3fb40..0736325f 100644 --- a/pkg/transform/engine/engine_javascript_test.go +++ b/pkg/transform/engine/engine_javascript_test.go @@ -1,8 +1,8 @@ -//// PROPRIETARY AND CONFIDENTIAL -//// -//// Unauthorized copying of this file via any medium is strictly prohibited. -//// -//// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. +// PROPRIETARY AND CONFIDENTIAL +// +// Unauthorized copying of this file via any medium is strictly prohibited. +// +// Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. // package engine diff --git a/pkg/transform/snowplow_enriched_filter.go b/pkg/transform/snowplow_enriched_filter.go index 0efcbe21..3cec9a2c 100644 --- a/pkg/transform/snowplow_enriched_filter.go +++ b/pkg/transform/snowplow_enriched_filter.go @@ -8,16 +8,17 @@ package transform import ( "fmt" - "github.com/dlclark/regexp2" "log" "regexp" "strings" "time" + "github.com/dlclark/regexp2" + "github.com/pkg/errors" + "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" "github.com/snowplow-devops/stream-replicator/pkg/models" - "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" ) func findSpEnrichedFilterValue(queriedField, parsedEventName, eventVer, field string, parsedMessage analytics.ParsedEvent, path []interface{}) ([]interface{}, error) { diff --git a/pkg/transform/snowplow_enriched_filter_test.go b/pkg/transform/snowplow_enriched_filter_test.go index 2e679286..738c5505 100644 --- a/pkg/transform/snowplow_enriched_filter_test.go +++ b/pkg/transform/snowplow_enriched_filter_test.go @@ -9,8 +9,9 @@ package transform import ( "testing" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) func 
TestNewSpEnrichedFilterFunction(t *testing.T) { diff --git a/pkg/transform/snowplow_enriched_set_pk_test.go b/pkg/transform/snowplow_enriched_set_pk_test.go index bd94512a..04988efb 100644 --- a/pkg/transform/snowplow_enriched_set_pk_test.go +++ b/pkg/transform/snowplow_enriched_set_pk_test.go @@ -9,8 +9,9 @@ package transform import ( "testing" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) func TestNewSpEnrichedSetPkFunction(t *testing.T) { diff --git a/pkg/transform/snowplow_enriched_to_json_test.go b/pkg/transform/snowplow_enriched_to_json_test.go index e5451302..e037e442 100644 --- a/pkg/transform/snowplow_enriched_to_json_test.go +++ b/pkg/transform/snowplow_enriched_to_json_test.go @@ -9,8 +9,9 @@ package transform import ( "testing" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) func TestSpEnrichedToJson(t *testing.T) { diff --git a/pkg/transform/snowplow_enriched_util.go b/pkg/transform/snowplow_enriched_util.go index 6c6468fd..d494c823 100644 --- a/pkg/transform/snowplow_enriched_util.go +++ b/pkg/transform/snowplow_enriched_util.go @@ -9,8 +9,9 @@ package transform import ( "strconv" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) // IntermediateAsSpEnrichedParsed returns the intermediate state as a ParsedEvent if valid or parses diff --git a/pkg/transform/snowplow_enriched_util_test.go b/pkg/transform/snowplow_enriched_util_test.go index cc27bb88..47ea5469 100644 --- a/pkg/transform/snowplow_enriched_util_test.go +++ b/pkg/transform/snowplow_enriched_util_test.go @@ -10,6 +10,7 @@ import ( "testing" "github.com/snowplow-devops/stream-replicator/pkg/models" + "github.com/stretchr/testify/assert" 
) diff --git a/pkg/transform/transform_test.go b/pkg/transform/transform_test.go index 4b378b02..7bec84f7 100644 --- a/pkg/transform/transform_test.go +++ b/pkg/transform/transform_test.go @@ -10,8 +10,9 @@ import ( "testing" "time" - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/stretchr/testify/assert" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) // To test a function which creates a function, we're creating the function then testing that. Not sure if there's a better way? diff --git a/pkg/transform/transform_test_variables.go b/pkg/transform/transform_test_variables.go index 9302f063..6464bd01 100644 --- a/pkg/transform/transform_test_variables.go +++ b/pkg/transform/transform_test_variables.go @@ -7,8 +7,9 @@ package transform import ( - "github.com/snowplow-devops/stream-replicator/pkg/models" "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" + + "github.com/snowplow-devops/stream-replicator/pkg/models" ) var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) From 82e8e3ecb2a41378511d071b19589b2b1f96e989 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Fri, 22 Jul 2022 18:16:06 +0100 Subject: [PATCH 22/25] Bump dependencies (closes #120) --- go.mod | 85 ++++++++--------- go.sum | 291 +++++++++++++++++++++++++++++++++++++++++---------------- 2 files changed, 252 insertions(+), 124 deletions(-) diff --git a/go.mod b/go.mod index 97fcd776..e421fbd4 100644 --- a/go.mod +++ b/go.mod @@ -3,66 +3,65 @@ module github.com/snowplow-devops/stream-replicator go 1.17 require ( - cloud.google.com/go v0.102.0 // indirect - cloud.google.com/go/pubsub v1.22.2 - github.com/Azure/azure-amqp-common-go/v3 v3.1.0 // indirect - github.com/Azure/azure-event-hubs-go/v3 v3.3.12 - github.com/Azure/azure-sdk-for-go v56.2.0+incompatible // indirect - github.com/Azure/go-amqp v0.13.11 // indirect - github.com/Azure/go-autorest/autorest v0.11.19 // indirect - github.com/Azure/go-autorest/autorest/adal v0.9.14 // indirect - github.com/Shopify/sarama v1.34.0 - github.com/aws/aws-sdk-go v1.40.22 - github.com/caarlos0/env/v6 v6.9.1 - github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect - github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect - github.com/getsentry/sentry-go v0.11.0 + cloud.google.com/go v0.103.0 // indirect + cloud.google.com/go/pubsub v1.24.0 + github.com/Azure/azure-amqp-common-go/v3 v3.2.3 // indirect + github.com/Azure/azure-event-hubs-go/v3 v3.3.18 + github.com/Azure/azure-sdk-for-go v66.0.0+incompatible // indirect + github.com/Azure/go-amqp v0.17.5 // indirect + github.com/Azure/go-autorest/autorest v0.11.27 // indirect + github.com/Azure/go-autorest/autorest/adal 
v0.9.20 // indirect + github.com/Shopify/sarama v1.34.1 + github.com/aws/aws-sdk-go v1.44.60 + github.com/caarlos0/env/v6 v6.9.3 + github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + github.com/getsentry/sentry-go v0.13.0 github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/uuid v1.3.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 github.com/jpillora/backoff v1.0.0 // indirect - github.com/klauspost/compress v1.15.0 // indirect - github.com/mitchellh/mapstructure v1.4.1 + github.com/klauspost/compress v1.15.9 // indirect + github.com/mitchellh/mapstructure v1.5.0 github.com/myesui/uuid v1.0.0 // indirect github.com/pkg/errors v0.9.1 - github.com/sirupsen/logrus v1.8.1 + github.com/sirupsen/logrus v1.9.0 github.com/smira/go-statsd v1.3.2 github.com/snowplow-devops/go-retry v0.0.0-20210106090855-8989bbdbae1c github.com/snowplow-devops/go-sentryhook v0.0.0-20210106082031-21bf7f9dac2a github.com/snowplow/snowplow-golang-analytics-sdk v0.3.0 - github.com/stretchr/testify v1.7.0 + github.com/stretchr/testify v1.7.1 github.com/twinj/uuid v1.0.0 github.com/twitchscience/kinsumer v0.0.0-20210611163023-da24975e2c91 - github.com/urfave/cli v1.22.5 - github.com/xdg/scram v1.0.3 - golang.org/x/crypto v0.0.0-20220214200702-86341886e292 // indirect - golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 // indirect - golang.org/x/oauth2 v0.0.0-20220524215830-622c5d57e401 // indirect - golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect + github.com/urfave/cli v1.22.9 + github.com/xdg/scram v1.0.5 + golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa // indirect + golang.org/x/net v0.0.0-20220722155237-a158d28d115b // indirect + golang.org/x/oauth2 v0.0.0-20220722155238-128564f6959c // indirect + golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f // indirect golang.org/x/text v0.3.7 // indirect - google.golang.org/api v0.81.0 
// indirect - google.golang.org/genproto v0.0.0-20220523171625-347a074981d8 - google.golang.org/grpc v1.46.2 + google.golang.org/api v0.88.0 // indirect + google.golang.org/genproto v0.0.0-20220720214146-176da50484ac + google.golang.org/grpc v1.48.0 gopkg.in/stretchr/testify.v1 v1.2.2 // indirect ) require ( github.com/davecgh/go-spew v1.1.1 github.com/dlclark/regexp2 v1.7.0 - github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf - github.com/goccy/go-json v0.9.7 - github.com/hashicorp/hcl/v2 v2.11.1 + github.com/dop251/goja v0.0.0-20220722151623-4765a9872229 + github.com/goccy/go-json v0.9.10 + github.com/hashicorp/hcl/v2 v2.13.0 github.com/snowplow/snowplow-golang-tracker/v2 v2.4.1 github.com/yuin/gluamapper v0.0.0-20150323120927-d836955830e7 - github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 + github.com/yuin/gopher-lua v0.0.0-20220504180219-658193537a64 github.com/zclconf/go-cty v1.10.0 layeh.com/gopher-json v0.0.0-20201124131017-552bb3c4c3bf ) require ( - cloud.google.com/go/compute v1.6.1 // indirect + cloud.google.com/go/compute v1.7.0 // indirect cloud.google.com/go/iam v0.3.0 // indirect github.com/Azure/go-autorest v14.2.0+incompatible // indirect github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect @@ -70,20 +69,22 @@ require ( github.com/Azure/go-autorest/autorest/validation v0.3.1 // indirect github.com/Azure/go-autorest/logger v0.2.1 // indirect github.com/Azure/go-autorest/tracing v0.6.0 // indirect - github.com/agext/levenshtein v1.2.1 // indirect + github.com/agext/levenshtein v1.2.3 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/devigned/tab v0.1.1 // indirect - github.com/eapache/go-resiliency v1.2.0 // indirect + github.com/eapache/go-resiliency v1.3.0 // indirect github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect github.com/eapache/queue v1.1.0 // indirect github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect + github.com/golang-jwt/jwt/v4 
v4.4.2 // indirect github.com/golang/protobuf v1.5.2 // indirect github.com/google/go-cmp v0.5.8 // indirect + github.com/googleapis/enterprise-certificate-proxy v0.1.0 // indirect github.com/googleapis/gax-go/v2 v2.4.0 // indirect - github.com/hashicorp/go-immutable-radix v1.1.0 // indirect - github.com/hashicorp/go-memdb v1.0.4 // indirect - github.com/hashicorp/go-uuid v1.0.2 // indirect - github.com/hashicorp/golang-lru v0.5.1 // indirect + github.com/hashicorp/go-immutable-radix v1.3.1 // indirect + github.com/hashicorp/go-memdb v1.3.3 // indirect + github.com/hashicorp/go-uuid v1.0.3 // indirect + github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect github.com/jcmturner/gofork v1.0.0 // indirect @@ -91,17 +92,17 @@ require ( github.com/jcmturner/rpc/v2 v2.0.3 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/mattn/go-sqlite3 v2.0.2+incompatible // indirect - github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 // indirect + github.com/mattn/go-sqlite3 v2.0.3+incompatible // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/pierrec/lz4/v4 v4.1.14 // indirect + github.com/pierrec/lz4/v4 v4.1.15 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/xdg/stringprep v1.0.3 // indirect go.opencensus.io v0.23.0 // indirect - golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 // indirect + golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/protobuf v1.28.0 // indirect gopkg.in/yaml.v3 
v3.0.0-20210107192922-496545a6307b // indirect diff --git a/go.sum b/go.sum index 5001602d..17e927ba 100644 --- a/go.sum +++ b/go.sum @@ -28,8 +28,10 @@ cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Ud cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= -cloud.google.com/go v0.102.0 h1:DAq3r8y4mDgyB/ZPJ9v/5VJNqjgJAxTn6ZYLlUywOu8= cloud.google.com/go v0.102.0/go.mod h1:oWcCzKlqJ5zgHQt9YsaeTY9KzIvjyy0ArmiBUgpQ+nc= +cloud.google.com/go v0.102.1/go.mod h1:XZ77E9qnTEnrgEOvr4xzfdX5TRo7fB4T2F4O6+34hIU= +cloud.google.com/go v0.103.0 h1:YXtxp9ymmZjlGzxV7VrYQ8aaQuAgcqxSy6YhDX4I458= +cloud.google.com/go v0.103.0/go.mod h1:vwLx1nqLrzLX/fpwSMOXmFIqBOyHsvHbnAdbGSJ+mKk= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -40,8 +42,9 @@ cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTB cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM= cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M= cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s= -cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc= cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= +cloud.google.com/go/compute v1.7.0 h1:v/k9Eueb8aAJ0vZuxKMrgm6kPhCLZU9HxFU+AFDs9Uk= +cloud.google.com/go/compute v1.7.0/go.mod h1:435lt8av5oL9P3fv1OEzSbSUe+ybHXGMPQHHZWZxy9U= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= 
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/iam v0.1.0/go.mod h1:vcUNEa0pEm0qRVpmWepWaFMIAI8/hjB9mO8rNCJtF6c= @@ -53,46 +56,44 @@ cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2k cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/pubsub v1.22.2 h1:e6A4rhtMX4opff/jDWApl4HwLtsCdV9VULVfpFRp6eo= -cloud.google.com/go/pubsub v1.22.2/go.mod h1:LBHGrtgM7+SGKCDKQu2pKIRtGwbZyJvRDkMk0594xdU= +cloud.google.com/go/pubsub v1.24.0 h1:aCS6wSMzrc602OeXUMA66KGlyXxpdkHdwN+FSBv/sUg= +cloud.google.com/go/pubsub v1.24.0/go.mod h1:rWv09Te1SsRpRGPiWOMDKraMQTJyJps4MkUCoMGUgqw= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.22.1/go.mod h1:S8N1cAStu7BOeFfE8KAQzmyyLkK8p/vmRq6kuBTW58Y= +cloud.google.com/go/storage v1.23.0/go.mod h1:vOEEDNFnciUMhBeT6hsJIn3ieU5cFRmzeLgDvXzfIXc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= -github.com/Azure/azure-amqp-common-go/v3 v3.0.1/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= -github.com/Azure/azure-amqp-common-go/v3 v3.1.0 h1:1N4YSkWYWffOpQHromYdOucBSQXhNRKzqtgICy6To8Q= 
-github.com/Azure/azure-amqp-common-go/v3 v3.1.0/go.mod h1:PBIGdzcO1teYoufTKMcGibdKaYZv4avS+O6LNIp8bq0= -github.com/Azure/azure-event-hubs-go/v3 v3.3.12 h1:jaZxZtDdOKSMxg1bJb6Yv2R4pUEKvEhok6BoHpcHvr4= -github.com/Azure/azure-event-hubs-go/v3 v3.3.12/go.mod h1:vWHatYv3Y8J9rY4GGKECEs6fF3fSUHuFS/m+ErhP0gw= +github.com/Azure/azure-amqp-common-go/v3 v3.2.3 h1:uDF62mbd9bypXWi19V1bN5NZEO84JqgmI5G73ibAmrk= +github.com/Azure/azure-amqp-common-go/v3 v3.2.3/go.mod h1:7rPmbSfszeovxGfc5fSAXE4ehlXQZHpMja2OtxC2Tas= +github.com/Azure/azure-event-hubs-go/v3 v3.3.18 h1:jgWDk2qmknA0UsfyzjHiW5yciOw3aBY0Oq9p/M9lz2Q= +github.com/Azure/azure-event-hubs-go/v3 v3.3.18/go.mod h1:R5H325+EzgxcBDkUerEwtor7ZQg77G7HiOTwpcuIVXY= github.com/Azure/azure-pipeline-go v0.1.8/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= github.com/Azure/azure-pipeline-go v0.1.9/go.mod h1:XA1kFWRVhSK+KNFiOhfv83Fv8L9achrP7OxIzeTn1Yg= github.com/Azure/azure-sdk-for-go v51.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= -github.com/Azure/azure-sdk-for-go v56.2.0+incompatible h1:2GrG1JkTSMqLquy1pqVsjeRJhNtZLjss2+rx8ogZXx4= -github.com/Azure/azure-sdk-for-go v56.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v66.0.0+incompatible h1:bmmC38SlE8/E81nNADlgmVGurPWMHDX2YNXVQMrBpEE= +github.com/Azure/azure-sdk-for-go v66.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y= -github.com/Azure/go-amqp v0.13.0/go.mod h1:qj+o8xPCz9tMSbQ83Vp8boHahuRDl5mkNHyt1xlxUTs= -github.com/Azure/go-amqp v0.13.10/go.mod h1:D5ZrjQqB1dyp1A+G73xeL/kNn7D5qHJIIsNNps7YNmk= -github.com/Azure/go-amqp v0.13.11 h1:E28zKoWuzO4+D80iUD88BUorI5PqvIZ/S/77md3hIvA= -github.com/Azure/go-amqp v0.13.11/go.mod h1:D5ZrjQqB1dyp1A+G73xeL/kNn7D5qHJIIsNNps7YNmk= +github.com/Azure/go-amqp v0.17.0/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= 
+github.com/Azure/go-amqp v0.17.5 h1:7Lsi9H9ijCAfqOaMiNmQ4c+GL9bdrpCjebNKhV/eQ+c= +github.com/Azure/go-amqp v0.17.5/go.mod h1:9YJ3RhxRT1gquYnzpZO1vcYMMpAdJT+QEg6fwmw9Zlg= github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest v0.9.3/go.mod h1:GsRuLYvwzLjjjRoWEIyMUaYq8GNUx2nRB378IPt/1p0= -github.com/Azure/go-autorest/autorest v0.11.3/go.mod h1:JFgpikqFJ/MleTTxwepExTKnFUKKszPS8UavbQYUMuw= github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= -github.com/Azure/go-autorest/autorest v0.11.19 h1:7/IqD2fEYVha1EPeaiytVKhzmPV223pfkRIQUGOK2IE= -github.com/Azure/go-autorest/autorest v0.11.19/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA= +github.com/Azure/go-autorest/autorest v0.11.27 h1:F3R3q42aWytozkV8ihzcgMO4OA4cuqr3bNlsEuF6//A= +github.com/Azure/go-autorest/autorest v0.11.27/go.mod h1:7l8ybrIdUmGqZMTD0sRtAr8NvbHjfofbf8RSP2q7w7U= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= github.com/Azure/go-autorest/autorest/adal v0.8.1/go.mod h1:ZjhuQClTqx435SRJ2iMlOxPYt3d2C/T/7TiQCVZSn3Q= -github.com/Azure/go-autorest/autorest/adal v0.9.0/go.mod h1:/c022QCutn2P7uY+/oQWWNcK9YU+MH96NgK+jErpbcg= github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= -github.com/Azure/go-autorest/autorest/adal v0.9.14 h1:G8hexQdV5D4khOXrWG2YuLCFKhWYmWD8bHYaXN5ophk= -github.com/Azure/go-autorest/autorest/adal v0.9.14/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M= +github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod 
h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= +github.com/Azure/go-autorest/autorest/adal v0.9.20 h1:gJ3E98kMpFB1MFqQCvA1yFab8vthOeD4VlFRQULxahg= +github.com/Azure/go-autorest/autorest/adal v0.9.20/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ= github.com/Azure/go-autorest/autorest/azure/auth v0.4.2 h1:iM6UAvjR97ZIeR93qTcwpKNMpV+/FTWjwEbuPD495Tk= github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM= github.com/Azure/go-autorest/autorest/azure/cli v0.3.1 h1:LXl088ZQlP0SBppGFsRZonW6hSvwgL5gRByMbvUbx8U= @@ -104,15 +105,14 @@ github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSY github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= -github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= -github.com/Azure/go-autorest/autorest/mocks v0.4.1 h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk= github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k= +github.com/Azure/go-autorest/autorest/mocks v0.4.2 h1:PGN4EDXnuQbojHbU0UWoNvmu9AGVwYHG9/fkDYhtAfw= +github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU= github.com/Azure/go-autorest/autorest/to v0.4.0 h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk= github.com/Azure/go-autorest/autorest/to v0.4.0/go.mod h1:fE8iZBn7LQR7zH/9XU2NcPR4o9jEImooCeWJcYV/zLE= github.com/Azure/go-autorest/autorest/validation v0.3.1 h1:AgyqjAd94fwNAoTjl/WQXg4VvFeRFpO+UhNyRXqF1ac= github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYXl6JAsWyUe4KVNffhoDhG0zVzUMo3E= github.com/Azure/go-autorest/logger v0.1.0/go.mod 
h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= -github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg= github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= @@ -125,13 +125,19 @@ github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMd github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= -github.com/Shopify/sarama v1.34.0 h1:j4zTaFHFnfvuV2fdLZyXqIg0Tu4Mzl9f064Z5/H+o4o= -github.com/Shopify/sarama v1.34.0/go.mod h1:V2ceE9UupUf4/oP1Z38SI49fAnD0/MtkqDDHvolIeeQ= -github.com/Shopify/toxiproxy/v2 v2.3.0 h1:62YkpiP4bzdhKMH+6uC5E95y608k3zDwdzuBMsnn3uQ= -github.com/Shopify/toxiproxy/v2 v2.3.0/go.mod h1:KvQTtB6RjCJY4zqNJn7C7JDFgsG5uoHYDirfUfpIm0c= -github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8= +github.com/Shopify/sarama v1.34.1 h1:pVCQO7BMAK3s1jWhgi5v1W6lwZ6Veiekfc2vsgRS06Y= +github.com/Shopify/sarama v1.34.1/go.mod h1:NZSNswsnStpq8TUdFaqnpXm2Do6KRzTIjdBdVlL1YRM= +github.com/Shopify/toxiproxy/v2 v2.4.0 h1:O1e4Jfvr/hefNTNu+8VtdEG5lSeamJRo4aKhMOKNM64= +github.com/Shopify/toxiproxy/v2 v2.4.0/go.mod h1:3ilnjng821bkozDRxNoo64oI/DKqM+rOyJzb564+bvg= github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= +github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= 
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= @@ -140,15 +146,19 @@ github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6 github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/aws/aws-sdk-go v1.25.19/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.40.22 h1:iit4tJ1hjL2GlNCrbE4aJza6jTmvEE2pDTnShct/yyY= -github.com/aws/aws-sdk-go v1.40.22/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q= +github.com/aws/aws-sdk-go v1.44.60 h1:KTTogelVR+4dWiIPl7eyxoxaJkziChON6/Y/hVfTipk= +github.com/aws/aws-sdk-go v1.44.60/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= -github.com/caarlos0/env/v6 v6.9.1 h1:zOkkjM0F6ltnQ5eBX6IPI41UP/KDGEK7rRPwGCNos8k= -github.com/caarlos0/env/v6 v6.9.1/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Zu0ddvwc= 
+github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/caarlos0/env/v6 v6.9.3 h1:Tyg69hoVXDnpO5Qvpsu8EoquarbPyQb+YwExWHP8wWU= +github.com/caarlos0/env/v6 v6.9.3/go.mod h1:hvp/ryKXKipEkcuYjs9mI4bBCg+UI0Yhgm5Zu0ddvwc= github.com/cactus/go-statsd-client/statsd v0.0.0-20190922113730-52b467de415c/go.mod h1:D4RDtP0MffJ3+R36OkGul0LwJLIN8nRb0Ac6jZmJCmo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -169,8 +179,8 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= -github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= 
+github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -185,12 +195,15 @@ github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQ github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.7.0 h1:7lJfhqlPssTb1WQx4yvTHN0uElPEv52sbaECrAQxjAo= github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= -github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf h1:Yt+4K30SdjOkRoRRm3vYNQgR+/ZIy0RmeUDZo7Y8zeQ= -github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= +github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= +github.com/dop251/goja v0.0.0-20220722151623-4765a9872229 h1:tZlMnVCISkoA6ibywWNsK2FtFcA5cZLQ+NE17WvSdW0= +github.com/dop251/goja v0.0.0-20220722151623-4765a9872229/go.mod h1:1jWwHOtOkEqsfX6tYsufUc7BBTuGHH2ekiJabpkN4CA= github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= +github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/eapache/go-resiliency v1.2.0 h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q= github.com/eapache/go-resiliency v1.2.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= +github.com/eapache/go-resiliency v1.3.0 h1:RRL0nge+cWGlxXbUzJ7yMcq6w2XBEr19dCN6HECGaT0= +github.com/eapache/go-resiliency v1.3.0/go.mod 
h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc= @@ -210,34 +223,52 @@ github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHj github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible h1:/l4kBbb4/vGSsdtB5nUe8L7B9mImVMaBPw9L/0TBHU8= -github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= github.com/getsentry/sentry-go v0.9.0/go.mod h1:kELm/9iCblqUYh+ZRML7PNdCvEuw24wBvJPYyi86cws= -github.com/getsentry/sentry-go v0.11.0 h1:qro8uttJGvNAMr5CLcFI9CHR0aDzXl0Vs3Pmw/oTPg8= -github.com/getsentry/sentry-go v0.11.0/go.mod h1:KBQIxiZAetw62Cj8Ri964vAEWVdgfaUCn30Q3bCvANo= +github.com/getsentry/sentry-go v0.13.0 h1:20dgTiUSfxRB/EhMPtxcL9ZEbM1ZdR+W/7f7NWD+xWo= +github.com/getsentry/sentry-go v0.13.0/go.mod h1:EOsfu5ZdvKPfeHYV6pTVQnsjfp30+XA7//UooKNumH0= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod 
h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= +github.com/gin-gonic/gin v1.7.7/go.mod h1:axIBovoeJpVj8S3BwE0uPMTeReE4+AfFtqpqaZ1qq1U= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= +github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= +github.com/go-playground/validator/v10 v10.4.1/go.mod 
h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= -github.com/goccy/go-json v0.9.7 h1:IcB+Aqpx/iMHu5Yooh7jEzJk1JZ7Pjtmys2ukPr7EeM= -github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.9.10 h1:hCeNmprSNLB8B8vQKWl6DpuH0t60oEs+TAk9a7CScKc= +github.com/goccy/go-json v0.9.10/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= +github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= +github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v4 v4.4.2 h1:rcc4lwaZgFMCZ5jxF9ABolDcIHdBytAFgqFPbSJQAYs= +github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -318,6 +349,9 @@ github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/enterprise-certificate-proxy v0.0.0-20220520183353-fd19c99a87aa/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= +github.com/googleapis/enterprise-certificate-proxy v0.1.0 h1:zO8WHNx/MYiAKJ3d5spxZXZE6KHmIQGQcAzwUzV7qQw= +github.com/googleapis/enterprise-certificate-proxy v0.1.0/go.mod h1:17drOmN3MwGY7t0e+Ei9b45FFGA3fBs3x36SsCg1hq8= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -336,23 +370,28 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-immutable-radix v1.1.0 h1:vN9wG1D6KG6YHRTWr8512cxGOVgTMEfgEdSj/hr8MPc= github.com/hashicorp/go-immutable-radix v1.1.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-memdb v1.0.4 h1:sIdJHAEtV3//iXcUb4LumSQeorYos5V0ptvqvQvFgDA= +github.com/hashicorp/go-immutable-radix v1.3.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1 
h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-memdb v1.0.4/go.mod h1:LWQ8R70vPrS4OEY9k28D2z8/Zzyu34NVzeRibGAzHO0= +github.com/hashicorp/go-memdb v1.3.3 h1:oGfEWrFuxtIUF3W2q/Jzt6G85TrMk9ey6XfYLvVe1Wo= +github.com/hashicorp/go-memdb v1.3.3/go.mod h1:uBTr1oQbtuMgd1SSGoR8YV27eT3sBHbYiNm53bMpgSg= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= +github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/hcl/v2 v2.11.1 h1:yTyWcXcm9XB0TEkyU/JCRU6rYy4K+mgLtzn2wlrJbcc= -github.com/hashicorp/hcl/v2 v2.11.1/go.mod h1:FwWsfWEjyV/CMj8s/gqAuiviY72rJ1/oayI9WftqcKg= +github.com/hashicorp/hcl/v2 v2.13.0 
h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= +github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -389,11 +428,15 @@ github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2E github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= github.com/kataras/golog v0.0.10/go.mod h1:yJ8YKCmyL+nWjERB90Qwn+bdyBZsaQwU3bTVFgkFIp8= github.com/kataras/iris/v12 v12.1.8/go.mod 
h1:LMYy4VlP67TQ3Zgriz8RE2h2kMZV2SgMYbq3UhfoFmE= @@ -403,10 +446,13 @@ github.com/kataras/sitemap v0.0.5/go.mod h1:KY2eugMKiPwsJgx7+U103YZehfvNGOXURubc github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.15.0 h1:xqfchp4whNFxn5A4XFyyYtitiWI8Hy5EW59jEwcyL6U= -github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= +github.com/klauspost/compress v1.15.6/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= +github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY= +github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= @@ -418,24 +464,33 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348 h1:MtvEpTB6LX3vkb4ax0b5D2DHbNAUsen0Gx5wZoq3lV4= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/labstack/echo/v4 v4.1.11/go.mod h1:i541M3Fj6f76NZtHSj7TXnyM8n2gaodfvfxNnFqi74g= 
+github.com/labstack/echo/v4 v4.5.0/go.mod h1:czIriw4a0C1dFun+ObrXp7ok03xON0N1awStJ6ArI7Y= github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= -github.com/mattn/go-sqlite3 v2.0.2+incompatible h1:qzw9c2GNT8UFrgWNDhCTqRqYUSmu/Dav/9Z58LGpk7U= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-sqlite3 v2.0.2+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U= +github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mediocregopher/radix/v3 v3.4.2/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8= github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= 
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= +github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -444,6 +499,8 @@ github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3Rllmb github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/myesui/uuid v1.0.0 
h1:xCBmH4l5KuvLYc5L7AS7SZg9/jKdIFubM7OVoLqaQUI= github.com/myesui/uuid v1.0.0/go.mod h1:2CDfNgU0LR8mIdO8vdWd8i9gWWxLlcoIGGpSNgafq84= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= @@ -454,16 +511,35 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pierrec/lz4/v4 v4.1.14 h1:+fL8AQEZtz/ijeNnpduH0bROTu0O3NZAlPjQxGn8LwE= github.com/pierrec/lz4/v4 v4.1.14/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= +github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0= +github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod 
h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= @@ -481,9 +557,12 @@ github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= 
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= +github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/smira/go-statsd v1.3.2 h1:1EeuzxNZ/TD9apbTOFSM9nulqfcsQFmT4u1A2DREabI= @@ -513,21 +592,23 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/twinj/uuid v1.0.0 h1:fzz7COZnDrXGTAOHGuUGYd6sG+JMq+AoE7+Jlu0przk= github.com/twinj/uuid v1.0.0/go.mod h1:mMgcE1RHFUFqe5AfiwlINXisXfDGro23fWdPUfOMjRY= 
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= -github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU= -github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/urfave/cli v1.22.9 h1:cv3/KhXGBGjEXLC4bH0sLuJ9BewaAbpk5oyMOveu4pw= +github.com/urfave/cli v1.22.9/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= @@ -535,8 +616,8 @@ github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgq github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g= github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8= -github.com/xdg/scram v1.0.3 
h1:nTadYh2Fs4BK2xdldEa2g5bbaZp0/+1nJMMPtPxS/to= -github.com/xdg/scram v1.0.3/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= +github.com/xdg/scram v1.0.5 h1:TuS0RFmt5Is5qm9Tm2SoD89OPqe4IRiFtyFY4iwWXsw= +github.com/xdg/scram v1.0.5/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v1.0.3 h1:cmL5Enob4W83ti/ZHuZLuKD/xqJfus4fVPwE+/BDm+4= github.com/xdg/stringprep v1.0.3/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= @@ -554,8 +635,8 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 h1:k/gmLsJDWwWqbLCur2yWnJzwQEKRcAHXo6seXGuSwWw= -github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA= +github.com/yuin/gopher-lua v0.0.0-20220504180219-658193537a64 h1:5mLPGnFdSsevFRFc9q3yYbBkB6tsm4aCwwQV/j1JQAQ= +github.com/yuin/gopher-lua v0.0.0-20220504180219-658193537a64/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.8.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0= @@ -573,7 +654,6 @@ go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqe golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= 
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -581,11 +661,15 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201112155050-0c6587e931a9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292 h1:f+lwQ+GtmgoY+A2YaQxlSOnDjXcQ7ZRLWOHbC6HtRqE= +golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220517005047-85d78b3ac167/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto 
v0.0.0-20220722155217-630584e8d5aa h1:zuSxTR4o9y82ebqCUJYNGJbGPo6sKVl54f/TVDObg1c= +golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -625,6 +709,7 @@ golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -634,6 +719,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -662,15 +748,20 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net 
v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -690,8 +781,10 @@ golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220524215830-622c5d57e401 h1:zwrSfklXn0gxyLRX/aR+q6cgHbV/ItVyzbPlbA+dkAw= -golang.org/x/oauth2 v0.0.0-20220524215830-622c5d57e401/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 v0.0.0-20220622183110-fd043fe589d2/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE= +golang.org/x/oauth2 v0.0.0-20220722155238-128564f6959c h1:q3gFqPqH7NVofKo3c3yETAP//pPI+G5mvB7qqj1Y5kY= +golang.org/x/oauth2 v0.0.0-20220722155238-128564f6959c/go.mod h1:h4gKUeWbJ4rQPri7E0u6Gs4e9Ri2zaLxzw5DI5XGrYg= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -703,19 +796,21 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220513210516-0976fa681c29 h1:w8s32wxx3sY+OjLlv9qltkLU5yvJzxjjgiHWLjdIcw4= -golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys 
v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -726,7 +821,9 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -738,6 +835,8 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod 
h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -745,14 +844,17 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -760,9 +862,12 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -770,8 +875,13 @@ golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220610221304-9f5ed59c137d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220615213510-4f61da869c0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220624220833-87e55d714810/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f h1:v4INt8xihDGvnrfjMDVXGxw9wrfxYyCjk0KbXjhR55s= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -787,7 +897,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/time 
v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20220609170525-579cf78fd858/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -848,6 +959,7 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= +golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -886,8 +998,11 @@ google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRR google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA= google.golang.org/api v0.78.0/go.mod 
h1:1Sg78yoMLOhlQTeF+ARBoytAcH1NNyyl390YMy6rKmw= google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg= -google.golang.org/api v0.81.0 h1:o8WF5AvfidafWbFjsRyupxyEQJNUWxLZJCK5NXrxZZ8= -google.golang.org/api v0.81.0/go.mod h1:FA6Mb/bZxj706H2j+j2d6mHEEaHBmbbWnkfvmorOCko= +google.golang.org/api v0.84.0/go.mod h1:NTsGnUFJMYROtiquksZHBWtHfeMC7iYthki7Eq3pa8o= +google.golang.org/api v0.85.0/go.mod h1:AqZf8Ep9uZ2pyTvgL+x0D3Zt0eoT9b5E8fmzfu6FO2g= +google.golang.org/api v0.86.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= +google.golang.org/api v0.88.0 h1:MPwxQRqpyskYhr2iNyfsQ8R06eeyhe7UEuR30p136ZQ= +google.golang.org/api v0.88.0/go.mod h1:+Sem1dnrKlrXMR/X0bPnMWyluQe4RsNoYfmNLhOIkzw= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -971,9 +1086,14 @@ google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220518221133-4f43b3371335/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= -google.golang.org/genproto v0.0.0-20220523171625-347a074981d8 h1:4NSrVrQGh6+UqBEd+Kwdh6ZDwESH0Sj2bNUQN+VjoQk= google.golang.org/genproto v0.0.0-20220523171625-347a074981d8/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto v0.0.0-20220608133413-ed9918b62aac/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90/go.mod 
h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220617124728-180714bec0ad/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220624142145-8cd45d7dbd1f/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220628213854-d9e0b6570c03/go.mod h1:KEWEmljWE5zPzLBa/oHl6DaEt9LmfH6WtH1OHIvleBA= +google.golang.org/genproto v0.0.0-20220720214146-176da50484ac h1:EOa+Yrhx1C0O+4pHeXeWrCwdI0tWI6IfUU56Vebs9wQ= +google.golang.org/genproto v0.0.0-20220720214146-176da50484ac/go.mod h1:GkXuJDJ6aQ7lnJcRF+SJVgFdQhypqgl3LB1C9vabdRE= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -1003,8 +1123,10 @@ google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9K google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.46.2 h1:u+MLGgVf7vRdjEYZ8wDFhAVNmhkbJ5hmrA1LMWK1CAQ= google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.48.0 h1:rQOsyJ/8+ufEDJd/Gdsz7HG220Mh9HAhFHRGnIjda0w= +google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod 
h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -1021,9 +1143,11 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= @@ -1035,10 +1159,13 @@ gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3M gopkg.in/stretchr/testify.v1 v1.2.2 h1:yhQC6Uy5CqibAIlk1wlusa/MJ3iAN49/BsR/dCCKz3M= gopkg.in/stretchr/testify.v1 v1.2.2/go.mod h1:QI5V/q6UbPmuhtm10CaFZxED9NreB8PnFYN9JcR6TxU= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20191120175047-4206685974f2/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= From fff7770cb8271a37378894e4145dc1d649866c02 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Mon, 1 Aug 2022 13:41:42 +0100 Subject: [PATCH 23/25] Remove support for GCP auth via env vars (closes #181) --- cmd/init.go | 11 ----------- config/config.go | 23 +++++++++++------------ pkg/common/helpers.go | 15 --------------- pkg/common/helpers_test.go | 26 -------------------------- 4 files changed, 11 insertions(+), 64 deletions(-) diff --git a/cmd/init.go b/cmd/init.go index 939425c8..a73a671a 100644 --- a/cmd/init.go +++ b/cmd/init.go @@ -9,7 +9,6 @@ package cmd import ( "encoding/json" "fmt" - "os" "github.com/getsentry/sentry-go" "github.com/pkg/errors" @@ -17,7 +16,6 @@ import ( sentryhook "github.com/snowplow-devops/go-sentryhook" config "github.com/snowplow-devops/stream-replicator/config" - "github.com/snowplow-devops/stream-replicator/pkg/common" ) var ( @@ -43,15 +41,6 @@ func Init() (*config.Config, bool, error) { return nil, false, errors.Wrap(err, "Failed to build config") } - // Configure GCP Access (if set) - if cfg.Data.GoogleServiceAccountB64 != "" { - targetFile, err := common.GetGCPServiceAccountFromBase64(cfg.Data.GoogleServiceAccountB64) - if err != nil { - return nil, false, errors.Wrap(err, "Failed to store GCP Service Account JSON file") - } - os.Setenv("GOOGLE_APPLICATION_CREDENTIALS", targetFile) - } - // Configure Sentry sentryEnabled := cfg.Data.Sentry.Dsn != "" if sentryEnabled { diff --git a/config/config.go b/config/config.go index a63ff7a8..5076b4b2 100644 --- 
a/config/config.go +++ b/config/config.go @@ -33,18 +33,17 @@ type Config struct { Decoder Decoder } -// ConfigurationData for holding all configuration options -type ConfigurationData struct { - Source *Component `hcl:"source,block" envPrefix:"SOURCE_"` - Target *Component `hcl:"target,block" envPrefix:"TARGET_"` - FailureTarget *FailureConfig `hcl:"failure_target,block"` - Sentry *SentryConfig `hcl:"sentry,block"` - StatsReceiver *StatsConfig `hcl:"stats_receiver,block"` - Transformation string `hcl:"message_transformation,optional" env:"MESSAGE_TRANSFORMATION"` - LogLevel string `hcl:"log_level,optional" env:"LOG_LEVEL"` - GoogleServiceAccountB64 string `hcl:"google_application_credentials_b64,optional" env:"GOOGLE_APPLICATION_CREDENTIALS_B64"` - UserProvidedID string `hcl:"user_provided_id,optional" env:"USER_PROVIDED_ID"` - DisableTelemetry bool `hcl:"disable_telemetry,optional" env:"DISABLE_TELEMETRY"` +// configurationData for holding all configuration options +type configurationData struct { + Source *component `hcl:"source,block" envPrefix:"SOURCE_"` + Target *component `hcl:"target,block" envPrefix:"TARGET_"` + FailureTarget *failureConfig `hcl:"failure_target,block"` + Sentry *sentryConfig `hcl:"sentry,block"` + StatsReceiver *statsConfig `hcl:"stats_receiver,block"` + Transformations []*component `hcl:"transform,block"` + LogLevel string `hcl:"log_level,optional" env:"LOG_LEVEL"` + UserProvidedID string `hcl:"user_provided_id,optional" env:"USER_PROVIDED_ID"` + DisableTelemetry bool `hcl:"disable_telemetry,optional" env:"DISABLE_TELEMETRY"` } // component is a type to abstract over configuration blocks. 
diff --git a/pkg/common/helpers.go b/pkg/common/helpers.go index edea93fe..8a962424 100644 --- a/pkg/common/helpers.go +++ b/pkg/common/helpers.go @@ -20,23 +20,8 @@ import ( "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/sts" "github.com/pkg/errors" - "github.com/twinj/uuid" ) -// --- Cloud Helpers - -// GetGCPServiceAccountFromBase64 will take a base64 encoded string -// and attempt to create a JSON file on disk within the /tmp directory -// for later use in creating GCP clients. -func GetGCPServiceAccountFromBase64(serviceAccountB64 string) (string, error) { - targetFile := fmt.Sprintf(`tmp_replicator/stream-replicator-service-account-%s.json`, uuid.NewV4().String()) - err := DecodeB64ToFile(serviceAccountB64, targetFile) - if err != nil { - return ``, err - } - return targetFile, nil -} - // DeleteTemporaryDir deletes the temp directory we created to store credentials func DeleteTemporaryDir() error { err := os.RemoveAll(`tmp_replicator`) diff --git a/pkg/common/helpers_test.go b/pkg/common/helpers_test.go index 3360f29c..8257b87b 100644 --- a/pkg/common/helpers_test.go +++ b/pkg/common/helpers_test.go @@ -8,38 +8,12 @@ package common import ( "crypto/tls" - "strings" "testing" "time" "github.com/stretchr/testify/assert" ) -// --- Cloud Helpers -func TestGetGCPServiceAccountFromBase64(t *testing.T) { - assert := assert.New(t) - defer DeleteTemporaryDir() - - path, err := GetGCPServiceAccountFromBase64("ewogICJoZWxsbyI6IndvcmxkIgp9") - - assert.NotEqual(path, "") - assert.Nil(err) - assert.True(strings.HasPrefix(path, "tmp_replicator/stream-replicator-service-account-")) - assert.True(strings.HasSuffix(path, ".json")) -} - -func TestGetGCPServiceAccountFromBase64_NotBase64(t *testing.T) { - assert := assert.New(t) - - path, err := GetGCPServiceAccountFromBase64("helloworld") - - assert.Equal("", path) - assert.NotNil(err) - if err != nil { - assert.True(strings.HasPrefix(err.Error(), "Failed to Base64 decode")) - } -} - func 
TestGetAWSSession(t *testing.T) { assert := assert.New(t) From 4209baf12b8334261094ef2870a02835f385d624 Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Thu, 4 Aug 2022 13:12:04 +0100 Subject: [PATCH 24/25] Refactor v1 release filters (closes #192) --- .../transform-mixed-filtered.hcl | 2 +- pkg/transform/snowplow_enriched_filter.go | 267 +++++++++----- .../snowplow_enriched_filter_test.go | 332 +++++++++++++++--- pkg/transform/snowplow_enriched_util.go | 17 - pkg/transform/snowplow_enriched_util_test.go | 11 - pkg/transform/transform_test_variables.go | 5 +- .../transformconfig/transform_config.go | 75 ++-- .../transformconfig/transform_config_test.go | 82 +++-- 8 files changed, 548 insertions(+), 243 deletions(-) diff --git a/config/test-fixtures/transform-mixed-filtered.hcl b/config/test-fixtures/transform-mixed-filtered.hcl index 9ba1d870..1d6711db 100644 --- a/config/test-fixtures/transform-mixed-filtered.hcl +++ b/config/test-fixtures/transform-mixed-filtered.hcl @@ -8,7 +8,7 @@ transform { transform { use "spEnrichedFilter" { - field = "app_id" + atomic_field = "app_id" regex = "wrong" regex_timeout = 10 } diff --git a/pkg/transform/snowplow_enriched_filter.go b/pkg/transform/snowplow_enriched_filter.go index 3cec9a2c..74b4393d 100644 --- a/pkg/transform/snowplow_enriched_filter.go +++ b/pkg/transform/snowplow_enriched_filter.go @@ -8,8 +8,8 @@ package transform import ( "fmt" - "log" "regexp" + "strconv" "strings" "time" @@ -21,42 +21,17 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/models" ) -func findSpEnrichedFilterValue(queriedField, parsedEventName, eventVer, field string, parsedMessage analytics.ParsedEvent, path []interface{}) ([]interface{}, error) { - var vf interface{} - var valueFound []interface{} - var err error - - switch { - case strings.HasPrefix(queriedField, `contexts_`): - vf, err = parsedMessage.GetContextValue(queriedField, path...) - valueFound = append(valueFound, vf.([]interface{})...) 
- case strings.HasPrefix(queriedField, `unstruct_event`): - eventNameFull := `unstruct_event_` + parsedEventName - if queriedField == eventNameFull || queriedField == eventNameFull+`_`+eventVer { - vf, err = parsedMessage.GetUnstructEventValue(path...) - valueFound = append(valueFound, vf) - } - default: - vf, err = parsedMessage.GetValue(field) - valueFound = append(valueFound, vf) +func evaluateSpEnrichedFilter(re *regexp2.Regexp, valuesFound []interface{}) bool { + // if valuesFound is nil, we found no value. + // Because negative matches are a thing, we still want to match against an empty string + if valuesFound == nil { + valuesFound = make([]interface{}, 1) } - if err != nil { - // GetValue returns an error if the field requested is empty. Check for that particular error before returning error - if err.Error() == analytics.EmptyFieldErr { - return nil, nil + for _, v := range valuesFound { + if v == nil { + v = "" // because nil gets cast to `` } - return nil, err - } - return valueFound, nil -} -func evaluateSpEnrichedFilter(valuesFound []interface{}, regex string, regexTimeout int) bool { - re, err := regexp2.Compile(regex, 0) - re.MatchTimeout = time.Duration(regexTimeout) * time.Second - if err != nil { - log.Fatal(errors.Wrap(err, `error compiling regex for filter`)) - } - for _, v := range valuesFound { if ok, _ := re.MatchString(fmt.Sprintf("%v", v)); ok { return true } @@ -64,14 +39,21 @@ func evaluateSpEnrichedFilter(valuesFound []interface{}, regex string, regexTime return false } -// createSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event -// and a regex declared by the user. 
-func createSpEnrichedFilterFunction(field, regex string, regexTimeout int, isUnstructEvent bool) (TransformationFunction, error) { +func createSpEnrichedFilterFunction(regex string, regexTimeout int, getFunc valueGetter) (TransformationFunction, error) { + if regexTimeout == 0 { + // default timeout for regex is 10 seconds + regexTimeout = 10 + } + + // regexToMatch is what we use to evaluate the actual filter, once we have the value. + regexToMatch, err := regexp2.Compile(regex, 0) + regexToMatch.MatchTimeout = time.Duration(regexTimeout) * time.Second + if err != nil { + return nil, errors.Wrap(err, `error compiling regex for filter`) + } + return func(message *models.Message, intermediateState interface{}) (*models.Message, *models.Message, *models.Message, interface{}) { - if regexTimeout == 0 { - // default timeout for regex is 10 seconds - regexTimeout = 10 - } + // Evaluate intermediateState to parsedEvent parsedMessage, parseErr := IntermediateAsSpEnrichedParsed(intermediateState, message) if parseErr != nil { @@ -79,61 +61,15 @@ func createSpEnrichedFilterFunction(field, regex string, regexTimeout int, isUns return nil, nil, message, nil } - // This regex retrieves the path fields - // (e.g. 
field1.field2[0].field3 -> [field1, field2, 0, field3]) - regexWords := `\w+` - re := regexp.MustCompile(regexWords) - - // separate the path string into words using regex - path := re.FindAllString(field, -1) - separatedPath := make([]string, len(path)-1) - for idx, pathField := range path[1:] { - separatedPath[idx] = pathField - } - - var parsedEventName string - var eventMajorVer string - var err error - - // only call SDK functions if an unstruct_event is being filtered - if isUnstructEvent { - // get event name - eventName, err := parsedMessage.GetValue(`event_name`) - if err != nil { - message.SetError(err) - return nil, nil, message, nil - } - parsedEventName = eventName.(string) - // get event version - fullEventVer, err := parsedMessage.GetValue(`event_version`) - if err != nil { - message.SetError(err) - return nil, nil, message, nil - } - // get the major event version - eventMajorVer = strings.Split(fullEventVer.(string), `-`)[0] - if eventMajorVer == `` { - message.SetError(fmt.Errorf(`invalid schema version format: %s`, fullEventVer)) - return nil, nil, message, nil - } - } - - // find the value in the event - valueFound, err := findSpEnrichedFilterValue( - path[0], - parsedEventName, - eventMajorVer, - field, - parsedMessage, - convertPathToInterfaces(separatedPath), - ) + // get the value + valueFound, err := getFunc(parsedMessage) if err != nil { message.SetError(err) return nil, nil, message, nil } // evaluate whether the found value passes the filter, determining if the message should be kept - shouldKeepMessage := evaluateSpEnrichedFilter(valueFound, regex, regexTimeout) + shouldKeepMessage := evaluateSpEnrichedFilter(regexToMatch, valueFound) // if message is not to be kept, return it as a filtered message to be acked in the main function if !shouldKeepMessage { @@ -145,17 +81,158 @@ func createSpEnrichedFilterFunction(field, regex string, regexTimeout int, isUns }, nil } +// valueGetter is a function that can hold the logic for getting values 
in the case of base, context, and unstruct fields, +// which respecively require different logic. +type valueGetter func(analytics.ParsedEvent) ([]interface{}, error) + +// Because each type of value requires different arguments, we use these `make` functions to construct them. +// This allows us to unit test each one, plug them into the createSpEnrichedFilterFunction constructor, +// and to construct them so that field names/paths and regexes are handled only once, at startup. + +// makeBaseValueGetter returns a valueGetter for base-level values. +func makeBaseValueGetter(field string) valueGetter { + return func(parsedMessage analytics.ParsedEvent) (value []interface{}, err error) { + // find the value in the event + valueFound, err := parsedMessage.GetValue(field) + // We don't return an error for empty field since this just means the value is nil. + if err != nil && err.Error() != analytics.EmptyFieldErr { + return nil, err + } + return []interface{}{valueFound}, nil + } +} + // NewSpEnrichedFilterFunction returns a TransformationFunction which filters messages based on a field in the Snowplow enriched event. func NewSpEnrichedFilterFunction(field, regex string, regexTimeout int) (TransformationFunction, error) { - return createSpEnrichedFilterFunction(field, regex, regexTimeout, false) + + // getBaseValueForMatch is responsible for retrieving data from the message for base fields + getBaseValueForMatch := makeBaseValueGetter(field) + + return createSpEnrichedFilterFunction(regex, regexTimeout, getBaseValueForMatch) +} + +// makeContextValueGetter creates a valueGetter for context data +func makeContextValueGetter(name string, path []interface{}) valueGetter { + return func(parsedMessage analytics.ParsedEvent) ([]interface{}, error) { + value, err := parsedMessage.GetContextValue(name, path...) + // We don't return an error for empty field since this just means the value is nil. 
+ if err != nil && err.Error() != analytics.EmptyFieldErr { + return nil, err + } + // bug in analytics sdk requires the type casting below. https://github.com/snowplow/snowplow-golang-analytics-sdk/issues/36 + // GetContextValue should always return []interface{} but instead it returns an interface{} which always contains type []interface{} + + // if it's nil, return nil - we just didn't find any value. + if value == nil { + return nil, nil + } + // otherwise, type assertion. + valueFound, ok := value.([]interface{}) + if !ok { + return nil, errors.New(fmt.Sprintf("Context filter encountered unexpected type in getting value for path %v", path)) + } + + return valueFound, nil + } } // NewSpEnrichedFilterFunctionContext returns a TransformationFunction for filtering a context -func NewSpEnrichedFilterFunctionContext(field, regex string, regexTimeout int) (TransformationFunction, error) { - return createSpEnrichedFilterFunction(field, regex, regexTimeout, false) +func NewSpEnrichedFilterFunctionContext(contextFullName, pathToField, regex string, regexTimeout int) (TransformationFunction, error) { + + path, err := parsePathToArguments(pathToField) + if err != nil { + return nil, errors.Wrap(err, "error creating Context filter function") + } + + // getContextValuesForMatch is responsible for retrieving data from the message for context fields + getContextValuesForMatch := makeContextValueGetter(contextFullName, path) + + return createSpEnrichedFilterFunction(regex, regexTimeout, getContextValuesForMatch) +} + +// makeUnstructValueGetter creates a valueGetter for unstruct data. 
+func makeUnstructValueGetter(eventName string, versionRegex *regexp.Regexp, path []interface{}) valueGetter { + return func(parsedMessage analytics.ParsedEvent) (value []interface{}, err error) { + eventNameFound, err := parsedMessage.GetValue(`event_name`) + if err != nil { // This field can't be empty for a valid event, so we return all errors here + return nil, err + } + if eventNameFound != eventName { // If we don't have an exact match on event name, we return nil value + return nil, nil + } + versionFound, err := parsedMessage.GetValue(`event_version`) + if err != nil { // This field can't be empty for a valid event, so we return all errors here + return nil, err + } + if !versionRegex.MatchString(versionFound.(string)) { // If we don't match the provided version regex, return nil value + return nil, nil + } + + valueFound, err := parsedMessage.GetUnstructEventValue(path...) + // We don't return an error for empty field since this just means the value is nil. + if err != nil && err.Error() != analytics.EmptyFieldErr && !strings.Contains(err.Error(), "not found") { + // This last clause exists because of this: https://github.com/snowplow/snowplow-golang-analytics-sdk/issues/37 + // TODO: Fix that and remove it as soon as possible. 
+ return nil, err + } + + if valueFound == nil { + return nil, nil + } + + return []interface{}{valueFound}, nil + } } // NewSpEnrichedFilterFunctionUnstructEvent returns a TransformationFunction for filtering an unstruct_event -func NewSpEnrichedFilterFunctionUnstructEvent(field, regex string, regexTimeout int) (TransformationFunction, error) { - return createSpEnrichedFilterFunction(field, regex, regexTimeout, true) +func NewSpEnrichedFilterFunctionUnstructEvent(eventNameToMatch, eventVersionToMatch, pathToField, regex string, regexTimeout int) (TransformationFunction, error) { + + path, err := parsePathToArguments(pathToField) + if err != nil { + return nil, errors.Wrap(err, "error creating Unstruct filter function") + } + + versionRegex, err := regexp.Compile(eventVersionToMatch) + if err != nil { + return nil, errors.Wrap(err, fmt.Sprint("Failed to compile regex: ", eventVersionToMatch)) + } + + // getUnstructValuesForMatch is responsible for retrieving data from the message for context fields. + // It also checks that the correct event name and version are provided, and returns nil if not. + getUnstructValuesForMatch := makeUnstructValueGetter(eventNameToMatch, versionRegex, path) + + return createSpEnrichedFilterFunction(regex, regexTimeout, getUnstructValuesForMatch) +} + +// parsePathToArguments parses a string path to custom data (eg. `test1.test2[0].test3`) +// into the slice of interfaces expected by the analytics SDK's Get() methods. 
+func parsePathToArguments(pathToField string) ([]interface{}, error) { + // validate that an edge case (unmatched opening brace) isn't present + if strings.Count(pathToField, "[") != strings.Count(pathToField, "]") { + return nil, errors.New(fmt.Sprint("unmatched brace in path: ", pathToField)) + } + + // regex to separate path into components + re := regexp.MustCompile(`\[\d+\]|[^\.\[]+`) + parts := re.FindAllString(pathToField, -1) + + // regex to identify arrays + arrayRegex := regexp.MustCompile(`\[\d+\]`) + + convertedPath := make([]interface{}, 0) + for _, part := range parts { + + if arrayRegex.MatchString(part) { // handle arrays first + intPart, err := strconv.Atoi(part[1 : len(part)-1]) // strip braces and convert to int + if err != nil { + return nil, errors.New(fmt.Sprint("error parsing path element: ", part)) + } + + convertedPath = append(convertedPath, intPart) + } else { // handle strings + convertedPath = append(convertedPath, part) + } + + } + return convertedPath, nil } diff --git a/pkg/transform/snowplow_enriched_filter_test.go b/pkg/transform/snowplow_enriched_filter_test.go index 738c5505..acc11583 100644 --- a/pkg/transform/snowplow_enriched_filter_test.go +++ b/pkg/transform/snowplow_enriched_filter_test.go @@ -7,33 +7,35 @@ package transform import ( + "regexp" "testing" + "github.com/dlclark/regexp2" "github.com/stretchr/testify/assert" "github.com/snowplow-devops/stream-replicator/pkg/models" ) -func TestNewSpEnrichedFilterFunction(t *testing.T) { - assert := assert.New(t) +var messageGood = models.Message{ + Data: snowplowTsv3, + PartitionKey: "some-key", +} - var messageGood = models.Message{ - Data: snowplowTsv3, - PartitionKey: "some-key", - } +var messageGoodInt = models.Message{ + Data: snowplowTsv4, + PartitionKey: "some-key", +} - var messageGoodInt = models.Message{ - Data: snowplowTsv4, - PartitionKey: "some-key", - } +var messageWithUnstructEvent = models.Message{ + Data: snowplowTsv1, + PartitionKey: "some-key", +} - var 
messageWithUnstructEvent = models.Message{ - Data: snowplowTsv1, - PartitionKey: "some-key", - } +func TestNewSpEnrichedFilterFunction(t *testing.T) { + assert := assert.New(t) // Single value cases - aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id", "test-data3", 0) + aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id", "^test-data3$", 0) aidKeepIn, aidKeepOut, fail, _ := aidFilterFuncKeep(&messageGood, nil) @@ -50,7 +52,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail2) // int value - urlPrtFilterFuncKeep, _ := NewSpEnrichedFilterFunction("page_urlport", "80", 10) + urlPrtFilterFuncKeep, _ := NewSpEnrichedFilterFunction("page_urlport", "^80$", 10) urlPrtKeepIn, urlPrtKeepOut, fail, _ := urlPrtFilterFuncKeep(&messageGood, nil) @@ -59,7 +61,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail) // Multiple value cases - aidFilterFuncKeepWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "someotherValue|test-data3", 10) + aidFilterFuncKeepWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^someotherValue|test-data3$", 10) aidMultipleNegationFailedIn, aidMultipleKeepOut, fail3, _ := aidFilterFuncKeepWithMultiple(&messageGood, nil) @@ -67,7 +69,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(aidMultipleKeepOut) assert.Nil(fail3) - aidFilterFuncDiscardWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "someotherValue|failThis", 10) + aidFilterFuncDiscardWithMultiple, _ := NewSpEnrichedFilterFunction("app_id", "^someotherValue|failThis$", 10) aidNegationMultipleIn, aidMultipleDiscardOut, fail3, _ := aidFilterFuncDiscardWithMultiple(&messageGood, nil) @@ -76,7 +78,6 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail3) // Single value negation cases - aidFilterFuncNegationDiscard, _ := NewSpEnrichedFilterFunction("app_id", "^((?!test-data3).)*$", 10) aidNegationIn, aidNegationOut, fail4, _ := aidFilterFuncNegationDiscard(&messageGood, nil) @@ 
-85,7 +86,7 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Equal(snowplowTsv3, aidNegationOut.Data) assert.Nil(fail4) - aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id", "^((?!failThis).)*$", 10) + aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id", "^((?!someValue).)*$", 10) aidNegationFailedIn, aidNegationFailedOut, fail5, _ := aidFilterFuncNegationKeep(&messageGood, nil) @@ -111,10 +112,11 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(fail7) // Filters on a nil field - txnFilterFunctionAffirmation, _ := NewSpEnrichedFilterFunction("txn_id", "something", 10) + txnFilterFunctionAffirmation, _ := NewSpEnrichedFilterFunction("txn_id", "^something$", 10) nilAffirmationIn, nilAffirmationOut, fail8, _ := txnFilterFunctionAffirmation(&messageGood, nil) + // nil doesn't match the regex and should be filtered out. assert.Nil(nilAffirmationIn) assert.Equal(snowplowTsv3, nilAffirmationOut.Data) assert.Nil(fail8) @@ -123,12 +125,27 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { nilNegationIn, nilNegationOut, fail8, _ := txnFilterFunctionNegation(&messageGood, nil) - assert.Nil(nilNegationIn) - assert.Equal(snowplowTsv3, nilNegationOut.Data) + // nil DOES match the negative lookup - it doesn't contain 'something'. So should be kept. 
+ assert.Equal(snowplowTsv3, nilNegationIn.Data) + assert.Nil(nilNegationOut) assert.Nil(fail8) + fieldNotExistsFilter, _ := NewSpEnrichedFilterFunction("nothing", "", 10) + + notExistsIn, notExistsOut, notExistsFail, _ := fieldNotExistsFilter(&messageGood, nil) + + assert.Nil(notExistsIn) + assert.Nil(notExistsOut) + assert.NotNil(notExistsFail) +} + +func TestNewSpEnrichedFilterFunctionContext(t *testing.T) { + assert := assert.New(t) + + // The relevant data in messageGood looks like this: "test1":{"test2":[{"test3":"testValue"}] + // context filter success - contextFuncKeep, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3", "testValue", 10) + contextFuncKeep, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^testValue$", 10) contextKeepIn, contextKeepOut, fail9, _ := contextFuncKeep(&messageGood, nil) @@ -136,8 +153,10 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(contextKeepOut) assert.Nil(fail9) + // The relevant data in messageGoodInt looks like this: "test1":{"test2":[{"test3":1}] + // context filter success (integer value) - contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1.test1.test2[0].test3", "1", 10) + contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^1$", 10) contextKeepIn, contextKeepOut, fail9, _ = contextFuncKeep(&messageGoodInt, nil) @@ -145,50 +164,55 @@ func TestNewSpEnrichedFilterFunction(t *testing.T) { assert.Nil(contextKeepOut) assert.Nil(fail9) - // context filter failure - contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_2.test1.test2[0].test3", "testValue", 10) + // context filter wrong path + contextFuncKeep, _ = NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_2", "test1.test2[0].test3", "^testValue$", 10) contextKeepIn, contextKeepOut, fail9, 
_ = contextFuncKeep(&messageGood, nil) assert.Nil(contextKeepIn) assert.Equal(snowplowTsv3, contextKeepOut.Data) assert.Nil(fail9) +} + +func TestNewSpEnrichedFilterFunctionUnstructEvent(t *testing.T) { + assert := assert.New(t) // event filter success, filtered event name - eventFilterFunCkeep, _ := NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart_1.sku", "item41", 10) + eventFilterFuncKeep, _ := NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "1-*-*", "sku", "^item41$", 10) - eventKeepIn, eventKeepOut, fail10, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) + eventKeepIn, eventKeepOut, fail10, _ := eventFilterFuncKeep(&messageWithUnstructEvent, nil) assert.Equal(snowplowTsv1, eventKeepIn.Data) assert.Nil(eventKeepOut) assert.Nil(fail10) // event filter success, filtered event name, no event ver - eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.sku", "item41", 10) + eventFilterFuncKeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "", "sku", "^item41$", 10) - eventKeepIn, eventKeepOut, fail10, _ = eventFilterFunCkeep(&messageWithUnstructEvent, nil) + eventKeepIn, eventKeepOut, fail10, _ = eventFilterFuncKeep(&messageWithUnstructEvent, nil) assert.Equal(snowplowTsv1, eventKeepIn.Data) assert.Nil(eventKeepOut) assert.Nil(fail10) // event filter failure, wrong event name - eventFilterFunCkeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_wrong_name.sku", "item41", 10) + eventFilterFuncKeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("wrong_name", "", "sku", "^item41$", 10) - eventKeepIn, eventKeepOut, fail11, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) + eventKeepIn, eventKeepOut, fail11, _ := eventFilterFuncKeep(&messageWithUnstructEvent, nil) assert.Nil(eventKeepIn) assert.Equal(snowplowTsv1, eventKeepOut.Data) assert.Nil(fail11) // event filter failure, field not found - eventFilterFunCkeep, _ = 
NewSpEnrichedFilterFunctionUnstructEvent("unstruct_event_add_to_cart.ska", "item41", 10) + eventFilterFuncKeep, _ = NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "", "ska", "item41", 10) - eventNoFieldIn, eventNoFieldOut, fail12, _ := eventFilterFunCkeep(&messageWithUnstructEvent, nil) + eventNoFieldIn, eventNoFieldOut, fail12, _ := eventFilterFuncKeep(&messageWithUnstructEvent, nil) assert.Nil(eventNoFieldIn) - assert.Nil(eventNoFieldOut) - assert.NotNil(fail12) + assert.Equal(snowplowTsv1, eventNoFieldOut.Data) + assert.Nil(fail12) + } func TestSpEnrichedFilterFunction_Slice(t *testing.T) { @@ -212,7 +236,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc, _ := NewSpEnrichedFilterFunction("app_id", "test-data1", 10) + filterFunc, _ := NewSpEnrichedFilterFunction("app_id", "^test-data1$", 10) filter1 := NewTransformation(filterFunc) filter1Res := filter1(messages) @@ -240,7 +264,7 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { }, } - filterFunc2, _ := NewSpEnrichedFilterFunction("app_id", "test-data1|test-data2", 10) + filterFunc2, _ := NewSpEnrichedFilterFunction("app_id", "^test-data1|test-data2$", 10) filter2 := NewTransformation(filterFunc2) filter2Res := filter2(messages) @@ -269,9 +293,233 @@ func TestSpEnrichedFilterFunction_Slice(t *testing.T) { func TestEvaluateSpEnrichedFilter(t *testing.T) { assert := assert.New(t) + regex, err := regexp2.Compile("^yes$", 0) + if err != nil { + panic(err) + } + valuesFound := []interface{}{"NO", "maybe", "yes"} - assert.True(evaluateSpEnrichedFilter(valuesFound, "yes", 10)) + assert.True(evaluateSpEnrichedFilter(regex, valuesFound)) + + valuesFound2 := []interface{}{"NO", "maybe", "nope", nil} + assert.False(evaluateSpEnrichedFilter(regex, valuesFound2)) - valuesFound = []interface{}{"NO", "maybe", "nope"} - assert.False(evaluateSpEnrichedFilter(valuesFound, "yes", 10)) + regexInt, err := regexp2.Compile("^123$", 0) + if err != nil { + panic(err) + } + + valuesFound3 := 
[]interface{}{123, "maybe", "nope", nil} + assert.True(evaluateSpEnrichedFilter(regexInt, valuesFound3)) + + // This asserts that when any element of the input is nil, we assert against empty string. + // It exists to ensure we don't evaluate against the string `` since we're naively casting values to string. + regexNil, err := regexp2.Compile("^$", 0) + if err != nil { + panic(err) + } + + assert.True(evaluateSpEnrichedFilter(regexNil, []interface{}{nil})) + + // just to make sure the regex only matches empty: + assert.False(evaluateSpEnrichedFilter(regexNil, []interface{}{"a"})) + + // These tests ensures that when getters return a nil slice, we're still asserting against the empty value. + // This is important since we have negative lookaheads. + + assert.True(evaluateSpEnrichedFilter(regexNil, nil)) + + // negative lookahead: + regexNegative, err := regexp2.Compile("^((?!failThis).)*$", 0) + if err != nil { + panic(err) + } + + assert.True(evaluateSpEnrichedFilter(regexNegative, nil)) +} + +func TestMakeBaseValueGetter(t *testing.T) { + assert := assert.New(t) + + // simple app ID + appIDGetter := makeBaseValueGetter("app_id") + + res, err := appIDGetter(spTsv3Parsed) + + assert.Equal([]interface{}{"test-data3"}, res) + assert.Nil(err) + + nonExistentFieldGetter := makeBaseValueGetter("nope") + + res2, err2 := nonExistentFieldGetter(spTsv3Parsed) + + assert.Nil(res2) + assert.NotNil(err2) + if err2 != nil { + assert.Equal("Key nope not a valid atomic field", err2.Error()) + } + // TODO: currently we'll only hit this error while processing data. Ideally we should hit it on startup. 
+} + +func TestMakeContextValueGetter(t *testing.T) { + assert := assert.New(t) + + contextGetter := makeContextValueGetter("contexts_nl_basjes_yauaa_context_1", []interface{}{"test1", "test2", 0, "test3"}) + + res, err := contextGetter(spTsv3Parsed) + + assert.Equal([]interface{}{"testValue"}, res) + assert.Nil(err) + + res2, err2 := contextGetter(spTsv1Parsed) + + // If the path doesn't exist, we shoud return nil, nil. + assert.Nil(res2) + assert.Nil(err2) + + contextGetterArray := makeContextValueGetter("contexts_com_acme_just_ints_1", []interface{}{"integerField"}) + + res3, err3 := contextGetterArray(spTsv1Parsed) + + assert.Equal([]interface{}{float64(0), float64(1), float64(2)}, res3) + assert.Nil(err3) +} + +func TestMakeUnstructValueGetter(t *testing.T) { + assert := assert.New(t) + + re1 := regexp.MustCompile("1-*-*") + + unstructGetter := makeUnstructValueGetter("add_to_cart", re1, []interface{}{"sku"}) + + res, err := unstructGetter(spTsv1Parsed) + + assert.Equal([]interface{}{"item41"}, res) + assert.Nil(err) + + unstructGetterWrongPath := makeUnstructValueGetter("add_to_cart", re1, []interface{}{"notSku"}) + + // If it's not in the event, both should be nil + res2, err2 := unstructGetterWrongPath(spTsv1Parsed) + + assert.Nil(res2) + assert.Nil(err2) + + // test that wrong schema version behaves appropriately (return nil nil) + re2 := regexp.MustCompile("2-*-*") + + unstructWrongSchemaGetter := makeUnstructValueGetter("add_to_cart", re2, []interface{}{"sku"}) + + res3, err3 := unstructWrongSchemaGetter(spTsv1Parsed) + + assert.Nil(res3) + assert.Nil(err3) + + // test that not specifying a version behaves appropriately (accepts all versions) + re3 := regexp.MustCompile("") + + unstructAnyVersionGetter := makeUnstructValueGetter("add_to_cart", re3, []interface{}{"sku"}) + + res4, err4 := unstructAnyVersionGetter(spTsv1Parsed) + + assert.Equal([]interface{}{"item41"}, res4) + assert.Nil(err4) + + // test that wrong event name behaves appropriately (return 
nil nil) + + unstructWrongEvnetName := makeUnstructValueGetter("not_add_to_cart_at_all", re3, []interface{}{"sku"}) + + res5, err5 := unstructWrongEvnetName(spTsv1Parsed) + + assert.Nil(res5) + assert.Nil(err5) +} + +func BenchmarkBaseFieldFilter(b *testing.B) { + var messageGood = models.Message{ + Data: snowplowTsv3, + PartitionKey: "some-key", + } + aidFilterFuncKeep, _ := NewSpEnrichedFilterFunction("app_id", "^test-data3$", 0) + + aidFilterFuncNegationKeep, _ := NewSpEnrichedFilterFunction("app_id", "^((?!failThis).)*$", 10) + + for i := 0; i < b.N; i++ { + + aidFilterFuncKeep(&messageGood, nil) + aidFilterFuncNegationKeep(&messageGood, nil) + } +} + +func BenchmarkContextFilterNew(b *testing.B) { + var messageGood = models.Message{ + Data: snowplowTsv3, + PartitionKey: "some-key", + } + + contextFuncAffirm, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^testValue$", 10) + contextFuncNegate, _ := NewSpEnrichedFilterFunctionContext("contexts_nl_basjes_yauaa_context_1", "test1.test2[0].test3", "^((?!failThis).)*$", 10) + + for i := 0; i < b.N; i++ { + contextFuncAffirm(&messageGood, nil) + contextFuncNegate(&messageGood, nil) + } +} + +func BenchmarkUnstructFilterNew(b *testing.B) { + var messageGood = models.Message{ + Data: snowplowTsv1, + PartitionKey: "some-key", + } + + unstructFilterFuncAffirm, _ := NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "1-*-*", "sku", "^item41$", 10) + unstructFilterFuncNegate, _ := NewSpEnrichedFilterFunctionUnstructEvent("add_to_cart", "1-*-*", "sku", "^((?!failThis).)*$", 10) + + for i := 0; i < b.N; i++ { + unstructFilterFuncAffirm(&messageGood, nil) + unstructFilterFuncNegate(&messageGood, nil) + + } +} + +func TestParsePathToArguments(t *testing.T) { + assert := assert.New(t) + + // Common case + path1, err1 := parsePathToArguments("test1[123].test2[1].test3") + expectedPath1 := []interface{}{"test1", 123, "test2", 1, "test3"} + + assert.Equal(expectedPath1, 
path1) + assert.Nil(err1) + + // Success edge case - field names with different character + path2, err2 := parsePathToArguments("test-1.test_2[1].test$3") + expectedPath2 := []interface{}{"test-1", "test_2", 1, "test$3"} + + assert.Equal(expectedPath2, path2) + assert.Nil(err2) + + // Success edge case - field name is stringified int + path3, err3 := parsePathToArguments("123.456[1].789") + expectedPath3 := []interface{}{"123", "456", 1, "789"} + + assert.Equal(expectedPath3, path3) + assert.Nil(err3) + + // Success edge case - nested arrays + path4, err4 := parsePathToArguments("test1.test2[1][2].test3") + expectedPath4 := []interface{}{"test1", "test2", 1, 2, "test3"} + + assert.Equal(expectedPath4, path4) + assert.Nil(err4) + + // Failure edge case - unmatched brace in path + // We are validating for this and failing at startup, with the assumption that it must be misconfiguration. + path5, err5 := parsePathToArguments("test1.test[2.test3") + + assert.Nil(path5) + assert.NotNil(err5) + if err5 != nil { + assert.Equal("unmatched brace in path: test1.test[2.test3", err5.Error()) + } } diff --git a/pkg/transform/snowplow_enriched_util.go b/pkg/transform/snowplow_enriched_util.go index d494c823..b29b34cd 100644 --- a/pkg/transform/snowplow_enriched_util.go +++ b/pkg/transform/snowplow_enriched_util.go @@ -7,8 +7,6 @@ package transform import ( - "strconv" - "github.com/snowplow/snowplow-golang-analytics-sdk/analytics" "github.com/snowplow-devops/stream-replicator/pkg/models" @@ -28,18 +26,3 @@ func IntermediateAsSpEnrichedParsed(intermediateState interface{}, message *mode } return parsedMessage, nil } - -// convertPathToInterfaces converts a slice of strings representing a path to a slice of interfaces to be used -// by the SDK Get() function -func convertPathToInterfaces(path []string) []interface{} { - var output []interface{} - for _, pathField := range path { - pathFieldInt, err := strconv.Atoi(pathField) - if err != nil { - output = append(output, pathField) - 
} else { - output = append(output, pathFieldInt) - } - } - return output -} diff --git a/pkg/transform/snowplow_enriched_util_test.go b/pkg/transform/snowplow_enriched_util_test.go index 47ea5469..d253cb21 100644 --- a/pkg/transform/snowplow_enriched_util_test.go +++ b/pkg/transform/snowplow_enriched_util_test.go @@ -46,14 +46,3 @@ func TestIntermediateAsSpEnrichedParsed(t *testing.T) { assert.Equal("Cannot parse tsv event - wrong number of fields provided: 1", err4.Error()) } } - -// TestConvertPathToInterfaces tests that convertPathToInterfaces returns integers and strings where appropriate -func TestConvertPathToInterfaces(t *testing.T) { - assert := assert.New(t) - - expected := []interface{}{"one", 2, 3, "four", "five", 6} - - res := convertPathToInterfaces([]string{"one", "2", "3", "four", "five", "6"}) - - assert.Equal(expected, res) -} diff --git a/pkg/transform/transform_test_variables.go b/pkg/transform/transform_test_variables.go index 6464bd01..0e48b5bd 100644 --- a/pkg/transform/transform_test_variables.go +++ b/pkg/transform/transform_test_variables.go @@ -12,13 +12,12 @@ import ( "github.com/snowplow-devops/stream-replicator/pkg/models" ) -var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 
{"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) +var snowplowTsv1 = []byte(`test-data1 pc 2019-05-10 14:40:37.436 2019-05-10 14:40:35.972 2019-05-10 14:40:35.551 unstruct e9234345-f042-46ad-b1aa-424464066a33 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 d26822f5-52cc-4292-8f77-14ef6b7a27e2 {"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0","data":{"schema":"iglu:com.snowplowanalytics.snowplow/add_to_cart/jsonschema/1-0-0","data":{"sku":"item41","quantity":2,"unitPrice":32.4,"currency":"GBP"}}} python-requests/2.21.0 2019-05-10 14:40:35.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:com.acme/justInts/jsonschema/1-0-0", "data":{"integerField": 0}},{"schema":"iglu:com.acme/justInts/jsonschema/1-0-0", "data":{"integerField": 1}},{"schema":"iglu:com.acme/justInts/jsonschema/1-0-0", "data":{"integerField": 2}},{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 
2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:35.972 com.snowplowanalytics.snowplow add_to_cart jsonschema 1-0-0 `) var spTsv1Parsed, _ = analytics.ParseEvent(string(snowplowTsv1)) -var snowplowJSON1 = []byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) +var snowplowJSON1 = 
[]byte(`{"app_id":"test-data1","collector_tstamp":"2019-05-10T14:40:35.972Z","contexts_com_acme_just_ints_1":[{"integerField":0},{"integerField":1},{"integerField":2}],"contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:35.972Z","dvce_created_tstamp":"2019-05-10T14:40:35.551Z","dvce_sent_tstamp":"2019-05-10T14:40:35Z","etl_tstamp":"2019-05-10T14:40:37.436Z","event":"unstruct","event_format":"jsonschema","event_id":"e9234345-f042-46ad-b1aa-424464066a33","event_name":"add_to_cart","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"d26822f5-52cc-4292-8f77-14ef6b7a27e2","platform":"pc","unstruct_event_com_snowplowanalytics_snowplow_add_to_cart_1":{"currency":"GBP","quantity":2,"sku":"item41","unitPrice":32.4},"user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) var snowplowTsv2 = []byte(`test-data2 pc 2019-05-10 14:40:32.392 2019-05-10 14:40:31.105 2019-05-10 14:40:30.218 transaction_item 5071169f-3050-473f-b03f-9748319b1ef2 py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 68220ade-307b-4898-8e25-c4c8ac92f1d7 transaction item58 35.87 1 python-requests/2.21.0 2019-05-10 14:40:30.000 
{"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??"}}]} 2019-05-10 14:40:31.105 com.snowplowanalytics.snowplow transaction_item jsonschema 1-0-0 `) var spTsv2Parsed, _ = analytics.ParseEvent(string(snowplowTsv2)) var snowplowJSON2 = []byte(`{"app_id":"test-data2","collector_tstamp":"2019-05-10T14:40:31.105Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??"}],"derived_tstamp":"2019-05-10T14:40:31.105Z","dvce_created_tstamp":"2019-05-10T14:40:30.218Z","dvce_sent_tstamp":"2019-05-10T14:40:30Z","etl_tstamp":"2019-05-10T14:40:32.392Z","event":"transaction_item","event_format":"jsonschema","event_id":"5071169f-3050-473f-b03f-9748319b1ef2","event_name":"transaction_item","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"68220ade-307b-4898-8e25-c4c8ac92f1d7","platform":"pc","ti_orderid":"transaction\u003cbuilt-in function input\u003e","ti_price":35.87,"ti_quantity":1,"ti_sku":"item58","user_id":"user\u003cbuilt-in 
function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) - var snowplowTsv3 = []byte(`test-data3 pc 2019-05-10 14:40:30.836 2019-05-10 14:40:29.576 2019-05-10 14:40:29.204 page_view e8aef68d-8533-45c6-a672-26a0f01be9bd py-0.8.2 ssc-0.15.0-googlepubsub beam-enrich-0.2.0-common-0.36.0 user 18.194.133.57 b66c4a12-8584-4c7a-9a5d-7c96f59e2556 www.demo-site.com/campaign-landing-page landing-page 80 www.demo-site.com/campaign-landing-page python-requests/2.21.0 2019-05-10 14:40:29.000 {"schema":"iglu:com.snowplowanalytics.snowplow/contexts/jsonschema/1-0-1","data":[{"schema":"iglu:nl.basjes/yauaa_context/jsonschema/1-0-0","data":{"deviceBrand":"Unknown","deviceName":"Unknown","operatingSystemName":"Unknown","agentVersionMajor":"2","layoutEngineVersionMajor":"??","deviceClass":"Unknown","agentNameVersionMajor":"python-requests 2","operatingSystemClass":"Unknown","layoutEngineName":"Unknown","agentName":"python-requests","agentVersion":"2.21.0","layoutEngineClass":"Unknown","agentNameVersion":"python-requests 2.21.0","operatingSystemVersion":"??","agentClass":"Special","layoutEngineVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}}]} 2019-05-10 14:40:29.576 com.snowplowanalytics.snowplow page_view jsonschema 1-0-0 `) var spTsv3Parsed, _ = analytics.ParseEvent(string(snowplowTsv3)) var snowplowJSON3 = []byte(`{"app_id":"test-data3","collector_tstamp":"2019-05-10T14:40:29.576Z","contexts_nl_basjes_yauaa_context_1":[{"agentClass":"Special","agentName":"python-requests","agentNameVersion":"python-requests 2.21.0","agentNameVersionMajor":"python-requests 
2","agentVersion":"2.21.0","agentVersionMajor":"2","deviceBrand":"Unknown","deviceClass":"Unknown","deviceName":"Unknown","layoutEngineClass":"Unknown","layoutEngineName":"Unknown","layoutEngineVersion":"??","layoutEngineVersionMajor":"??","operatingSystemClass":"Unknown","operatingSystemName":"Unknown","operatingSystemVersion":"??","test1":{"test2":[{"test3":"testValue"}]}}],"derived_tstamp":"2019-05-10T14:40:29.576Z","dvce_created_tstamp":"2019-05-10T14:40:29.204Z","dvce_sent_tstamp":"2019-05-10T14:40:29Z","etl_tstamp":"2019-05-10T14:40:30.836Z","event":"page_view","event_format":"jsonschema","event_id":"e8aef68d-8533-45c6-a672-26a0f01be9bd","event_name":"page_view","event_vendor":"com.snowplowanalytics.snowplow","event_version":"1-0-0","network_userid":"b66c4a12-8584-4c7a-9a5d-7c96f59e2556","page_title":"landing-page","page_url":"www.demo-site.com/campaign-landing-page","page_urlpath":"www.demo-site.com/campaign-landing-page","page_urlport":80,"platform":"pc","user_id":"user\u003cbuilt-in function input\u003e","user_ipaddress":"18.194.133.57","useragent":"python-requests/2.21.0","v_collector":"ssc-0.15.0-googlepubsub","v_etl":"beam-enrich-0.2.0-common-0.36.0","v_tracker":"py-0.8.2"}`) diff --git a/pkg/transform/transformconfig/transform_config.go b/pkg/transform/transformconfig/transform_config.go index 362f153f..fd4d8ee2 100644 --- a/pkg/transform/transformconfig/transform_config.go +++ b/pkg/transform/transformconfig/transform_config.go @@ -20,10 +20,15 @@ import ( // Transformation represents a transformation's configuration type Transformation struct { - Description string `hcl:"description,optional"` - Field string `hcl:"field,optional"` - Regex string `hcl:"regex,optional"` - RegexTimeout int `hcl:"regex_timeout,optional"` + // For native filters + Description string `hcl:"description,optional"` + UnstructEventName string `hcl:"unstruct_event_name,optional"` + UnstructEventVersionRegex string `hcl:"unstruct_event_version_regex,optional"` + ContextFullName 
string `hcl:"context_full_name,optional"` + CustomFieldPath string `hcl:"custom_field_path,optional"` + AtomicField string `hcl:"atomic_field,optional"` + Regex string `hcl:"regex,optional"` + RegexTimeout int `hcl:"regex_timeout,optional"` // for JS and Lua transformations SourceB64 string `hcl:"source_b64,optional"` TimeoutSec int `hcl:"timeout_sec,optional"` @@ -76,55 +81,55 @@ func ValidateTransformations(transformations []*Transformation) []error { case "spEnrichedToJson": continue case "spEnrichedSetPk": - if transformation.Field == `` { - validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedSetPk, empty field`, idx)) + if transformation.AtomicField == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedSetPk, empty atomic field`, idx)) continue } case "spEnrichedFilter": - if transformation.Field != `` && transformation.Regex != `` { + if transformation.AtomicField == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty atomic field`, idx)) + } + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty regex`, idx)) + } else { _, err := regexp.Compile(transformation.Regex) if err != nil { validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, regex does not compile. 
error: %v`, idx, err)) - continue } - continue } - if transformation.Field == `` { - validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty field`, idx)) + continue + case "spEnrichedFilterContext": + if transformation.ContextFullName == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty context full name`, idx)) } - if transformation.Regex == `` { - validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilter, empty regex`, idx)) + if transformation.CustomFieldPath == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty custom field path`, idx)) } - case "spEnrichedFilterContext": - if transformation.Field != `` && transformation.Regex != `` { + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty regex`, idx)) + } else { _, err := regexp.Compile(transformation.Regex) if err != nil { validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, regex does not compile. 
error: %v`, idx, err)) - continue } - continue } - if transformation.Field == `` { - validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty field`, idx)) + continue + case "spEnrichedFilterUnstructEvent": + if transformation.CustomFieldPath == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty custom field path`, idx)) } - if transformation.Regex == `` { - validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterContext, empty regex`, idx)) + if transformation.UnstructEventName == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty event name`, idx)) } - case "spEnrichedFilterUnstructEvent": - if transformation.Field != `` && transformation.Regex != `` { + if transformation.Regex == `` { + validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty regex`, idx)) + } else { _, err := regexp.Compile(transformation.Regex) if err != nil { validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, regex does not compile. 
error: %v`, idx, err)) - continue } - continue - } - if transformation.Field == `` { - validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty field`, idx)) - } - if transformation.Regex == `` { - validationErrors = append(validationErrors, fmt.Errorf(`validation error #%d spEnrichedFilterUnstructEvent, empty regex`, idx)) } + continue case "lua": if transformation.Engine.SmokeTest(`main`) != nil { validationErrors = append(validationErrors, fmt.Errorf(`validation error in lua transformation #%d, main() smoke test failed`, idx)) @@ -224,21 +229,21 @@ func GetTransformations(c *config.Config) (transform.TransformationApplyFunction case "spEnrichedToJson": funcs = append(funcs, transform.SpEnrichedToJSON) case "spEnrichedSetPk": - funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(transformation.Field)) + funcs = append(funcs, transform.NewSpEnrichedSetPkFunction(transformation.AtomicField)) case "spEnrichedFilter": - filterFunc, err := transform.NewSpEnrichedFilterFunction(transformation.Field, transformation.Regex, transformation.RegexTimeout) + filterFunc, err := transform.NewSpEnrichedFilterFunction(transformation.AtomicField, transformation.Regex, transformation.RegexTimeout) if err != nil { return nil, err } funcs = append(funcs, filterFunc) case "spEnrichedFilterContext": - filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(transformation.Field, transformation.Regex, transformation.RegexTimeout) + filterFunc, err := transform.NewSpEnrichedFilterFunctionContext(transformation.ContextFullName, transformation.CustomFieldPath, transformation.Regex, transformation.RegexTimeout) if err != nil { return nil, err } funcs = append(funcs, filterFunc) case "spEnrichedFilterUnstructEvent": - filterFunc, err := transform.NewSpEnrichedFilterFunctionUnstructEvent(transformation.Field, transformation.Regex, transformation.RegexTimeout) + filterFunc, err := 
transform.NewSpEnrichedFilterFunctionUnstructEvent(transformation.UnstructEventName, transformation.UnstructEventVersionRegex, transformation.CustomFieldPath, transformation.Regex, transformation.RegexTimeout) if err != nil { return nil, err } diff --git a/pkg/transform/transformconfig/transform_config_test.go b/pkg/transform/transformconfig/transform_config_test.go index b442a9dd..c42c243e 100644 --- a/pkg/transform/transformconfig/transform_config_test.go +++ b/pkg/transform/transformconfig/transform_config_test.go @@ -160,8 +160,8 @@ function notMain(x) { { Name: "spEnrichedSetPk success", Transformations: []*Transformation{{ - Name: "spEnrichedSetPk", - Field: `app_id`, + Name: "spEnrichedSetPk", + AtomicField: `app_id`, }}, }, { @@ -169,106 +169,111 @@ function notMain(x) { Transformations: []*Transformation{{ Name: "spEnrichedSetPk", }}, - ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedSetPk, empty field")}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedSetPk, empty atomic field")}, }, { Name: "spEnrichedFilter success", Transformations: []*Transformation{{ - Name: "spEnrichedFilter", - Field: "app_id", - Regex: "test.+", + Name: "spEnrichedFilter", + AtomicField: "app_id", + Regex: "test.+", }}, }, { Name: "spEnrichedFilter regexp does not compile", Transformations: []*Transformation{{ - Name: "spEnrichedFilter", - Field: "app_id", - Regex: "?(?=-)", + Name: "spEnrichedFilter", + AtomicField: "app_id", + Regex: "?(?=-)", }}, ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, }, { - Name: "spEnrichedFilter empty field", + Name: "spEnrichedFilter empty atomic field", Transformations: []*Transformation{{ Name: "spEnrichedFilter", Regex: "test.+", }}, - ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, empty field")}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, empty atomic field")}, }, { Name: "spEnrichedFilter empty regex", Transformations: []*Transformation{{ - Name: "spEnrichedFilter", - Field: "app_id", + Name: "spEnrichedFilter", + AtomicField: "app_id", }}, ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilter, empty regex")}, }, { Name: "spEnrichedFilterContext success", Transformations: []*Transformation{{ - Name: "spEnrichedFilterContext", - Field: "contexts_nl_basjes_yauaa_context_1.test1.test2[0]", - Regex: "test.+", + Name: "spEnrichedFilterContext", + ContextFullName: "contexts_nl_basjes_yauaa_context_1", + CustomFieldPath: "test1.test2[0]", + Regex: "test.+", }}, }, { Name: "spEnrichedFilterContext regexp does not compile", Transformations: []*Transformation{{ - Name: "spEnrichedFilterContext", - Field: "contexts_nl_basjes_yauaa_context_1.test1.test2[0]", - Regex: "?(?=-)", + Name: "spEnrichedFilterContext", + ContextFullName: "contexts_nl_basjes_yauaa_context_1", + CustomFieldPath: "test1.test2[0]", + Regex: "?(?=-)", }}, ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, }, { - Name: "spEnrichedFilterContext empty field", + Name: "spEnrichedFilterContext empty custom field path", Transformations: []*Transformation{{ Name: "spEnrichedFilterContext", Regex: "test.+", }}, - ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, empty field")}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, empty context full name"), fmt.Errorf("validation error #0 spEnrichedFilterContext, empty custom field path")}, }, { Name: "spEnrichedFilterContext empty regex", Transformations: []*Transformation{{ - Name: "spEnrichedFilterContext", - Field: "contexts_nl_basjes_yauaa_context_1.test1.test2[0]", + Name: "spEnrichedFilterContext", + ContextFullName: "contexts_nl_basjes_yauaa_context_1", + CustomFieldPath: "test1.test2[0]", }}, ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterContext, empty regex")}, }, { Name: "spEnrichedFilterUnstructEvent success", Transformations: []*Transformation{{ - Name: "spEnrichedFilterUnstructEvent", - Field: "unstruct_event_add_to_cart_1.sku", - Regex: "test.+", + Name: "spEnrichedFilterUnstructEvent", + CustomFieldPath: "sku", + Regex: "test.+", + UnstructEventName: "add_to_cart", }}, }, { Name: "spEnrichedFilterUnstructEvent regexp does not compile", Transformations: []*Transformation{{ - Name: "spEnrichedFilterUnstructEvent", - Field: "unstruct_event_add_to_cart_1.sku", - Regex: "?(?=-)", + Name: "spEnrichedFilterUnstructEvent", + CustomFieldPath: "sku", + Regex: "?(?=-)", + UnstructEventName: "add_to_cart", }}, ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, regex does not compile. 
error: error parsing regexp: missing argument to repetition operator: `?`")}, }, { - Name: "spEnrichedFilterUnstructEvent empty field", + Name: "spEnrichedFilterUnstructEvent empty custom field path and event name", Transformations: []*Transformation{{ Name: "spEnrichedFilterUnstructEvent", Regex: "test.+", }}, - ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty field")}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty custom field path"), fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty event name")}, }, { - Name: "spEnrichedFilterUnstructEvent empty regex", + Name: "spEnrichedFilterUnstructEvent empty regex and event name", Transformations: []*Transformation{{ - Name: "spEnrichedFilterUnstructEvent", - Field: "unstruct_event_add_to_cart_1.sku", + Name: "spEnrichedFilterUnstructEvent", + CustomFieldPath: "sku", }}, - ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty regex")}, + ExpectedErrs: []error{fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty event name"), fmt.Errorf("validation error #0 spEnrichedFilterUnstructEvent, empty regex")}, }, { Name: "lua success", @@ -321,8 +326,8 @@ function notMain(x) { }, ExpectedErrs: []error{ fmt.Errorf("validation error in js transformation #0, main() smoke test failed"), - fmt.Errorf("validation error #1 spEnrichedFilter, empty field"), - fmt.Errorf("validation error #3 spEnrichedSetPk, empty field"), + fmt.Errorf("validation error #1 spEnrichedFilter, empty atomic field"), + fmt.Errorf("validation error #3 spEnrichedSetPk, empty atomic field"), }, }, } @@ -332,10 +337,9 @@ function notMain(x) { assert := assert.New(t) valErrs := ValidateTransformations(tt.Transformations) - if tt.ExpectedErrs != nil { for idx, valErr := range valErrs { - assert.Equal(valErr.Error(), tt.ExpectedErrs[idx].Error()) + assert.Equal(tt.ExpectedErrs[idx].Error(), valErr.Error()) } } 
else { assert.Nil(valErrs) From e08611eced9124423f0f8e628868508320ca9f8c Mon Sep 17 00:00:00 2001 From: colmsnowplow Date: Fri, 22 Jul 2022 18:34:44 +0100 Subject: [PATCH 25/25] Prepared for release --- CHANGELOG | 27 +++++++++++++++++++++++++++ README.md | 2 +- VERSION | 2 +- cmd/constants.go | 2 +- 4 files changed, 30 insertions(+), 3 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 57a13a88..d46d7cd8 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,30 @@ +Version 1.0.0 (2022-08-15) +-------------------------- +Refactor v1 release filters (#192) +Remove support for GCP auth via env vars (#181) +Bump dependencies (#120) +Fix import order (#175) +Allow env var configuration of updated transformation config (#174) +Fix typo in statsd reporting (#158) +Cleanup Makefile (#112) +Make setting of EventHub Partition Key configurable (#148) +Fix latency reporting with no transformations (#108) +Rationalise transformations and transformation config (#169) +Resolve CI caching errors (#164) +Bump to Go 1.18 (#163) +Bump analytics SDK version to v0.3.0 (#131) +Fix bug in makefile that prevents integration-down from completing (#162) +Revamp unit testing project-wide (#129) +Make anything that doesn't need to be exported private (#111) +Add custom transformation layer (#146) +Fail tests on NewConfig error (#145) +Remove AWS Lambda and GCP Cloudfunctions builds (#140) +Add telemetry (#124) +Extend filtering to use custom data (#176) +Use Snyk test to block release if there are vulnerabilities (#119) +Clean up tls configuration (#177) +Allow configuration from a file (#105) + Version 0.8.1 (2022-06-07) -------------------------- Update Sarama package to 1.34 for kafka v3 (#133) diff --git a/README.md b/README.md index fcc32981..135125d7 100644 --- a/README.md +++ b/README.md @@ -60,5 +60,5 @@ Unauthorized copying of this project via any medium is strictly prohibited. Copyright (c) 2020-2022 Snowplow Analytics Ltd. All rights reserved. 
-[release-image]: http://img.shields.io/badge/golang-0.8.1-6ad7e5.svg?style=flat +[release-image]: http://img.shields.io/badge/golang-1.0.0-6ad7e5.svg?style=flat [releases]: https://github.com/snowplow-devops/stream-replicator/releases/ diff --git a/VERSION b/VERSION index c18d72be..afaf360d 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.8.1 \ No newline at end of file +1.0.0 \ No newline at end of file diff --git a/cmd/constants.go b/cmd/constants.go index fa33693d..c2989eee 100644 --- a/cmd/constants.go +++ b/cmd/constants.go @@ -8,7 +8,7 @@ package cmd const ( // AppVersion is the current version of the replicator - AppVersion = "0.8.1" + AppVersion = "1.0.0" // AppName is the name of the application to use in logging / places that require the artifact AppName = "stream-replicator"