From d68fa14f10dad19c09742d500f483751e1857e69 Mon Sep 17 00:00:00 2001
From: Brett Hoerner
Date: Tue, 10 Oct 2023 08:13:05 -0600
Subject: [PATCH] chore(plugin-server): run node-rdkafka with cooperative rebalancing patched in (#17747)

* Remove node-rdkafka-acosom
* Add node-rdkafka
* Replace node-rdkafka-acosom imports with node-rdkafka
* Patch node-rdkafka with changes from https://github.com/PostHog/node-rdkafka/
* Add patch directions to README
---
 plugin-server/README.md                       |   25 +
 plugin-server/functional_tests/kafka.ts       |    2 +-
 plugin-server/package.json                    |    5 +-
 .../patches/node-rdkafka@2.17.0.patch         | 1081 +++++++++++++++++
 plugin-server/pnpm-lock.yaml                  |  493 +-------
 plugin-server/src/kafka/admin.ts              |    2 +-
 plugin-server/src/kafka/batch-consumer.ts     |    2 +-
 plugin-server/src/kafka/config.ts             |    2 +-
 plugin-server/src/kafka/consumer.ts           |    2 +-
 plugin-server/src/kafka/producer.ts           |    2 +-
 .../analytics-events-ingestion-consumer.ts    |    2 +-
 ...cs-events-ingestion-historical-consumer.ts |   14 +-
 ...tics-events-ingestion-overflow-consumer.ts |    2 +-
 .../batch-processing/each-batch-ingestion.ts  |    2 +-
 .../main/ingestion-queues/kafka-metrics.ts    |    2 +-
 .../src/main/ingestion-queues/kafka-queue.ts  |    2 +-
 .../services/console-logs-ingester.ts         |    2 +-
 .../services/offset-high-water-marker.ts      |    2 +-
 .../services/partition-locker.ts              |    8 +-
 .../services/replay-events-ingester.ts        |    2 +-
 .../session-recordings-consumer-v1.ts         |    2 +-
 .../session-recordings-consumer-v2.ts         |    2 +-
 .../session-recording/types.ts                |    2 +-
 .../src/utils/db/kafka-producer-wrapper.ts    |    2 +-
 plugin-server/src/utils/event.ts              |    2 +-
 .../main/ingestion-queues/kafka-queue.test.ts |    2 +-
 .../run-async-handlers-event-pipeline.test.ts |    2 +-
 .../session-recording/fixtures.ts             |    2 +-
 .../session-recordings-consumer-v1.test.ts    |    2 +-
 .../session-recordings-consumer-v2.test.ts    |    2 +-
 30 files changed, 1208 insertions(+), 466 deletions(-)
 create mode 100644 plugin-server/patches/node-rdkafka@2.17.0.patch

diff --git a/plugin-server/README.md b/plugin-server/README.md
index fb3a5c69121d7..801be0db1fe7b 100644
--- a/plugin-server/README.md
+++ b/plugin-server/README.md
@@ -181,6 +181,31 @@ New functions called here are:
 > Note:
 > An `organization_id` is tied to a _company_ and its _installed plugins_, a `team_id` is tied to a _project_ and its _plugin configs_ (enabled/disabled+extra config).
 
+### Patching node-rdkafka
+
+We carry a node-rdkafka patch that adds cooperative rebalancing. To regenerate this patch:
+
+    # set up a local node-rdkafka clone
+    git clone git@github.com:PostHog/node-rdkafka.git
+    cd node-rdkafka
+    git remote add blizzard git@github.com:Blizzard/node-rdkafka.git
+    git fetch blizzard
+
+    # generate the diff
+    git diff blizzard/master > ~/node-rdkafka.diff
+
+    # run this in the plugin-server directory; it prints a temporary working directory
+    pnpm patch node-rdkafka@2.17.0
+
+    # enter the temporary directory printed by the previous command
+    cd /private/var/folders/b7/bmmghlpx5qdd6gpyvmz1k1_m0000gn/T/6082767a6879b3b4e11182f944f5cca3
+
+    # if asked, skip any missing files
+    patch -p1 < ~/node-rdkafka.diff
+
+    # back in the plugin-server directory, target the temporary directory from the previous command
+    pnpm patch-commit /private/var/folders/b7/bmmghlpx5qdd6gpyvmz1k1_m0000gn/T/6082767a6879b3b4e11182f944f5cca3
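+
+With the patch applied, a consumer opts into cooperative rebalancing via librdkafka's `partition.assignment.strategy` setting; with `rebalance_cb: true`, the patched default callback then uses `incrementalAssign()`/`incrementalUnassign()` instead of `assign()`/`unassign()`. A minimal sketch (broker, group, and topic names below are placeholders):
+
+```ts
+import { KafkaConsumer } from 'node-rdkafka'
+
+const consumer = new KafkaConsumer(
+    {
+        'metadata.broker.list': 'localhost:9092', // placeholder broker
+        'group.id': 'example-group', // placeholder consumer group
+        // With this strategy the patched default rebalance callback performs
+        // incremental (cooperative) assignment instead of eager assign/unassign.
+        'partition.assignment.strategy': 'cooperative-sticky',
+        rebalance_cb: true,
+    },
+    {}
+)
+
+consumer.on('ready', () => {
+    consumer.subscribe(['example-topic']) // placeholder topic
+    consumer.consume()
+})
+
+consumer.on('data', (message) => {
+    console.log(`${message.topic}[${message.partition}] @ ${message.offset}`)
+})
+
+consumer.connect()
+```
+
 ## Questions?
 
 ### [Join our Slack community. 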
🦔](https://posthog.com/slack) diff --git a/plugin-server/functional_tests/kafka.ts b/plugin-server/functional_tests/kafka.ts index 4c33975ca69ca..c2ab7ac87a6ab 100644 --- a/plugin-server/functional_tests/kafka.ts +++ b/plugin-server/functional_tests/kafka.ts @@ -1,6 +1,6 @@ import { CompressionCodecs, CompressionTypes } from 'kafkajs' import SnappyCodec from 'kafkajs-snappy' -import { HighLevelProducer } from 'node-rdkafka-acosom' +import { HighLevelProducer } from 'node-rdkafka' import { defaultConfig } from '../src/config/config' import { produce as defaultProduce } from '../src/kafka/producer' diff --git a/plugin-server/package.json b/plugin-server/package.json index 2ce0d46d906a2..9b450b8c7a114 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -73,7 +73,7 @@ "lru-cache": "^6.0.0", "luxon": "^1.27.0", "node-fetch": "^2.6.1", - "node-rdkafka-acosom": "2.16.1", + "node-rdkafka": "^2.17.0", "node-schedule": "^2.1.0", "pg": "^8.6.0", "pino": "^8.6.0", @@ -138,7 +138,8 @@ }, "pnpm": { "patchedDependencies": { - "pg@8.10.0": "patches/pg@8.10.0.patch" + "pg@8.10.0": "patches/pg@8.10.0.patch", + "node-rdkafka@2.17.0": "patches/node-rdkafka@2.17.0.patch" } } } diff --git a/plugin-server/patches/node-rdkafka@2.17.0.patch b/plugin-server/patches/node-rdkafka@2.17.0.patch new file mode 100644 index 0000000000000..20d8c5c84381a --- /dev/null +++ b/plugin-server/patches/node-rdkafka@2.17.0.patch @@ -0,0 +1,1081 @@ +diff --git a/Oops.rej b/Oops.rej +new file mode 100644 +index 0000000000000000000000000000000000000000..328fc546fcb400745783b3562f1cb1cb055e1804 +--- /dev/null ++++ b/Oops.rej +@@ -0,0 +1,26 @@ ++@@ -1,25 +0,0 @@ ++-# This workflow will run tests using node and then publish a package to GitHub Packages when a release is created ++-# For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages ++- ++-name: Publish node-rdkafka ++- ++-on: ++- release: ++- types: [created] ++- ++-jobs: ++- publish-npm: ++- runs-on: ubuntu-latest ++- steps: ++- - uses: actions/checkout@v3 ++- with: ++- submodules: recursive ++- - uses: actions/setup-node@v3 ++- with: ++- node-version: 18 ++- registry-url: https://registry.npmjs.org/ ++- cache: "npm" ++- - run: npm ci ++- - run: npm publish ++- env: ++- NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} +diff --git a/docker-compose.yml b/docker-compose.yml +index abe29df25c7312382074b3e15289cb862a340247..8a12f135b4f96e5a0dd25e7c21adb2b3b0e644fa 100644 +--- a/docker-compose.yml ++++ b/docker-compose.yml +@@ -1,23 +1,51 @@ + --- +-zookeeper: +- image: confluentinc/cp-zookeeper +- ports: +- - "2181:2181" +- environment: +- ZOOKEEPER_CLIENT_PORT: 2181 +- ZOOKEEPER_TICK_TIME: 2000 +-kafka: +- image: confluentinc/cp-kafka +- links: +- - zookeeper +- ports: +- - "9092:9092" +- environment: +- KAFKA_BROKER_ID: 1 +- KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' +- KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://localhost:9092' +- KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 +- KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 +- KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 +- KAFKA_DEFAULT_REPLICATION_FACTOR: 1 +- KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 ++version: '2' ++services: ++ zookeeper: ++ image: confluentinc/cp-zookeeper ++ ports: ++ - "2181:2181" ++ networks: ++ - localnet ++ environment: ++ ZOOKEEPER_CLIENT_PORT: 2181 ++ ZOOKEEPER_TICK_TIME: 2000 ++ kafka: ++ image: confluentinc/cp-kafka ++ ports: ++ - 9092:9092 ++ - 9997:9997 ++ networks: ++ - localnet ++ depends_on: ++ - zookeeper ++ environment: ++ 
KAFKA_BROKER_ID: 1 ++ KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' ++ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092 ++ # KAFKA_LISTENERS: PLAINTEXT://kafka0:29092,PLAINTEXT_HOST://localhost:9092 ++ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT ++ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT ++ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 ++ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 ++ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 ++ KAFKA_DEFAULT_REPLICATION_FACTOR: 1 ++ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 ++ kafka-ui: ++ container_name: kafka-ui ++ image: provectuslabs/kafka-ui:latest ++ ports: ++ - 8080:8080 ++ networks: ++ - localnet ++ depends_on: ++ - zookeeper ++ - kafka ++ environment: ++ KAFKA_CLUSTERS_0_NAME: local ++ KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092 ++ KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper:2181 ++networks: ++ localnet: ++ attachable: true ++ +diff --git a/e2e/both.spec.js b/e2e/both.spec.js +index a8289ec319239fb05b1f321bff78a7c9e267f1cf..85ca5ef64264a903a30d5d4bac31f6b1a3792102 100644 +--- a/e2e/both.spec.js ++++ b/e2e/both.spec.js +@@ -163,7 +163,7 @@ describe('Consumer/Producer', function() { + + }); + }); +- ++ + it('should return ready messages on partition EOF', function(done) { + crypto.randomBytes(4096, function(ex, buffer) { + producer.setPollInterval(10); +diff --git a/e2e/consumer.spec.js b/e2e/consumer.spec.js +index a167483f1e0ea15c4edcb368e36640b4349574e8..38fcfd7464afb7df682b7b5f1fdb228b8d280a25 100644 +--- a/e2e/consumer.spec.js ++++ b/e2e/consumer.spec.js +@@ -11,10 +11,12 @@ var crypto = require('crypto'); + + var eventListener = require('./listener'); + ++var cooperativeRebalanceCallback = require('../lib/kafka-consumer').cooperativeRebalanceCallback; + var KafkaConsumer = require('../').KafkaConsumer; ++var AdminClient = require('../').AdminClient; ++var LibrdKafkaError = require('../lib/error'); + + var kafkaBrokerList = process.env.KAFKA_HOST || 'localhost:9092'; +-var topic = 'test'; + + describe('Consumer', function() { + var gcfg; +@@ -31,6 +33,7 @@ describe('Consumer', function() { + }); + + describe('commit', function() { ++ var topic = 'test'; + var consumer; + beforeEach(function(done) { + consumer = new KafkaConsumer(gcfg, {}); +@@ -61,6 +64,7 @@ describe('Consumer', function() { + }); + + describe('committed and position', function() { ++ var topic = 'test'; + var consumer; + beforeEach(function(done) { + consumer = new KafkaConsumer(gcfg, {}); +@@ -95,6 +99,7 @@ describe('Consumer', function() { + }); + + it('after assign, should get committed array without offsets ', function(done) { ++ var topic = 'test'; + consumer.assign([{topic:topic, partition:0}]); + // Defer this for a second + setTimeout(function() { +@@ -110,6 +115,7 @@ describe('Consumer', function() { + }); + + it('after assign and commit, should get committed offsets', function(done) { ++ var topic = 'test'; + consumer.assign([{topic:topic, partition:0}]); + consumer.commitSync({topic:topic, partition:0, offset:1000}); + consumer.committed(null, 1000, function(err, committed) { +@@ -123,6 +129,7 @@ describe('Consumer', function() { + }); + + it('after assign, before consume, position should return an array without offsets', function(done) { ++ var topic = 'test'; + consumer.assign([{topic:topic, partition:0}]); + var position = consumer.position(); + t.equal(Array.isArray(position), true, 'Position should be an array'); +@@ -147,6 +154,7 @@ describe('Consumer', function() { + }); + + describe('seek 
and positioning', function() { ++ var topic = 'test'; + var consumer; + beforeEach(function(done) { + consumer = new KafkaConsumer(gcfg, {}); +@@ -195,6 +203,7 @@ describe('Consumer', function() { + + describe('subscribe', function() { + ++ var topic = 'test'; + var consumer; + beforeEach(function(done) { + consumer = new KafkaConsumer(gcfg, {}); +@@ -232,6 +241,7 @@ describe('Consumer', function() { + + describe('assign', function() { + ++ var topic = 'test'; + var consumer; + beforeEach(function(done) { + consumer = new KafkaConsumer(gcfg, {}); +@@ -266,7 +276,346 @@ describe('Consumer', function() { + }); + }); + ++ describe('assignmentLost', function() { ++ function pollForTopic(client, topicName, maxTries, tryDelay, cb, customCondition) { ++ var tries = 0; ++ ++ function getTopicIfExists(innerCb) { ++ client.getMetadata({ ++ topic: topicName, ++ }, function(metadataErr, metadata) { ++ if (metadataErr) { ++ cb(metadataErr); ++ return; ++ } ++ ++ var topicFound = metadata.topics.filter(function(topicObj) { ++ var foundTopic = topicObj.name === topicName; ++ ++ // If we have a custom condition for "foundedness", do it here after ++ // we make sure we are operating on the correct topic ++ if (foundTopic && customCondition) { ++ return customCondition(topicObj); ++ } ++ return foundTopic; ++ }); ++ ++ if (topicFound.length >= 1) { ++ innerCb(null, topicFound[0]); ++ return; ++ } ++ ++ innerCb(new Error('Could not find topic ' + topicName)); ++ }); ++ } ++ ++ function maybeFinish(err, obj) { ++ if (err) { ++ queueNextTry(); ++ return; ++ } ++ ++ cb(null, obj); ++ } ++ ++ function queueNextTry() { ++ tries += 1; ++ if (tries < maxTries) { ++ setTimeout(function() { ++ getTopicIfExists(maybeFinish); ++ }, tryDelay); ++ } else { ++ cb(new Error('Exceeded max tries of ' + maxTries)); ++ } ++ } ++ ++ queueNextTry(); ++ } ++ ++ var client = AdminClient.create({ ++ 'client.id': 'kafka-test', ++ 'metadata.broker.list': kafkaBrokerList ++ }); ++ var consumer1; ++ var consumer2; ++ var assignmentLostCount = 0; ++ var grp = 'kafka-mocha-grp-' + crypto.randomBytes(20).toString('hex'); ++ var assignment_lost_gcfg = { ++ 'bootstrap.servers': kafkaBrokerList, ++ 'group.id': grp, ++ 'debug': 'all', ++ 'enable.auto.commit': false, ++ 'session.timeout.ms': 10000, ++ 'heartbeat.interval.ms': 1000, ++ 'auto.offset.reset': 'earliest', ++ 'topic.metadata.refresh.interval.ms': 3000, ++ 'partition.assignment.strategy': 'cooperative-sticky', ++ 'rebalance_cb': function(err, assignment) { ++ if ( ++ err.code === LibrdKafkaError.codes.ERR__REVOKE_PARTITIONS && ++ this.assignmentLost() ++ ) { ++ assignmentLostCount++; ++ } ++ cooperativeRebalanceCallback.call(this, err, assignment); ++ } ++ }; ++ ++ beforeEach(function(done) { ++ assignment_lost_gcfg['client.id'] = 1; ++ consumer1 = new KafkaConsumer(assignment_lost_gcfg, {}); ++ eventListener(consumer1); ++ consumer1.connect({ timeout: 2000 }, function(err, info) { ++ t.ifError(err); ++ }); ++ assignment_lost_gcfg['client.id'] = 2; ++ consumer2 = new KafkaConsumer(assignment_lost_gcfg, {}); ++ eventListener(consumer2); ++ consumer2.connect({ timeout: 2000 }, function(err, info) { ++ t.ifError(err); ++ done(); ++ }); ++ }); ++ ++ afterEach(function(done) { ++ consumer1.disconnect(function() { ++ consumer2.disconnect(function() { ++ done(); ++ }); ++ }); ++ }); ++ ++ it('should return false if not lost', function() { ++ t.equal(false, consumer1.assignmentLost()); ++ }); ++ ++ it('should be lost if topic gets deleted', function(cb) { ++ this.timeout(100000); ++ ++ var 
time = Date.now(); ++ var topicName = 'consumer-assignment-lost-test-topic-' + time; ++ var topicName2 = 'consumer-assignment-lost-test-topic2-' + time; ++ var deleting = false; ++ ++ client.createTopic({ ++ topic: topicName, ++ num_partitions: 2, ++ replication_factor: 1 ++ }, function(err) { ++ pollForTopic(consumer1, topicName, 10, 1000, function(err) { ++ t.ifError(err); ++ client.createTopic({ ++ topic: topicName2, ++ num_partitions: 2, ++ replication_factor: 1 ++ }, function(err) { ++ pollForTopic(consumer1, topicName2, 10, 1000, function(err) { ++ t.ifError(err); ++ consumer1.subscribe([topicName, topicName2]); ++ consumer2.subscribe([topicName, topicName2]); ++ consumer1.consume(); ++ consumer2.consume(); ++ var tryDelete = function() { ++ setTimeout(function() { ++ if(consumer1.assignments().length === 2 && ++ consumer2.assignments().length === 2 ++ ) { ++ client.deleteTopic(topicName, function(deleteErr) { ++ t.ifError(deleteErr); ++ }); ++ } else { ++ tryDelete(); ++ } ++ }, 2000); ++ }; ++ tryDelete(); ++ }); ++ }); ++ }); ++ }); ++ ++ var checking = false; ++ setInterval(function() { ++ if (assignmentLostCount >= 2 && !checking) { ++ checking = true; ++ t.equal(assignmentLostCount, 2); ++ client.deleteTopic(topicName2, function(deleteErr) { ++ // Cleanup topics ++ t.ifError(deleteErr); ++ cb(); ++ }); ++ } ++ }, 2000); ++ }); ++ ++ }); ++ ++ describe('incrementalAssign and incrementUnassign', function() { ++ ++ var topic = 'test7'; ++ var consumer; ++ beforeEach(function(done) { ++ consumer = new KafkaConsumer(gcfg, {}); ++ ++ consumer.connect({ timeout: 2000 }, function(err, info) { ++ t.ifError(err); ++ done(); ++ }); ++ ++ eventListener(consumer); ++ }); ++ ++ afterEach(function(done) { ++ consumer.disconnect(function() { ++ done(); ++ }); ++ }); ++ ++ it('should be able to assign an assignment', function() { ++ t.equal(0, consumer.assignments().length); ++ var assignments = [{ topic:topic, partition:0 }]; ++ consumer.assign(assignments); ++ t.equal(1, consumer.assignments().length); ++ t.equal(0, consumer.assignments()[0].partition); ++ t.equal(0, consumer.subscription().length); ++ ++ var additionalAssignment = [{ topic:topic, partition:1 }]; ++ consumer.incrementalAssign(additionalAssignment); ++ t.equal(2, consumer.assignments().length); ++ t.equal(0, consumer.assignments()[0].partition); ++ t.equal(1, consumer.assignments()[1].partition); ++ t.equal(0, consumer.subscription().length); ++ }); ++ ++ it('should be able to revoke an assignment', function() { ++ t.equal(0, consumer.assignments().length); ++ var assignments = [{ topic:topic, partition:0 }, { topic:topic, partition:1 }, { topic:topic, partition:2 }]; ++ consumer.assign(assignments); ++ t.equal(3, consumer.assignments().length); ++ t.equal(0, consumer.assignments()[0].partition); ++ t.equal(1, consumer.assignments()[1].partition); ++ t.equal(2, consumer.assignments()[2].partition); ++ t.equal(0, consumer.subscription().length); ++ ++ var revokedAssignments = [{ topic:topic, partition:2 }]; ++ consumer.incrementalUnassign(revokedAssignments); ++ t.equal(2, consumer.assignments().length); ++ t.equal(0, consumer.assignments()[0].partition); ++ t.equal(1, consumer.assignments()[1].partition); ++ t.equal(0, consumer.subscription().length); ++ }); ++ ++ }); ++ ++ describe('rebalance', function() { ++ ++ var topic = 'test7'; ++ var grp = 'kafka-mocha-grp-' + crypto.randomBytes(20).toString('hex'); ++ var consumer1; ++ var consumer2; ++ var counter = 0; ++ var reblance_gcfg = { ++ 'bootstrap.servers': kafkaBrokerList, 
++ 'group.id': grp, ++ 'debug': 'all', ++ 'enable.auto.commit': false, ++ 'heartbeat.interval.ms': 200, ++ 'rebalance_cb': true ++ }; ++ ++ it('should be able reblance using the eager strategy', function(done) { ++ this.timeout(20000); ++ ++ var isStarted = false; ++ reblance_gcfg['partition.assignment.strategy'] = 'range,roundrobin'; ++ ++ reblance_gcfg['client.id'] = '1'; ++ consumer1 = new KafkaConsumer(reblance_gcfg, {}); ++ reblance_gcfg['client.id'] = '2'; ++ consumer2 = new KafkaConsumer(reblance_gcfg, {}); ++ ++ eventListener(consumer1); ++ eventListener(consumer2); ++ ++ consumer1.connect({ timeout: 2000 }, function(err, info) { ++ t.ifError(err); ++ consumer1.subscribe([topic]); ++ consumer1.on('rebalance', function(err, assignment) { ++ counter++; ++ if (!isStarted) { ++ isStarted = true; ++ consumer2.connect({ timeout: 2000 }, function(err, info) { ++ consumer2.subscribe([topic]); ++ consumer2.consume(); ++ consumer2.on('rebalance', function(err, assignment) { ++ counter++; ++ }); ++ }); ++ } ++ }); ++ consumer1.consume(); ++ }); ++ ++ setTimeout(function() { ++ t.deepStrictEqual(consumer1.assignments(), [ { topic: topic, partition: 0, offset: -1000 } ]); ++ t.deepStrictEqual(consumer2.assignments(), [ { topic: topic, partition: 1, offset: -1000 } ]); ++ t.equal(counter, 4); ++ consumer1.disconnect(function() { ++ consumer2.disconnect(function() { ++ done(); ++ }); ++ }); ++ }, 9000); ++ }); ++ ++ it('should be able reblance using the cooperative incremental strategy', function(cb) { ++ this.timeout(20000); ++ var isStarted = false; ++ reblance_gcfg['partition.assignment.strategy'] = 'cooperative-sticky'; ++ reblance_gcfg['client.id'] = '1'; ++ consumer1 = new KafkaConsumer(reblance_gcfg, {}); ++ reblance_gcfg['client.id'] = '2'; ++ consumer2 = new KafkaConsumer(reblance_gcfg, {}); ++ ++ eventListener(consumer1); ++ eventListener(consumer2); ++ ++ consumer1.connect({ timeout: 2000 }, function(err, info) { ++ t.ifError(err); ++ consumer1.subscribe([topic]); ++ consumer1.on('rebalance', function(err, assignment) { ++ if (!isStarted) { ++ isStarted = true; ++ consumer2.connect({ timeout: 2000 }, function(err, info) { ++ consumer2.subscribe([topic]); ++ consumer2.consume(); ++ consumer2.on('rebalance', function(err, assignment) { ++ counter++; ++ }); ++ }); ++ } ++ }); ++ consumer1.consume(); ++ }); ++ ++ setTimeout(function() { ++ t.equal(consumer1.assignments().length, 1); ++ t.equal(consumer2.assignments().length, 1); ++ t.equal(counter, 8); ++ ++ consumer1.disconnect(function() { ++ consumer2.disconnect(function() { ++ cb(); ++ }); ++ }); ++ }, 9000); ++ }); ++ ++ }); ++ + describe('disconnect', function() { ++ ++ var topic = 'test'; + var tcfg = { 'auto.offset.reset': 'earliest' }; + + it('should happen gracefully', function(cb) { +diff --git a/index.d.ts b/index.d.ts +index d7ce7e61e985ce46ceae2c10329d6448cc487dca..2c7b9a3d40b0547209c2cffe1f4e62d9573ab617 100644 +--- a/index.d.ts ++++ b/index.d.ts +@@ -223,6 +223,12 @@ export class KafkaConsumer extends Client { + consume(cb: (err: LibrdKafkaError, messages: Message[]) => void): void; + consume(): void; + ++ incrementalAssign(assigments: Assignment[]): this; ++ ++ incrementalUnassign(assignments: Assignment[]): this; ++ ++ assignmentLost(): boolean; ++ + getWatermarkOffsets(topic: string, partition: number): WatermarkOffsets; + + offsetsStore(topicPartitions: TopicPartitionOffset[]): any; +diff --git a/lib/index.js b/lib/index.js +index e2e8a9c899700e56b3ddeff84e67ad97206ccabf..ba6d678275101170aedc694fedc489f479b5d05e 100644 
+--- a/lib/index.js ++++ b/lib/index.js +@@ -7,7 +7,7 @@ + * of the MIT license. See the LICENSE.txt file for details. + */ + +-var KafkaConsumer = require('./kafka-consumer'); ++var KafkaConsumer = require('./kafka-consumer').KafkaConsumer; + var Producer = require('./producer'); + var HighLevelProducer = require('./producer/high-level-producer'); + var error = require('./error'); +diff --git a/lib/kafka-consumer.js b/lib/kafka-consumer.js +index c479240f3bab17c68e38623b89ef67546ba59122..97e8458ab28757d013172de31e238ee2ee3f6ebc 100644 +--- a/lib/kafka-consumer.js ++++ b/lib/kafka-consumer.js +@@ -8,8 +8,6 @@ + */ + 'use strict'; + +-module.exports = KafkaConsumer; +- + var Client = require('./client'); + var util = require('util'); + var Kafka = require('../librdkafka'); +@@ -21,6 +19,48 @@ var DEFAULT_CONSUME_LOOP_TIMEOUT_DELAY = 500; + var DEFAULT_CONSUME_TIME_OUT = 1000; + util.inherits(KafkaConsumer, Client); + ++var eagerRebalanceCallback = function(err, assignment) { ++ // Create the librdkafka error ++ err = LibrdKafkaError.create(err); ++ // Emit the event ++ this.emit('rebalance', err, assignment); ++ ++ // That's it ++ try { ++ if (err.code === LibrdKafkaError.codes.ERR__ASSIGN_PARTITIONS) { ++ this.assign(assignment); ++ } else if (err.code === LibrdKafkaError.codes.ERR__REVOKE_PARTITIONS) { ++ this.unassign(); ++ } ++ } catch (e) { ++ // Ignore exceptions if we are not connected ++ if (this.isConnected()) { ++ this.emit('rebalance.error', e); ++ } ++ } ++}; ++ ++var cooperativeRebalanceCallback = function(err, assignment) { ++ // Create the librdkafka error ++ err = LibrdKafkaError.create(err); ++ // Emit the event ++ this.emit('rebalance', err, assignment); ++ ++ // That's it ++ try { ++ if (err.code === LibrdKafkaError.codes.ERR__ASSIGN_PARTITIONS) { ++ this.incrementalAssign(assignment); ++ } else if (err.code === LibrdKafkaError.codes.ERR__REVOKE_PARTITIONS) { ++ this.incrementalUnassign(assignment); ++ } ++ } catch (e) { ++ // Ignore exceptions if we are not connected ++ if (this.isConnected()) { ++ this.emit('rebalance.error', e); ++ } ++ } ++}; ++ + /** + * KafkaConsumer class for reading messages from Kafka + * +@@ -52,26 +92,10 @@ function KafkaConsumer(conf, topicConf) { + + // If rebalance is undefined we don't want any part of this + if (onRebalance && typeof onRebalance === 'boolean') { +- conf.rebalance_cb = function(err, assignment) { +- // Create the librdkafka error +- err = LibrdKafkaError.create(err); +- // Emit the event +- self.emit('rebalance', err, assignment); +- +- // That's it +- try { +- if (err.code === -175 /*ERR__ASSIGN_PARTITIONS*/) { +- self.assign(assignment); +- } else if (err.code === -174 /*ERR__REVOKE_PARTITIONS*/) { +- self.unassign(); +- } +- } catch (e) { +- // Ignore exceptions if we are not connected +- if (self.isConnected()) { +- self.emit('rebalance.error', e); +- } +- } +- }; ++ conf.rebalance_cb = ++ conf['partition.assignment.strategy'] === 'cooperative-sticky' ? ++ cooperativeRebalanceCallback.bind(this) : ++ eagerRebalanceCallback.bind(this); + } else if (onRebalance && typeof onRebalance === 'function') { + /* + * Once this is opted in to, that's it. It's going to manually rebalance +@@ -79,13 +103,13 @@ function KafkaConsumer(conf, topicConf) { + * a way to override them. + */ + +- conf.rebalance_cb = function(err, assignment) { +- // Create the librdkafka error +- err = err ? LibrdKafkaError.create(err) : undefined; ++ conf.rebalance_cb = function(err, assignment) { ++ // Create the librdkafka error ++ err = err ? 
LibrdKafkaError.create(err) : undefined; + +- self.emit('rebalance', err, assignment); +- onRebalance.call(self, err, assignment); +- }; ++ self.emit('rebalance', err, assignment); ++ onRebalance.call(self, err, assignment); ++ }; + } + + // Same treatment for offset_commit_cb +@@ -264,6 +288,19 @@ KafkaConsumer.prototype.assign = function(assignments) { + return this; + }; + ++/** ++ * Incremental assign the consumer specific partitions and topics ++ * ++ * @param {array} assignments - Assignments array. Should contain ++ * objects with topic and partition set. ++ * @return {Client} - Returns itself ++ */ ++ ++KafkaConsumer.prototype.incrementalAssign = function(assignments) { ++ this._client.incrementalAssign(TopicPartition.map(assignments)); ++ return this; ++}; ++ + /** + * Unassign the consumer from its assigned partitions and topics. + * +@@ -275,6 +312,34 @@ KafkaConsumer.prototype.unassign = function() { + return this; + }; + ++/** ++ * Incremental unassign the consumer from specific partitions and topics ++ * ++ * @param {array} assignments - Assignments array. Should contain ++ * objects with topic and partition set. ++ * @return {Client} - Returns itself ++ */ ++ ++KafkaConsumer.prototype.incrementalUnassign = function(assignments) { ++ this._client.incrementalUnassign(TopicPartition.map(assignments)); ++ return this; ++}; ++ ++/** ++ * Get the assignment lost state. ++ * Examples for an assignment to be lost: ++ * - Unsuccessful heartbeats ++ * - Unknown member id during heartbeats ++ * - Illegal generation during heartbeats ++ * - Static consumer fenced by other consumer with same group.instance.id ++ * - Max. poll interval exceeded ++ * - Subscribed topic(s) no longer exist during meta data updates ++ * @return {boolean} - Returns true if the assignment is lost ++ */ ++ ++KafkaConsumer.prototype.assignmentLost = function() { ++ return this._client.assignmentLost(); ++}; + + /** + * Get the assignments for the consumer +@@ -654,3 +719,9 @@ KafkaConsumer.prototype.pause = function(topicPartitions) { + + return this._errorWrap(this._client.pause(topicPartitions), true); + }; ++ ++module.exports = { ++ KafkaConsumer: KafkaConsumer, ++ eagerRebalanceCallback: eagerRebalanceCallback, ++ cooperativeRebalanceCallback: cooperativeRebalanceCallback ++}; +diff --git a/run_docker.sh b/run_docker.sh +index a6aadbd64609e5d5ae1a80205aac7ce3a49d9345..f817aa976c83b74670c7464099679eb32a390051 100755 +--- a/run_docker.sh ++++ b/run_docker.sh +@@ -21,14 +21,16 @@ topics=( + "test4" + "test5" + "test6" ++ "test7" + ) + + # Run docker-compose exec to make them + for topic in "${topics[@]}" + do + echo "Making topic $topic" ++ [[ "$topic" != "test7" ]] && partitions=1 || partitions=2 + until docker-compose exec kafka \ +- kafka-topics --create --topic $topic --partitions 1 --replication-factor 1 --if-not-exists --zookeeper zookeeper:2181 ++ kafka-topics --create --topic $topic --partitions $partitions --replication-factor 1 --if-not-exists --bootstrap-server localhost:9092 + do + topic_result="$?" 
+ if [ "$topic_result" == "1" ]; then +diff --git a/src/kafka-consumer.cc b/src/kafka-consumer.cc +index 019b0cb6478756120efe9a5f6f1bb4182b4af4ea..3895407788ae31ae38d7707eb63528ebac6e3b24 100644 +--- a/src/kafka-consumer.cc ++++ b/src/kafka-consumer.cc +@@ -179,6 +179,32 @@ Baton KafkaConsumer::Assign(std::vector partitions) { + return Baton(errcode); + } + ++Baton KafkaConsumer::IncrementalAssign( ++ std::vector partitions) { ++ if (!IsConnected()) { ++ return Baton(RdKafka::ERR__STATE, "KafkaConsumer is disconnected"); ++ } ++ ++ RdKafka::KafkaConsumer* consumer = ++ dynamic_cast(m_client); ++ ++ RdKafka::Error *e = consumer->incremental_assign(partitions); ++ ++ if (e) { ++ RdKafka::ErrorCode errcode = e->code(); ++ delete e; ++ return Baton(errcode); ++ } ++ ++ m_partition_cnt += partitions.size(); ++ for (auto i = partitions.begin(); i != partitions.end(); ++i) { ++ m_partitions.push_back(*i); ++ } ++ partitions.clear(); ++ ++ return Baton(RdKafka::ERR_NO_ERROR); ++} ++ + Baton KafkaConsumer::Unassign() { + if (!IsClosing() && !IsConnected()) { + return Baton(RdKafka::ERR__STATE); +@@ -195,12 +221,46 @@ Baton KafkaConsumer::Unassign() { + + // Destroy the old list of partitions since we are no longer using it + RdKafka::TopicPartition::destroy(m_partitions); ++ m_partitions.clear(); + + m_partition_cnt = 0; + + return Baton(RdKafka::ERR_NO_ERROR); + } + ++Baton KafkaConsumer::IncrementalUnassign( ++ std::vector partitions) { ++ if (!IsClosing() && !IsConnected()) { ++ return Baton(RdKafka::ERR__STATE); ++ } ++ ++ RdKafka::KafkaConsumer* consumer = ++ dynamic_cast(m_client); ++ ++ RdKafka::Error *e = consumer->incremental_unassign(partitions); ++ if (e) { ++ RdKafka::ErrorCode errcode = e->code(); ++ delete e; ++ return Baton(errcode); ++ } ++ ++ // Destroy the old list of partitions since we are no longer using it ++ RdKafka::TopicPartition::destroy(partitions); ++ ++ m_partitions.erase( ++ std::remove_if( ++ m_partitions.begin(), ++ m_partitions.end(), ++ [&partitions](RdKafka::TopicPartition *x) -> bool { ++ return std::find( ++ partitions.begin(), ++ partitions.end(), x) != partitions.end(); ++ }), ++ m_partitions.end()); ++ m_partition_cnt -= partitions.size(); ++ return Baton(RdKafka::ERR_NO_ERROR); ++} ++ + Baton KafkaConsumer::Commit(std::vector toppars) { + if (!IsConnected()) { + return Baton(RdKafka::ERR__STATE); +@@ -469,6 +529,12 @@ Baton KafkaConsumer::RefreshAssignments() { + } + } + ++bool KafkaConsumer::AssignmentLost() { ++ RdKafka::KafkaConsumer* consumer = ++ dynamic_cast(m_client); ++ return consumer->assignment_lost(); ++} ++ + std::string KafkaConsumer::Name() { + if (!IsConnected()) { + return std::string(""); +@@ -527,8 +593,11 @@ void KafkaConsumer::Init(v8::Local exports) { + Nan::SetPrototypeMethod(tpl, "committed", NodeCommitted); + Nan::SetPrototypeMethod(tpl, "position", NodePosition); + Nan::SetPrototypeMethod(tpl, "assign", NodeAssign); ++ Nan::SetPrototypeMethod(tpl, "incrementalAssign", NodeIncrementalAssign); + Nan::SetPrototypeMethod(tpl, "unassign", NodeUnassign); ++ Nan::SetPrototypeMethod(tpl, "incrementalUnassign", NodeIncrementalUnassign); + Nan::SetPrototypeMethod(tpl, "assignments", NodeAssignments); ++ Nan::SetPrototypeMethod(tpl, "assignmentLost", NodeAssignmentLost); + + Nan::SetPrototypeMethod(tpl, "commit", NodeCommit); + Nan::SetPrototypeMethod(tpl, "commitSync", NodeCommitSync); +@@ -759,6 +828,64 @@ NAN_METHOD(KafkaConsumer::NodeAssign) { + info.GetReturnValue().Set(Nan::True()); + } + 
++NAN_METHOD(KafkaConsumer::NodeIncrementalAssign) { ++ Nan::HandleScope scope; ++ ++ if (info.Length() < 1 || !info[0]->IsArray()) { ++ // Just throw an exception ++ return Nan::ThrowError("Need to specify an array of partitions"); ++ } ++ ++ v8::Local partitions = info[0].As(); ++ std::vector topic_partitions; ++ ++ for (unsigned int i = 0; i < partitions->Length(); ++i) { ++ v8::Local partition_obj_value; ++ if (!( ++ Nan::Get(partitions, i).ToLocal(&partition_obj_value) && ++ partition_obj_value->IsObject())) { ++ Nan::ThrowError("Must pass topic-partition objects"); ++ } ++ ++ v8::Local partition_obj = partition_obj_value.As(); ++ ++ // Got the object ++ int64_t partition = GetParameter(partition_obj, "partition", -1); ++ std::string topic = GetParameter(partition_obj, "topic", ""); ++ ++ if (!topic.empty()) { ++ RdKafka::TopicPartition* part; ++ ++ if (partition < 0) { ++ part = Connection::GetPartition(topic); ++ } else { ++ part = Connection::GetPartition(topic, partition); ++ } ++ ++ // Set the default value to offset invalid. If provided, we will not set ++ // the offset. ++ int64_t offset = GetParameter( ++ partition_obj, "offset", RdKafka::Topic::OFFSET_INVALID); ++ if (offset != RdKafka::Topic::OFFSET_INVALID) { ++ part->set_offset(offset); ++ } ++ ++ topic_partitions.push_back(part); ++ } ++ } ++ ++ KafkaConsumer* consumer = ObjectWrap::Unwrap(info.This()); ++ ++ // Hand over the partitions to the consumer. ++ Baton b = consumer->IncrementalAssign(topic_partitions); ++ ++ if (b.err() != RdKafka::ERR_NO_ERROR) { ++ Nan::ThrowError(RdKafka::err2str(b.err()).c_str()); ++ } ++ ++ info.GetReturnValue().Set(Nan::True()); ++} ++ + NAN_METHOD(KafkaConsumer::NodeUnassign) { + Nan::HandleScope scope; + +@@ -779,6 +906,71 @@ NAN_METHOD(KafkaConsumer::NodeUnassign) { + info.GetReturnValue().Set(Nan::True()); + } + ++NAN_METHOD(KafkaConsumer::NodeIncrementalUnassign) { ++ Nan::HandleScope scope; ++ ++ if (info.Length() < 1 || !info[0]->IsArray()) { ++ // Just throw an exception ++ return Nan::ThrowError("Need to specify an array of partitions"); ++ } ++ ++ v8::Local partitions = info[0].As(); ++ std::vector topic_partitions; ++ ++ for (unsigned int i = 0; i < partitions->Length(); ++i) { ++ v8::Local partition_obj_value; ++ if (!( ++ Nan::Get(partitions, i).ToLocal(&partition_obj_value) && ++ partition_obj_value->IsObject())) { ++ Nan::ThrowError("Must pass topic-partition objects"); ++ } ++ ++ v8::Local partition_obj = partition_obj_value.As(); ++ ++ // Got the object ++ int64_t partition = GetParameter(partition_obj, "partition", -1); ++ std::string topic = GetParameter(partition_obj, "topic", ""); ++ ++ if (!topic.empty()) { ++ RdKafka::TopicPartition* part; ++ ++ if (partition < 0) { ++ part = Connection::GetPartition(topic); ++ } else { ++ part = Connection::GetPartition(topic, partition); ++ } ++ ++ // Set the default value to offset invalid. If provided, we will not set ++ // the offset. ++ int64_t offset = GetParameter( ++ partition_obj, "offset", RdKafka::Topic::OFFSET_INVALID); ++ if (offset != RdKafka::Topic::OFFSET_INVALID) { ++ part->set_offset(offset); ++ } ++ ++ topic_partitions.push_back(part); ++ } ++ } ++ ++ KafkaConsumer* consumer = ObjectWrap::Unwrap(info.This()); ++ // Hand over the partitions to the consumer. 
++ Baton b = consumer->IncrementalUnassign(topic_partitions); ++ ++ if (b.err() != RdKafka::ERR_NO_ERROR) { ++ Nan::ThrowError(RdKafka::err2str(b.err()).c_str()); ++ } ++ ++ info.GetReturnValue().Set(Nan::True()); ++} ++ ++NAN_METHOD(KafkaConsumer::NodeAssignmentLost) { ++ Nan::HandleScope scope; ++ ++ KafkaConsumer* consumer = ObjectWrap::Unwrap(info.This()); ++ bool b = consumer->AssignmentLost(); ++ info.GetReturnValue().Set(Nan::New(b)); ++} ++ + NAN_METHOD(KafkaConsumer::NodeUnsubscribe) { + Nan::HandleScope scope; + +diff --git a/src/kafka-consumer.h b/src/kafka-consumer.h +index c91590ecc5d47c1d7a2a93c3e46b4b4657525df0..43e016db4ec47121051cb282f718a2b3156aacd4 100644 +--- a/src/kafka-consumer.h ++++ b/src/kafka-consumer.h +@@ -72,7 +72,10 @@ class KafkaConsumer : public Connection { + int AssignedPartitionCount(); + + Baton Assign(std::vector); ++ Baton IncrementalAssign(std::vector); + Baton Unassign(); ++ Baton IncrementalUnassign(std::vector); ++ bool AssignmentLost(); + + Baton Seek(const RdKafka::TopicPartition &partition, int timeout_ms); + +@@ -105,7 +108,10 @@ class KafkaConsumer : public Connection { + static NAN_METHOD(NodeSubscribe); + static NAN_METHOD(NodeDisconnect); + static NAN_METHOD(NodeAssign); ++ static NAN_METHOD(NodeIncrementalAssign); + static NAN_METHOD(NodeUnassign); ++ static NAN_METHOD(NodeIncrementalUnassign); ++ static NAN_METHOD(NodeAssignmentLost); + static NAN_METHOD(NodeAssignments); + static NAN_METHOD(NodeUnsubscribe); + static NAN_METHOD(NodeCommit); +diff --git a/test/consumer.spec.js b/test/consumer.spec.js +index 40b52ee4e1c718890f43b91adfb543319d5cc342..5e1a5655be0d2598163478aaaae936213c3bf27c 100644 +--- a/test/consumer.spec.js ++++ b/test/consumer.spec.js +@@ -77,7 +77,7 @@ module.exports = { + }); + }, + 'has necessary bindings for librdkafka 1:1 binding': function() { +- var methods = ['assign', 'unassign', 'subscribe']; ++ var methods = ['assign', 'unassign', 'subscribe', 'incrementalAssign', 'incrementalUnassign', 'assignmentLost']; + methods.forEach(function(m) { + t.equal(typeof(client[m]), 'function', 'Client is missing ' + m + ' method'); + }); +diff --git a/test/kafka-consumer.spec.js b/test/kafka-consumer.spec.js +index 0f4de520ed6b8a06dfe355e0bb9091273def98a5..ada72a7e621ea5433f194ab3d22eef326082c155 100644 +--- a/test/kafka-consumer.spec.js ++++ b/test/kafka-consumer.spec.js +@@ -7,7 +7,8 @@ + * of the MIT license. See the LICENSE.txt file for details. 
+ */ + +-var KafkaConsumer = require('../lib/kafka-consumer'); ++var KafkaConsumer = require('../lib/kafka-consumer').KafkaConsumer; ++ + var t = require('assert'); + + var client; diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml index e19f6d0c09d0d..fa8870ad4c13d 100644 --- a/plugin-server/pnpm-lock.yaml +++ b/plugin-server/pnpm-lock.yaml @@ -5,6 +5,9 @@ settings: excludeLinksFromLockfile: false patchedDependencies: + node-rdkafka@2.17.0: + hash: p4aetcvho53cvjti6c3zi7tfaq + path: patches/node-rdkafka@2.17.0.patch pg@8.10.0: hash: ju7a73s3qkmrf666u3aacfrooi path: patches/pg@8.10.0.patch @@ -121,9 +124,9 @@ dependencies: node-fetch: specifier: ^2.6.1 version: 2.6.9 - node-rdkafka-acosom: - specifier: 2.16.1 - version: 2.16.1(ts-node@10.9.1)(typescript@4.9.5) + node-rdkafka: + specifier: ^2.17.0 + version: 2.17.0(patch_hash=p4aetcvho53cvjti6c3zi7tfaq) node-schedule: specifier: ^2.1.0 version: 2.1.1 @@ -2644,6 +2647,7 @@ packages: engines: {node: '>=12'} dependencies: '@jridgewell/trace-mapping': 0.3.9 + dev: true /@dabh/diagnostics@2.0.3: resolution: {integrity: sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==} @@ -3236,10 +3240,12 @@ packages: get-package-type: 0.1.0 js-yaml: 3.14.1 resolve-from: 5.0.0 + dev: true /@istanbuljs/schema@0.1.3: resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} engines: {node: '>=8'} + dev: true /@jest/console@28.1.3: resolution: {integrity: sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw==} @@ -3488,6 +3494,7 @@ packages: /@jridgewell/resolve-uri@3.1.1: resolution: {integrity: sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==} engines: {node: '>=6.0.0'} + dev: true /@jridgewell/set-array@1.1.2: resolution: {integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==} @@ -3510,6 +3517,7 @@ packages: dependencies: '@jridgewell/resolve-uri': 3.1.1 '@jridgewell/sourcemap-codec': 1.4.15 + dev: true /@jsdoc/salty@0.2.5: resolution: {integrity: sha512-TfRP53RqunNe2HBobVBJ0VLhK1HbfvBYeTC1ahnN64PWvyYyGebmMiPkuwvD9fpw2ZbkoPb8Q7mwy0aR8Z9rvw==} @@ -3813,6 +3821,7 @@ packages: cpu: [arm64] os: [darwin] requiresBuild: true + dev: true optional: true /@swc/core-darwin-x64@1.3.55: @@ -3821,6 +3830,7 @@ packages: cpu: [x64] os: [darwin] requiresBuild: true + dev: true optional: true /@swc/core-linux-arm-gnueabihf@1.3.55: @@ -3829,6 +3839,7 @@ packages: cpu: [arm] os: [linux] requiresBuild: true + dev: true optional: true /@swc/core-linux-arm64-gnu@1.3.55: @@ -3837,6 +3848,7 @@ packages: cpu: [arm64] os: [linux] requiresBuild: true + dev: true optional: true /@swc/core-linux-arm64-musl@1.3.55: @@ -3845,6 +3857,7 @@ packages: cpu: [arm64] os: [linux] requiresBuild: true + dev: true optional: true /@swc/core-linux-x64-gnu@1.3.55: @@ -3853,6 +3866,7 @@ packages: cpu: [x64] os: [linux] requiresBuild: true + dev: true optional: true /@swc/core-linux-x64-musl@1.3.55: @@ -3861,6 +3875,7 @@ packages: cpu: [x64] os: [linux] requiresBuild: true + dev: true optional: true /@swc/core-win32-arm64-msvc@1.3.55: @@ -3869,6 +3884,7 @@ packages: cpu: [arm64] os: [win32] requiresBuild: true + dev: true optional: true /@swc/core-win32-ia32-msvc@1.3.55: @@ -3877,6 +3893,7 @@ packages: cpu: [ia32] os: [win32] requiresBuild: true + dev: true optional: true /@swc/core-win32-x64-msvc@1.3.55: @@ -3885,6 +3902,7 @@ packages: 
cpu: [x64] os: [win32] requiresBuild: true + dev: true optional: true /@swc/core@1.3.55: @@ -3907,6 +3925,7 @@ packages: '@swc/core-win32-arm64-msvc': 1.3.55 '@swc/core-win32-ia32-msvc': 1.3.55 '@swc/core-win32-x64-msvc': 1.3.55 + dev: true /@swc/jest@0.2.26(@swc/core@1.3.55): resolution: {integrity: sha512-7lAi7q7ShTO3E5Gt1Xqf3pIhRbERxR1DUxvtVa9WKzIB+HGQ7wZP5sYx86zqnaEoKKGhmOoZ7gyW0IRu8Br5+A==} @@ -3941,15 +3960,19 @@ packages: /@tsconfig/node10@1.0.9: resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} + dev: true /@tsconfig/node12@1.0.11: resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + dev: true /@tsconfig/node14@1.0.3: resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + dev: true /@tsconfig/node16@1.0.3: resolution: {integrity: sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==} + dev: true /@types/adm-zip@0.4.34: resolution: {integrity: sha512-8ToYLLAYhkRfcmmljrKi22gT2pqu7hGMDtORP1emwIEGmgUTZOsaDjzWFzW5N2frcFRz/50CWt4zA1CxJ73pmQ==} @@ -4528,21 +4551,10 @@ packages: normalize-path: 3.0.0 picomatch: 2.3.1 - /append-transform@2.0.0: - resolution: {integrity: sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==} - engines: {node: '>=8'} - dependencies: - default-require-extensions: 3.0.1 - dev: false - /aproba@2.0.0: resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==} dev: false - /archy@1.0.0: - resolution: {integrity: sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==} - dev: false - /are-we-there-yet@2.0.0: resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==} engines: {node: '>=10'} @@ -4561,11 +4573,13 @@ packages: /arg@4.1.3: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + dev: true /argparse@1.0.10: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} dependencies: sprintf-js: 1.0.3 + dev: true /argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} @@ -4660,11 +4674,6 @@ packages: tslib: 2.5.0 dev: false - /async-hook-domain@2.0.4: - resolution: {integrity: sha512-14LjCmlK1PK8eDtTezR6WX8TMaYNIzBIsd2D1sGoGjgx0BuNMMoSdk7i/drlbtamy0AWv9yv2tkB+ASdmeqFIw==} - engines: {node: '>=10'} - dev: false - /async-retry@1.3.3: resolution: {integrity: sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==} dependencies: @@ -4877,13 +4886,9 @@ packages: resolution: {integrity: sha512-rA2CrUl1+6yKrn+XgLs8Hdy18OER1UW146nM+ixzhQXDY+Bd3ySkyIJGwF2a4I45JwbvF1mDL/nWkqBwpOcdBA==} dev: false - /bind-obj-methods@3.0.0: - resolution: {integrity: sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==} - engines: {node: '>=10'} - dev: false - /bindings@1.5.0: resolution: {integrity: sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==} + requiresBuild: true dependencies: file-uri-to-path: 1.0.0 dev: false @@ -5085,6 +5090,7 @@ packages: /buffer-from@1.1.2: resolution: {integrity: 
sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + dev: true /buffer-writer@2.0.0: resolution: {integrity: sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==} @@ -5206,16 +5212,6 @@ packages: resolution: {integrity: sha512-WF0LihfemtesFcJgO7xfOoOcnWzY/QHR4qeDqV44jPU3HTI54+LnfXK3SA27AVVGCdZFgjjFFaqUA9Jx7dMJZA==} dev: true - /caching-transform@4.0.0: - resolution: {integrity: sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==} - engines: {node: '>=8'} - dependencies: - hasha: 5.2.2 - make-dir: 3.1.0 - package-hash: 4.0.0 - write-file-atomic: 3.0.3 - dev: false - /call-bind@1.0.2: resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} dependencies: @@ -5246,6 +5242,7 @@ packages: /camelcase@5.3.1: resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==} engines: {node: '>=6'} + dev: true /camelcase@6.3.0: resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} @@ -5332,14 +5329,6 @@ packages: engines: {node: '>=6'} dev: false - /cliui@6.0.0: - resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==} - dependencies: - string-width: 4.2.3 - strip-ansi: 6.0.1 - wrap-ansi: 6.2.0 - dev: false - /cliui@7.0.4: resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} dependencies: @@ -5442,10 +5431,6 @@ packages: engines: {node: '>= 6'} dev: true - /commondir@1.0.1: - resolution: {integrity: sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==} - dev: false - /compressible@2.0.18: resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} engines: {node: '>= 0.6'} @@ -5575,6 +5560,7 @@ packages: /create-require@1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + dev: true /cron-parser@4.8.1: resolution: {integrity: sha512-jbokKWGcyU4gl6jAfX97E1gDpY12DJ1cLJZmoDzaAln/shZ+S3KBFBuA2Q6WeUN4gJf/8klnV1EfvhA2lK5IRQ==} @@ -5776,11 +5762,6 @@ packages: dependencies: ms: 2.1.2 - /decamelize@1.2.0: - resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==} - engines: {node: '>=0.10.0'} - dev: false - /dedent@0.7.0: resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} dev: true @@ -5793,13 +5774,6 @@ packages: engines: {node: '>=0.10.0'} dev: true - /default-require-extensions@3.0.1: - resolution: {integrity: sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==} - engines: {node: '>=8'} - dependencies: - strip-bom: 4.0.0 - dev: false - /default-user-agent@1.0.0: resolution: {integrity: sha512-bDF7bg6OSNcSwFWPu4zYKpVkJZQYVrAANMYB8bc9Szem1D0yKdm4sa/rOCs2aC9+2GMqQ7KnwtZRvDhmLF0dXw==} engines: {node: '>= 0.10.0'} @@ -5903,6 +5877,7 @@ packages: /diff@4.0.2: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} + dev: true /diffie-hellman@5.0.3: resolution: {integrity: 
sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==} @@ -6111,10 +6086,6 @@ packages: is-symbol: 1.0.4 dev: true - /es6-error@4.1.1: - resolution: {integrity: sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==} - dev: false - /escalade@3.1.1: resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} engines: {node: '>=6'} @@ -6471,10 +6442,6 @@ packages: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} - /events-to-array@1.1.2: - resolution: {integrity: sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA==} - dev: false - /events@1.1.1: resolution: {integrity: sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==} engines: {node: '>=0.4.x'} @@ -6640,6 +6607,7 @@ packages: /file-uri-to-path@1.0.0: resolution: {integrity: sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==} + requiresBuild: true dev: false /file-uri-to-path@2.0.0: @@ -6654,21 +6622,13 @@ packages: dependencies: to-regex-range: 5.0.1 - /find-cache-dir@3.3.2: - resolution: {integrity: sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==} - engines: {node: '>=8'} - dependencies: - commondir: 1.0.1 - make-dir: 3.1.0 - pkg-dir: 4.2.0 - dev: false - /find-up@4.1.0: resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} engines: {node: '>=8'} dependencies: locate-path: 5.0.0 path-exists: 4.0.0 + dev: true /find-up@5.0.0: resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} @@ -6678,10 +6638,6 @@ packages: path-exists: 4.0.0 dev: true - /findit@2.0.0: - resolution: {integrity: sha512-ENZS237/Hr8bjczn5eKuBohLgaD0JyUd0arxretR1f9RO46vZHA1b2y0VorgGV3WaOT3c+78P8h7v4JGJ1i/rg==} - dev: false - /flat-cache@3.0.4: resolution: {integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==} engines: {node: ^10.12.0 || >=12.0.0} @@ -6721,6 +6677,7 @@ packages: dependencies: cross-spawn: 7.0.3 signal-exit: 3.0.7 + dev: true /foreground-child@3.1.1: resolution: {integrity: sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==} @@ -6755,14 +6712,6 @@ packages: pause-stream: 0.0.11 dev: false - /fromentries@1.3.2: - resolution: {integrity: sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==} - dev: false - - /fs-exists-cached@1.0.0: - resolution: {integrity: sha512-kSxoARUDn4F2RPXX48UXnaFKwVU7Ivd/6qpzZL29MCDmr9sTvybv4gFCp+qaI4fM9m0z9fgz/yJvi56GAz+BZg==} - dev: false - /fs-extra@10.1.0: resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} engines: {node: '>=12'} @@ -6820,10 +6769,6 @@ packages: /function-bind@1.1.1: resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} - /function-loop@2.0.1: - resolution: {integrity: sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==} - dev: false - /function.prototype.name@1.1.5: resolution: {integrity: 
sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} engines: {node: '>= 0.4'} @@ -6946,6 +6891,7 @@ packages: /get-package-type@0.1.0: resolution: {integrity: sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==} engines: {node: '>=8.0.0'} + dev: true /get-stream@6.0.1: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} @@ -7248,14 +7194,6 @@ packages: inherits: 2.0.4 minimalistic-assert: 1.0.1 - /hasha@5.2.2: - resolution: {integrity: sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==} - engines: {node: '>=8'} - dependencies: - is-stream: 2.0.1 - type-fest: 0.8.1 - dev: false - /help-me@4.2.0: resolution: {integrity: sha512-TAOnTB8Tz5Dw8penUuzHVrKNKlCIbwwbHnXraNJxPwf8LRtE2HlM84RYuezMFcwOJmoYOCWVDyJ8TQGxn9PgxA==} dependencies: @@ -7290,6 +7228,7 @@ packages: /html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + dev: true /htmlescape@1.1.1: resolution: {integrity: sha512-eVcrzgbR4tim7c7soKQKtxa/kQM4TzjnlU83rcZ9bHU6t31ehfV7SktN6McWgwPWg+JYMA/O3qpGxBvFq1z2Jg==} @@ -7723,11 +7662,6 @@ packages: call-bind: 1.0.2 dev: true - /is-windows@1.0.2: - resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} - engines: {node: '>=0.10.0'} - dev: false - /is-wsl@1.1.0: resolution: {integrity: sha512-gfygJYZ2gLTDlmbWMI0CE2MwnFzSN/2SZfkMlItC4K/JBlsWVDB0bO6XhqcY13YXE7iMcAJnzTCJjPiTeJJ0Mw==} engines: {node: '>=4'} @@ -7757,25 +7691,7 @@ packages: /istanbul-lib-coverage@3.2.0: resolution: {integrity: sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==} engines: {node: '>=8'} - - /istanbul-lib-hook@3.0.0: - resolution: {integrity: sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==} - engines: {node: '>=8'} - dependencies: - append-transform: 2.0.0 - dev: false - - /istanbul-lib-instrument@4.0.3: - resolution: {integrity: sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==} - engines: {node: '>=8'} - dependencies: - '@babel/core': 7.21.4 - '@istanbuljs/schema': 0.1.3 - istanbul-lib-coverage: 3.2.0 - semver: 6.3.0 - transitivePeerDependencies: - - supports-color - dev: false + dev: true /istanbul-lib-instrument@5.2.1: resolution: {integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==} @@ -7790,18 +7706,6 @@ packages: - supports-color dev: true - /istanbul-lib-processinfo@2.0.3: - resolution: {integrity: sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==} - engines: {node: '>=8'} - dependencies: - archy: 1.0.0 - cross-spawn: 7.0.3 - istanbul-lib-coverage: 3.2.0 - p-map: 3.0.0 - rimraf: 3.0.2 - uuid: 8.3.2 - dev: false - /istanbul-lib-report@3.0.0: resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} engines: {node: '>=8'} @@ -7809,6 +7713,7 @@ packages: istanbul-lib-coverage: 3.2.0 make-dir: 3.1.0 supports-color: 7.2.0 + dev: true /istanbul-lib-source-maps@4.0.1: resolution: {integrity: sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==} @@ -7819,6 +7724,7 @@ packages: source-map: 0.6.1 transitivePeerDependencies: - 
supports-color + dev: true /istanbul-reports@3.1.5: resolution: {integrity: sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==} @@ -7826,13 +7732,7 @@ packages: dependencies: html-escaper: 2.0.2 istanbul-lib-report: 3.0.0 - - /jackspeak@1.4.2: - resolution: {integrity: sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q==} - engines: {node: '>=8'} - dependencies: - cliui: 7.0.4 - dev: false + dev: true /jackspeak@2.3.0: resolution: {integrity: sha512-uKmsITSsF4rUWQHzqaRUuyAir3fZfW3f202Ee34lz/gZCi970CPZwyQXLGNgWJvvZbvFyzeyGq0+4fcG/mBKZg==} @@ -8277,6 +8177,7 @@ packages: dependencies: argparse: 1.0.10 esprima: 4.0.1 + dev: true /js-yaml@4.1.0: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} @@ -8498,25 +8399,6 @@ packages: type-check: 0.4.0 dev: true - /libtap@1.4.0: - resolution: {integrity: sha512-STLFynswQ2A6W14JkabgGetBNk6INL1REgJ9UeNKw5llXroC2cGLgKTqavv0sl8OLVztLLipVKMcQ7yeUcqpmg==} - engines: {node: '>=10'} - dependencies: - async-hook-domain: 2.0.4 - bind-obj-methods: 3.0.0 - diff: 4.0.2 - function-loop: 2.0.1 - minipass: 3.3.6 - own-or: 1.0.0 - own-or-env: 1.0.2 - signal-exit: 3.0.7 - stack-utils: 2.0.6 - tap-parser: 11.0.2 - tap-yaml: 1.0.2 - tcompare: 5.0.7 - trivial-deferred: 1.1.2 - dev: false - /lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} @@ -8531,6 +8413,7 @@ packages: engines: {node: '>=8'} dependencies: p-locate: 4.1.0 + dev: true /locate-path@6.0.0: resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} @@ -8555,10 +8438,6 @@ packages: resolution: {integrity: sha512-C5N2Z3DgnnKr0LOpv/hKCgKdb7ZZwafIrsesve6lmzvZIRZRGaZ/l6Q8+2W7NaT+ZwO3fFlSCzCzrDCFdJfZ4g==} dev: false - /lodash.flattendeep@4.4.0: - resolution: {integrity: sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==} - dev: false - /lodash.isarguments@3.1.0: resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==} dev: false @@ -8662,6 +8541,7 @@ packages: /make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} + dev: true /make-fetch-happen@10.2.1: resolution: {integrity: sha512-NgOPbRiaQM10DYXvN3/hhGVI2M5MtITFryzBGxHM5p4wnFxsVCbxkrBrDsk+EZ5OB4jEOT7AjDxtdF+KVEFT7w==} @@ -9150,28 +9030,15 @@ packages: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} dev: true - /node-preload@0.2.1: - resolution: {integrity: sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==} - engines: {node: '>=8'} - dependencies: - process-on-spawn: 1.0.0 - dev: false - - /node-rdkafka-acosom@2.16.1(ts-node@10.9.1)(typescript@4.9.5): - resolution: {integrity: sha512-DuzWrv2u0M97LCYWY6W/opX331bUIFwLnOZ31147u2DfrGyqZjuMZAOv70TXFQmaxyGHZ84ipQzVLNWbBbZl0g==} + /node-rdkafka@2.17.0(patch_hash=p4aetcvho53cvjti6c3zi7tfaq): + resolution: {integrity: sha512-vFABzRcE5FaH0WqfqJRxDoqeG6P8UEB3M4qFQ7SkwMgQueMMO78+fm8MYfl5hLW3bBYfBekK2BXIIr0lDQtSEQ==} engines: {node: '>=6.0.0'} requiresBuild: true dependencies: bindings: 1.5.0 nan: 2.17.0 - tap: 16.3.4(ts-node@10.9.1)(typescript@4.9.5) - transitivePeerDependencies: - - coveralls - - 
flow-remove-types - - supports-color - - ts-node - - typescript dev: false + patched: true /node-releases@2.0.10: resolution: {integrity: sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==} @@ -9273,42 +9140,6 @@ packages: engines: {node: '>=0.10.0'} dev: true - /nyc@15.1.0: - resolution: {integrity: sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==} - engines: {node: '>=8.9'} - hasBin: true - dependencies: - '@istanbuljs/load-nyc-config': 1.1.0 - '@istanbuljs/schema': 0.1.3 - caching-transform: 4.0.0 - convert-source-map: 1.9.0 - decamelize: 1.2.0 - find-cache-dir: 3.3.2 - find-up: 4.1.0 - foreground-child: 2.0.0 - get-package-type: 0.1.0 - glob: 7.2.3 - istanbul-lib-coverage: 3.2.0 - istanbul-lib-hook: 3.0.0 - istanbul-lib-instrument: 4.0.3 - istanbul-lib-processinfo: 2.0.3 - istanbul-lib-report: 3.0.0 - istanbul-lib-source-maps: 4.0.1 - istanbul-reports: 3.1.5 - make-dir: 3.1.0 - node-preload: 0.2.1 - p-map: 3.0.0 - process-on-spawn: 1.0.0 - resolve-from: 5.0.0 - rimraf: 3.0.2 - signal-exit: 3.0.7 - spawn-wrap: 2.0.0 - test-exclude: 6.0.0 - yargs: 15.4.1 - transitivePeerDependencies: - - supports-color - dev: false - /object-assign@4.1.1: resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} engines: {node: '>=0.10.0'} @@ -9379,11 +9210,6 @@ packages: is-wsl: 2.2.0 dev: false - /opener@1.5.2: - resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} - hasBin: true - dev: false - /opn@5.5.0: resolution: {integrity: sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==} engines: {node: '>=4'} @@ -9436,16 +9262,6 @@ packages: minimist: 1.2.8 dev: false - /own-or-env@1.0.2: - resolution: {integrity: sha512-NQ7v0fliWtK7Lkb+WdFqe6ky9XAzYmlkXthQrBbzlYbmFKoAYbDDcwmOm6q8kOuwSRXW8bdL5ORksploUJmWgw==} - dependencies: - own-or: 1.0.0 - dev: false - - /own-or@1.0.0: - resolution: {integrity: sha512-NfZr5+Tdf6MB8UI9GLvKRs4cXY8/yB0w3xtt84xFdWy8hkGjn+JFc60VhzS/hFRfbyxFcGYMTjnF4Me+RbbqrA==} - dev: false - /p-defer@3.0.0: resolution: {integrity: sha512-ugZxsxmtTln604yeYd29EGrNhazN2lywetzpKhfmQjW/VJmhpDmWbiX+h0zL8V91R0UXkhb3KtPmyq9PZw3aYw==} engines: {node: '>=8'} @@ -9468,6 +9284,7 @@ packages: engines: {node: '>=6'} dependencies: p-try: 2.2.0 + dev: true /p-limit@3.1.0: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} @@ -9480,6 +9297,7 @@ packages: engines: {node: '>=8'} dependencies: p-limit: 2.3.0 + dev: true /p-locate@5.0.0: resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} @@ -9493,13 +9311,6 @@ packages: engines: {node: '>=6'} dev: false - /p-map@3.0.0: - resolution: {integrity: sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==} - engines: {node: '>=8'} - dependencies: - aggregate-error: 3.1.0 - dev: false - /p-map@4.0.0: resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} engines: {node: '>=10'} @@ -9517,6 +9328,7 @@ packages: /p-try@2.2.0: resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} + dev: true /pac-proxy-agent@5.0.0: resolution: {integrity: 
sha512-CcFG3ZtnxO8McDigozwE3AqAw15zDvGH+OjXO4kzf7IkEKkQ4gxQ+3sdF50WmhQ4P/bVusXcqNE2S3XrNURwzQ==} @@ -9544,16 +9356,6 @@ packages: netmask: 2.0.2 dev: false - /package-hash@4.0.0: - resolution: {integrity: sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==} - engines: {node: '>=8'} - dependencies: - graceful-fs: 4.2.11 - hasha: 5.2.2 - lodash.flattendeep: 4.4.0 - release-zalgo: 1.0.0 - dev: false - /packet-reader@1.0.0: resolution: {integrity: sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==} dev: false @@ -9611,6 +9413,7 @@ packages: /path-exists@4.0.0: resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} engines: {node: '>=8'} + dev: true /path-is-absolute@1.0.1: resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} @@ -9780,6 +9583,7 @@ packages: engines: {node: '>=8'} dependencies: find-up: 4.1.0 + dev: true /postgres-array@2.0.0: resolution: {integrity: sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==} @@ -9853,13 +9657,6 @@ packages: resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} dev: true - /process-on-spawn@1.0.0: - resolution: {integrity: sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==} - engines: {node: '>=8'} - dependencies: - fromentries: 1.3.2 - dev: false - /process-warning@2.2.0: resolution: {integrity: sha512-/1WZ8+VQjR6avWOgHeEPd7SDQmFQ1B5mC1eRXsCm5TarlNmx/wCsa5GEaxGm05BORRtyG/Ex/3xq3TuRvq57qg==} dev: false @@ -10004,6 +9801,7 @@ packages: /punycode@2.3.0: resolution: {integrity: sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==} engines: {node: '>=6'} + dev: true /python-struct@1.1.3: resolution: {integrity: sha512-UsI/mNvk25jRpGKYI38Nfbv84z48oiIWwG67DLVvjRhy8B/0aIK+5Ju5WOHgw/o9rnEmbAS00v4rgKFQeC332Q==} @@ -10209,13 +10007,6 @@ packages: jsesc: 0.5.0 dev: false - /release-zalgo@1.0.0: - resolution: {integrity: sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==} - engines: {node: '>=4'} - dependencies: - es6-error: 4.1.1 - dev: false - /remove-trailing-separator@1.1.0: resolution: {integrity: sha512-/hS+Y0u3aOfIETiaiirUFwDBDzmXPvO+jAfKTitUngIPzdKc6Z0LoFjM/CK5PL4C+eKwHohlHAb6H0VFfmmUsw==} dev: false @@ -10229,10 +10020,6 @@ packages: engines: {node: '>=0.10.0'} dev: true - /require-main-filename@2.0.0: - resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} - dev: false - /requizzle@0.2.4: resolution: {integrity: sha512-JRrFk1D4OQ4SqovXOgdav+K8EAhSB/LJZqCz8tbX0KObcdeM15Ss59ozWMBWmmINMagCwmqn4ZNryUGpBsl6Jw==} dependencies: @@ -10253,6 +10040,7 @@ packages: /resolve-from@5.0.0: resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} engines: {node: '>=8'} + dev: true /resolve.exports@1.1.1: resolution: {integrity: sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ==} @@ -10576,6 +10364,7 @@ packages: dependencies: buffer-from: 1.1.2 source-map: 0.6.1 + dev: true /source-map@0.5.7: resolution: {integrity: sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==} @@ -10591,24 +10380,13 @@ 
packages: deprecated: Please use @jridgewell/sourcemap-codec instead dev: true - /spawn-wrap@2.0.0: - resolution: {integrity: sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==} - engines: {node: '>=8'} - dependencies: - foreground-child: 2.0.0 - is-windows: 1.0.2 - make-dir: 3.1.0 - rimraf: 3.0.2 - signal-exit: 3.0.7 - which: 2.0.2 - dev: false - /split2@4.2.0: resolution: {integrity: sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==} engines: {node: '>= 10.x'} /sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + dev: true /ssri@10.0.5: resolution: {integrity: sha512-bSf16tAFkGeRlUNDjXu8FzaMQt6g2HZJrun7mtMbIPOddxt3GLMSz5VWUWcqTJUPfLEaDIepGxv+bYQW49596A==} @@ -10633,6 +10411,7 @@ packages: engines: {node: '>=10'} dependencies: escape-string-regexp: 2.0.0 + dev: true /standard-as-callback@2.1.0: resolution: {integrity: sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==} @@ -10797,6 +10576,7 @@ packages: /strip-bom@4.0.0: resolution: {integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==} engines: {node: '>=8'} + dev: true /strip-final-newline@2.0.0: resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} @@ -10872,91 +10652,6 @@ packages: engines: {node: '>= 6.0.0'} dev: false - /tap-mocha-reporter@5.0.3: - resolution: {integrity: sha512-6zlGkaV4J+XMRFkN0X+yuw6xHbE9jyCZ3WUKfw4KxMyRGOpYSRuuQTRJyWX88WWuLdVTuFbxzwXhXuS2XE6o0g==} - engines: {node: '>= 8'} - hasBin: true - dependencies: - color-support: 1.1.3 - debug: 4.3.4 - diff: 4.0.2 - escape-string-regexp: 2.0.0 - glob: 7.2.3 - tap-parser: 11.0.2 - tap-yaml: 1.0.2 - unicode-length: 2.1.0 - transitivePeerDependencies: - - supports-color - dev: false - - /tap-parser@11.0.2: - resolution: {integrity: sha512-6qGlC956rcORw+fg7Fv1iCRAY8/bU9UabUAhs3mXRH6eRmVZcNPLheSXCYaVaYeSwx5xa/1HXZb1537YSvwDZg==} - engines: {node: '>= 8'} - hasBin: true - dependencies: - events-to-array: 1.1.2 - minipass: 3.3.6 - tap-yaml: 1.0.2 - dev: false - - /tap-yaml@1.0.2: - resolution: {integrity: sha512-GegASpuqBnRNdT1U+yuUPZ8rEU64pL35WPBpCISWwff4dErS2/438barz7WFJl4Nzh3Y05tfPidZnH+GaV1wMg==} - dependencies: - yaml: 1.10.2 - dev: false - - /tap@16.3.4(ts-node@10.9.1)(typescript@4.9.5): - resolution: {integrity: sha512-SAexdt2ZF4XBgye6TPucFI2y7VE0qeFXlXucJIV1XDPCs+iJodk0MYacr1zR6Ycltzz7PYg8zrblDXKbAZM2LQ==} - engines: {node: '>=12'} - hasBin: true - peerDependencies: - coveralls: ^3.1.1 - flow-remove-types: '>=2.112.0' - ts-node: '>=8.5.2' - typescript: '>=3.7.2' - peerDependenciesMeta: - coveralls: - optional: true - flow-remove-types: - optional: true - ts-node: - optional: true - typescript: - optional: true - dependencies: - chokidar: 3.5.3 - findit: 2.0.0 - foreground-child: 2.0.0 - fs-exists-cached: 1.0.0 - glob: 7.2.3 - isexe: 2.0.0 - istanbul-lib-processinfo: 2.0.3 - jackspeak: 1.4.2 - libtap: 1.4.0 - minipass: 3.3.6 - mkdirp: 1.0.4 - nyc: 15.1.0 - opener: 1.5.2 - rimraf: 3.0.2 - signal-exit: 3.0.7 - source-map-support: 0.5.21 - tap-mocha-reporter: 5.0.3 - tap-parser: 11.0.2 - tap-yaml: 1.0.2 - tcompare: 5.0.7 - ts-node: 10.9.1(@swc/core@1.3.55)(@types/node@16.18.25)(typescript@4.9.5) - typescript: 4.9.5 - which: 2.0.2 - transitivePeerDependencies: - - supports-color - dev: false - bundledDependencies: - - ink - - treport - - 
'@types/react' - - '@isaacs/import-jsx' - - react - /tar@6.1.13: resolution: {integrity: sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==} engines: {node: '>=10'} @@ -10969,13 +10664,6 @@ packages: yallist: 4.0.0 dev: false - /tcompare@5.0.7: - resolution: {integrity: sha512-d9iddt6YYGgyxJw5bjsN7UJUO1kGOtjSlNy/4PoGYAjQS5pAT/hzIoLf1bZCw+uUxRmZJh7Yy1aA7xKVRT9B4w==} - engines: {node: '>=10'} - dependencies: - diff: 4.0.2 - dev: false - /tdigest@0.1.2: resolution: {integrity: sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==} dependencies: @@ -11029,6 +10717,7 @@ packages: '@istanbuljs/schema': 0.1.3 glob: 7.2.3 minimatch: 3.1.2 + dev: true /text-hex@1.0.0: resolution: {integrity: sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==} @@ -11146,11 +10835,6 @@ packages: resolution: {integrity: sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==} dev: false - /trivial-deferred@1.1.2: - resolution: {integrity: sha512-vDPiDBC3hyP6O4JrJYMImW3nl3c03Tsj9fEXc7Qc/XKa1O7gf5ZtFfIR/E0dun9SnDHdwjna1Z2rSzYgqpxh/g==} - engines: {node: '>= 8'} - dev: false - /ts-node@10.9.1(@swc/core@1.3.55)(@types/node@16.18.25)(typescript@4.9.5): resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} hasBin: true @@ -11181,6 +10865,7 @@ packages: typescript: 4.9.5 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 + dev: true /tsconfig-paths@3.14.2: resolution: {integrity: sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==} @@ -11249,11 +10934,6 @@ packages: engines: {node: '>=10'} dev: true - /type-fest@0.8.1: - resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} - engines: {node: '>=8'} - dev: false - /type-fest@1.4.0: resolution: {integrity: sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==} engines: {node: '>=10'} @@ -11281,6 +10961,7 @@ packages: resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==} engines: {node: '>=4.2.0'} hasBin: true + dev: true /uc.micro@1.0.6: resolution: {integrity: sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==} @@ -11344,12 +11025,6 @@ packages: engines: {node: '>=4'} dev: false - /unicode-length@2.1.0: - resolution: {integrity: sha512-4bV582zTV9Q02RXBxSUMiuN/KHo5w4aTojuKTNT96DIKps/SIawFp7cS5Mu25VuY1AioGXrmYyzKZUzh8OqoUw==} - dependencies: - punycode: 2.3.0 - dev: false - /unicode-match-property-ecmascript@2.0.0: resolution: {integrity: sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==} engines: {node: '>=4'} @@ -11541,6 +11216,7 @@ packages: /v8-compile-cache-lib@3.0.1: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + dev: true /v8-profiler-next@1.9.0: resolution: {integrity: sha512-+k2Lb0c9lEDsGNT1GfrrlV4evR2KsF3LljW8PUfRaM1OQmr+F3lXGxVIffa+LNXaw3CnwdsSdqI2zaYEhDdu5Q==} @@ -11612,10 +11288,6 @@ packages: is-symbol: 1.0.4 dev: true - /which-module@2.0.1: - resolution: {integrity: sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==} - dev: false - /which-typed-array@1.1.9: resolution: {integrity: 
sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==} engines: {node: '>= 0.4'} @@ -11677,15 +11349,6 @@ packages: resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} engines: {node: '>=0.10.0'} - /wrap-ansi@6.2.0: - resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} - engines: {node: '>=8'} - dependencies: - ansi-styles: 4.3.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - dev: false - /wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} @@ -11766,10 +11429,6 @@ packages: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} engines: {node: '>=0.4'} - /y18n@4.0.3: - resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==} - dev: false - /y18n@5.0.8: resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} engines: {node: '>=10'} @@ -11785,14 +11444,6 @@ packages: engines: {node: '>= 6'} dev: false - /yargs-parser@18.1.3: - resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} - engines: {node: '>=6'} - dependencies: - camelcase: 5.3.1 - decamelize: 1.2.0 - dev: false - /yargs-parser@20.2.9: resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} engines: {node: '>=10'} @@ -11802,23 +11453,6 @@ packages: engines: {node: '>=12'} dev: true - /yargs@15.4.1: - resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==} - engines: {node: '>=8'} - dependencies: - cliui: 6.0.0 - decamelize: 1.2.0 - find-up: 4.1.0 - get-caller-file: 2.0.5 - require-directory: 2.1.1 - require-main-filename: 2.0.0 - set-blocking: 2.0.0 - string-width: 4.2.3 - which-module: 2.0.1 - y18n: 4.0.3 - yargs-parser: 18.1.3 - dev: false - /yargs@16.2.0: resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} engines: {node: '>=10'} @@ -11847,6 +11481,7 @@ packages: /yn@3.1.1: resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} engines: {node: '>=6'} + dev: true /yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} diff --git a/plugin-server/src/kafka/admin.ts b/plugin-server/src/kafka/admin.ts index dbf1693ebcf84..bcc5f742d8d81 100644 --- a/plugin-server/src/kafka/admin.ts +++ b/plugin-server/src/kafka/admin.ts @@ -1,4 +1,4 @@ -import { AdminClient, CODES, GlobalConfig, IAdminClient, LibrdKafkaError } from 'node-rdkafka-acosom' +import { AdminClient, CODES, GlobalConfig, IAdminClient, LibrdKafkaError } from 'node-rdkafka' import { status } from '../utils/status' diff --git a/plugin-server/src/kafka/batch-consumer.ts b/plugin-server/src/kafka/batch-consumer.ts index 77d86ba3b60eb..04845a2f18019 100644 --- a/plugin-server/src/kafka/batch-consumer.ts +++ b/plugin-server/src/kafka/batch-consumer.ts @@ -1,4 +1,4 @@ -import { GlobalConfig, KafkaConsumer, Message } from 'node-rdkafka-acosom' +import { GlobalConfig, KafkaConsumer, Message } from 
'node-rdkafka' import { exponentialBuckets, Gauge, Histogram } from 'prom-client' import { status } from '../utils/status' diff --git a/plugin-server/src/kafka/config.ts b/plugin-server/src/kafka/config.ts index 64b24402ee809..b864593622d0b 100644 --- a/plugin-server/src/kafka/config.ts +++ b/plugin-server/src/kafka/config.ts @@ -1,4 +1,4 @@ -import { GlobalConfig } from 'node-rdkafka-acosom' +import { GlobalConfig } from 'node-rdkafka' import { hostname } from 'os' import { KafkaConfig } from '../utils/db/hub' diff --git a/plugin-server/src/kafka/consumer.ts b/plugin-server/src/kafka/consumer.ts index dd521c4a4c8d5..190d0137c0909 100644 --- a/plugin-server/src/kafka/consumer.ts +++ b/plugin-server/src/kafka/consumer.ts @@ -8,7 +8,7 @@ import { Message, TopicPartition, TopicPartitionOffset, -} from 'node-rdkafka-acosom' +} from 'node-rdkafka' import { kafkaRebalancePartitionCount, latestOffsetTimestampGauge } from '../main/ingestion-queues/metrics' import { status } from '../utils/status' diff --git a/plugin-server/src/kafka/producer.ts b/plugin-server/src/kafka/producer.ts index 4b373fd2c734f..508cc2eb552ac 100644 --- a/plugin-server/src/kafka/producer.ts +++ b/plugin-server/src/kafka/producer.ts @@ -6,7 +6,7 @@ import { MessageValue, NumberNullUndefined, ProducerGlobalConfig, -} from 'node-rdkafka-acosom' +} from 'node-rdkafka' import { getSpan } from '../sentry' import { status } from '../utils/status' diff --git a/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-consumer.ts b/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-consumer.ts index 0728360634981..3d6a1dce57221 100644 --- a/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-consumer.ts +++ b/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-consumer.ts @@ -1,5 +1,5 @@ import { EachBatchPayload } from 'kafkajs' -import { Message } from 'node-rdkafka-acosom' +import { Message } from 'node-rdkafka' import * as schedule from 'node-schedule' import { Counter } from 'prom-client' diff --git a/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-historical-consumer.ts b/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-historical-consumer.ts index 1b8a19fa528b1..668421ff42d58 100644 --- a/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-historical-consumer.ts +++ b/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-historical-consumer.ts @@ -1,5 +1,5 @@ import { EachBatchPayload } from 'kafkajs' -import { Message } from 'node-rdkafka-acosom' +import { Message } from 'node-rdkafka' import * as schedule from 'node-schedule' import { KAFKA_EVENTS_PLUGIN_INGESTION_HISTORICAL, prefix as KAFKA_PREFIX } from '../../config/kafka-topics' @@ -25,13 +25,13 @@ export const startAnalyticsEventsIngestionHistoricalConsumer = async ({ Consumes analytics events from the Kafka topic `events_plugin_ingestion_historical` and processes them for ingestion into ClickHouse. - This is the historical events or "slow-lane" processing queue as it contains only + This is the historical events or "slow-lane" processing queue as it contains only events that have timestamps in the past. */ status.info('🔁', 'Starting analytics events historical consumer with rdkafka') - /* - We don't want to move events to overflow from here, it's fine for the processing to + /* + We don't want to move events to overflow from here, it's fine for the processing to take longer, but we want the locality constraints to be respected like normal ingestion. 
*/ const batchHandler = async (messages: Message[], queue: IngestionConsumer): Promise => { @@ -62,13 +62,13 @@ export const startLegacyAnalyticsEventsIngestionHistoricalConsumer = async ({ Consumes analytics events from the Kafka topic `events_plugin_ingestion_historical` and processes them for ingestion into ClickHouse. - This is the historical events or "slow-lane" processing queue as it contains only + This is the historical events or "slow-lane" processing queue as it contains only events that have timestamps in the past. */ status.info('🔁', 'Starting analytics events historical consumer with kafkajs') - /* - We don't want to move events to overflow from here, it's fine for the processing to + /* + We don't want to move events to overflow from here, it's fine for the processing to take longer, but we want the locality constraints to be respected like normal ingestion. */ const batchHandler = async (payload: EachBatchPayload, queue: KafkaJSIngestionConsumer): Promise => { diff --git a/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.ts b/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.ts index 8d4b71cb9083f..aa2ff70beaf21 100644 --- a/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.ts +++ b/plugin-server/src/main/ingestion-queues/analytics-events-ingestion-overflow-consumer.ts @@ -1,5 +1,5 @@ import { EachBatchPayload } from 'kafkajs' -import { Message } from 'node-rdkafka-acosom' +import { Message } from 'node-rdkafka' import * as schedule from 'node-schedule' import { KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW, prefix as KAFKA_PREFIX } from '../../config/kafka-topics' diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts index f74da4d6890bc..652f70cd74244 100644 --- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts @@ -1,5 +1,5 @@ import * as Sentry from '@sentry/node' -import { Message, MessageHeader } from 'node-rdkafka-acosom' +import { Message, MessageHeader } from 'node-rdkafka' import { KAFKA_EVENTS_PLUGIN_INGESTION_DLQ, KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW } from '../../../config/kafka-topics' import { Hub, PipelineEvent, WorkerMethods } from '../../../types' diff --git a/plugin-server/src/main/ingestion-queues/kafka-metrics.ts b/plugin-server/src/main/ingestion-queues/kafka-metrics.ts index fb9b736f08581..a433272bd5f19 100644 --- a/plugin-server/src/main/ingestion-queues/kafka-metrics.ts +++ b/plugin-server/src/main/ingestion-queues/kafka-metrics.ts @@ -1,7 +1,7 @@ import * as Sentry from '@sentry/node' import { StatsD } from 'hot-shots' import { Consumer } from 'kafkajs' -import { KafkaConsumer } from 'node-rdkafka-acosom' +import { KafkaConsumer } from 'node-rdkafka' import { Hub } from '../../types' diff --git a/plugin-server/src/main/ingestion-queues/kafka-queue.ts b/plugin-server/src/main/ingestion-queues/kafka-queue.ts index 433bbb975fea1..d40a0b7ef7699 100644 --- a/plugin-server/src/main/ingestion-queues/kafka-queue.ts +++ b/plugin-server/src/main/ingestion-queues/kafka-queue.ts @@ -1,7 +1,7 @@ import * as Sentry from '@sentry/node' import { StatsD } from 'hot-shots' import { Consumer, EachBatchPayload, Kafka } from 'kafkajs' -import { Message } from 'node-rdkafka-acosom' +import { Message } from 'node-rdkafka' import { 
Counter } from 'prom-client' import { BatchConsumer, startBatchConsumer } from '../../kafka/batch-consumer' diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts index 68abeb224c1eb..87a854de17ce1 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/console-logs-ingester.ts @@ -1,5 +1,5 @@ import { captureException } from '@sentry/node' -import { HighLevelProducer as RdKafkaProducer, NumberNullUndefined } from 'node-rdkafka-acosom' +import { HighLevelProducer as RdKafkaProducer, NumberNullUndefined } from 'node-rdkafka' import { Counter } from 'prom-client' import { KAFKA_LOG_ENTRIES } from '../../../../config/kafka-topics' diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/offset-high-water-marker.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/offset-high-water-marker.ts index 5fecc626940e3..3dbf4e7876f40 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/offset-high-water-marker.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/offset-high-water-marker.ts @@ -1,6 +1,6 @@ import { captureException } from '@sentry/node' import { Redis } from 'ioredis' -import { TopicPartition } from 'node-rdkafka-acosom' +import { TopicPartition } from 'node-rdkafka' import { RedisPool } from '../../../../types' import { timeoutGuard } from '../../../../utils/db/utils' diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts index cdc56f5efabdf..ca2da07d7edea 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts @@ -1,7 +1,7 @@ import { captureException } from '@sentry/node' import { randomUUID } from 'crypto' import { Redis } from 'ioredis' -import { TopicPartition } from 'node-rdkafka-acosom' +import { TopicPartition } from 'node-rdkafka' import { RedisPool } from '../../../../types' import { timeoutGuard } from '../../../../utils/db/utils' @@ -43,10 +43,10 @@ export class PartitionLocker { tps.forEach((tp) => keys.add(topicPartitionKey(this.keyPrefix, tp))) return [...keys] } - /* + /* Claim the lock for partitions for this consumer - If already locked, we extend the TTL - - If it is claimed, we wait and retry until it is cleared + - If it is claimed, we wait and retry until it is cleared - If unclaimed, we claim it */ public async claim(tps: TopicPartition[]) { @@ -104,7 +104,7 @@ export class PartitionLocker { } } - /* + /* Release a lock for a partition - Clear our claim if it is set to our consumer so that another can claim it */ diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts index 5be4d3411d260..f92f2ab1a0674 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts @@ -1,7 +1,7 @@ import { captureException, captureMessage } from '@sentry/node' import { randomUUID } 
from 'crypto' import { DateTime } from 'luxon' -import { HighLevelProducer as RdKafkaProducer, NumberNullUndefined } from 'node-rdkafka-acosom' +import { HighLevelProducer as RdKafkaProducer, NumberNullUndefined } from 'node-rdkafka' import { Counter } from 'prom-client' import { KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS } from '../../../../config/kafka-topics' diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts index ee4d77a156bbe..bfb6aaefef061 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts @@ -1,7 +1,7 @@ import { PluginEvent } from '@posthog/plugin-scaffold' import { captureException, captureMessage } from '@sentry/node' import { DateTime } from 'luxon' -import { HighLevelProducer as RdKafkaProducer, Message, NumberNullUndefined } from 'node-rdkafka-acosom' +import { HighLevelProducer as RdKafkaProducer, Message, NumberNullUndefined } from 'node-rdkafka' import { KAFKA_CLICKHOUSE_SESSION_RECORDING_EVENTS, diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts index 49f9fbc60daa2..2471afc28b6c7 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts @@ -1,7 +1,7 @@ import * as Sentry from '@sentry/node' import { captureException } from '@sentry/node' import { mkdirSync, rmSync } from 'node:fs' -import { CODES, features, librdkafkaVersion, Message, TopicPartition } from 'node-rdkafka-acosom' +import { CODES, features, librdkafkaVersion, Message, TopicPartition } from 'node-rdkafka' import { Counter, Gauge, Histogram } from 'prom-client' import { sessionRecordingConsumerConfig } from '../../../config/config' diff --git a/plugin-server/src/main/ingestion-queues/session-recording/types.ts b/plugin-server/src/main/ingestion-queues/session-recording/types.ts index 6bff13bbde468..eb6df15214ef2 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/types.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/types.ts @@ -1,6 +1,6 @@ // This is the incoming message from Kafka -import { TopicPartitionOffset } from 'node-rdkafka-acosom' +import { TopicPartitionOffset } from 'node-rdkafka' import { RRWebEvent } from '../../../types' diff --git a/plugin-server/src/utils/db/kafka-producer-wrapper.ts b/plugin-server/src/utils/db/kafka-producer-wrapper.ts index a3aa73ad49117..8f7cef4c06b30 100644 --- a/plugin-server/src/utils/db/kafka-producer-wrapper.ts +++ b/plugin-server/src/utils/db/kafka-producer-wrapper.ts @@ -1,5 +1,5 @@ import { Message, ProducerRecord } from 'kafkajs' -import { HighLevelProducer, LibrdKafkaError, MessageHeader, MessageKey, MessageValue } from 'node-rdkafka-acosom' +import { HighLevelProducer, LibrdKafkaError, MessageHeader, MessageKey, MessageValue } from 'node-rdkafka' import { Counter } from 'prom-client' import { disconnectProducer, flushProducer, produce } from '../../kafka/producer' diff --git a/plugin-server/src/utils/event.ts b/plugin-server/src/utils/event.ts index e957ca5f263fc..3c05b7894974b 100644 --- a/plugin-server/src/utils/event.ts +++ 
b/plugin-server/src/utils/event.ts @@ -1,6 +1,6 @@ import { PluginEvent, ProcessedPluginEvent } from '@posthog/plugin-scaffold' import { DateTime } from 'luxon' -import { Message } from 'node-rdkafka-acosom' +import { Message } from 'node-rdkafka' import { ClickHouseEvent, PipelineEvent, PostIngestionEvent, RawClickHouseEvent } from '../types' import { convertDatabaseElementsToRawElements } from '../worker/vm/upgrades/utils/fetchEventsForInterval' diff --git a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts index 31dc19d000f3b..91aa4523cf392 100644 --- a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts +++ b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts @@ -1,4 +1,4 @@ -import { Assignment } from 'node-rdkafka-acosom' +import { Assignment } from 'node-rdkafka' import { KAFKA_EVENTS_PLUGIN_INGESTION } from '../../../src/config/kafka-topics' import { countPartitionsPerTopic } from '../../../src/kafka/consumer' diff --git a/plugin-server/tests/main/ingestion-queues/run-async-handlers-event-pipeline.test.ts b/plugin-server/tests/main/ingestion-queues/run-async-handlers-event-pipeline.test.ts index f270308eaf6a1..6f73fb042b9b4 100644 --- a/plugin-server/tests/main/ingestion-queues/run-async-handlers-event-pipeline.test.ts +++ b/plugin-server/tests/main/ingestion-queues/run-async-handlers-event-pipeline.test.ts @@ -16,7 +16,7 @@ // KafkaJS consumer runner, which we assume will handle retries. import Redis from 'ioredis' -import LibrdKafkaError from 'node-rdkafka-acosom/lib/error' +import LibrdKafkaError from 'node-rdkafka/lib/error' import { defaultConfig } from '../../../src/config/config' import { KAFKA_EVENTS_JSON } from '../../../src/config/kafka-topics' diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts index 2f82463b156bf..fc8c33782923e 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts @@ -1,4 +1,4 @@ -import { Message } from 'node-rdkafka-acosom' +import { Message } from 'node-rdkafka' import { KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS } from '../../../../src/config/kafka-topics' import { IncomingRecordingMessage } from '../../../../src/main/ingestion-queues/session-recording/types' diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v1.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v1.test.ts index d9fe676086d05..d328d1a4f2339 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v1.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v1.test.ts @@ -1,5 +1,5 @@ import { DateTime } from 'luxon' -import LibrdKafkaError from 'node-rdkafka-acosom/lib/error' +import LibrdKafkaError from 'node-rdkafka/lib/error' import { defaultConfig } from '../../../../src/config/config' import { eachBatch } from '../../../../src/main/ingestion-queues/session-recording/session-recordings-consumer-v1' diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts index 7a1368bf6937d..e23a927f11a88 100644 --- 
a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts @@ -1,6 +1,6 @@ import { randomUUID } from 'crypto' import { mkdirSync, readdirSync, rmSync } from 'node:fs' -import { Message } from 'node-rdkafka-acosom' +import { Message } from 'node-rdkafka' import path from 'path' import { waitForExpect } from '../../../../functional_tests/expectations'
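
As context for the change above, here is a minimal sketch of how a consumer opts into the cooperative protocol once the patch is applied. Everything in it is illustrative: the broker address, group id, and topic are placeholders, and `incrementalAssign`/`incrementalUnassign` are the methods the cooperative-rebalancing patch is understood to add on top of node-rdkafka 2.17.0 — they are absent from the upstream type definitions, hence the cast.

    import { Assignment, CODES, KafkaConsumer, LibrdKafkaError } from 'node-rdkafka'

    const consumer = new KafkaConsumer(
        {
            'group.id': 'example-group', // placeholder
            'metadata.broker.list': 'localhost:9092', // placeholder
            // Request incremental (cooperative) rebalancing instead of the
            // default stop-the-world eager protocol.
            'partition.assignment.strategy': 'cooperative-sticky',
            rebalance_cb: (err: LibrdKafkaError, assignments: Assignment[]) => {
                // incrementalAssign/incrementalUnassign come from the patch and
                // are not in the upstream 2.17.0 typings, hence the cast.
                const patched = consumer as any
                if (err.code === CODES.ERRORS.ERR__ASSIGN_PARTITIONS) {
                    // A cooperative rebalance hands the callback only the
                    // partitions being added, so they are assigned incrementally.
                    patched.incrementalAssign(assignments)
                } else if (err.code === CODES.ERRORS.ERR__REVOKE_PARTITIONS) {
                    patched.incrementalUnassign(assignments)
                }
            },
        },
        {}
    )

    consumer.on('ready', () => {
        consumer.subscribe(['example-topic']) // placeholder
        consumer.consume()
    })
    consumer.connect()

The practical difference is that a rebalance no longer revokes every partition from every member of the group: only the partitions that actually move change hands, so the batch and session-recording consumers touched in this diff can keep processing their retained partitions while the group settles.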