From 5dbbd58a21181c4f8b101829e9ac213ce8c24fd2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Francisco=20L=C3=B3pez?=
Date: Mon, 22 Jul 2019 11:31:37 +0200
Subject: [PATCH 1/2] [OPEX-527] Increase batch limit logic to handle 1k logs

---
 package.json     | 2 +-
 src/processor.js | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/package.json b/package.json
index 151af47..829a88f 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "auth0-log-extension-tools",
-  "version": "1.3.6",
+  "version": "1.3.7",
   "description": "A set of tools for logging",
   "main": "src/index.js",
   "dependencies": {
diff --git a/src/processor.js b/src/processor.js
index 51988d6..b4f7b56 100644
--- a/src/processor.js
+++ b/src/processor.js
@@ -175,8 +175,8 @@ LogsProcessor.prototype.run = function(handler) {
   const getNextLimit = () => {
     var limit = batchSize;
     limit -= logsBatch.length;
-    if (limit > 100) {
-      limit = 100;
+    if (limit > 1000) {
+      limit = 1000;
     }
     return limit;
   };

From 7370937e46c124219fa56a459139bfcf37f75e95 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Francisco=20L=C3=B3pez?=
Date: Mon, 22 Jul 2019 17:01:07 +0200
Subject: [PATCH 2/2] Add test for higher limit

---
 tests/helpers/mocks.js       |  4 ++--
 tests/lib/processor.tests.js | 13 +++++++++++++
 tests/lib/stream.tests.js    | 17 +++++++++++++++++
 3 files changed, 32 insertions(+), 2 deletions(-)

diff --git a/tests/helpers/mocks.js b/tests/helpers/mocks.js
index 4ada35f..4d77227 100644
--- a/tests/helpers/mocks.js
+++ b/tests/helpers/mocks.js
@@ -27,10 +27,10 @@ module.exports.logs = (options = {}) =>
       const query = querystring.parse(uri);
       const logs = [];
       const from = (query.from) ? parseInt(query.from, 10) : 0;
-      const take = (query.take) ? parseInt(query.take, 10) : 100;
+      const take = parseInt(query.take || options.take, 10) || 100;
 
       for (let i = from + 1; i <= from + take; i += 1) {
-        if (i <= 500) {
+        if (i <= 500 || take > 100) {
           logs.push({ _id: '' + i, date: (options.outdated) ? new Date('1999-10-10') : new Date(), type: options.type });
         }
       }
diff --git a/tests/lib/processor.tests.js b/tests/lib/processor.tests.js
index 81823ff..e4e9623 100644
--- a/tests/lib/processor.tests.js
+++ b/tests/lib/processor.tests.js
@@ -76,6 +76,19 @@ describe('LogsProcessor', () => {
       });
   });
 
+  it('should process higher limit logs and send response', () => {
+    helpers.mocks.logs({ take: 1000 });
+
+    const processor = createProcessor();
+    return processor.run((logs, cb) => setTimeout(() => cb()))
+      .then((result) => {
+        expect(result).to.be.an('object');
+        expect(result.status).to.be.an('object');
+        expect(result.status.logsProcessed).to.equal(1000);
+        expect(result.checkpoint).to.equal('1000');
+      });
+  });
+
   it('should process logs and done by timelimit', () => {
     helpers.mocks.logs({ times: 2 });
 
diff --git a/tests/lib/stream.tests.js b/tests/lib/stream.tests.js
index ecdcafe..c1a9483 100644
--- a/tests/lib/stream.tests.js
+++ b/tests/lib/stream.tests.js
@@ -113,6 +113,23 @@ describe('LogsApiStream', () => {
     logger.next();
   });
 
+  it('should done reading logs with a higher log limit', (done) => {
+    helpers.mocks.logs({ take: 1000 });
+    // helpers.mocks.logs({ empty: true });
+
+    const logger = createStream();
+    logger.on('data', () => logger.done());
+    logger.on('end', () => {
+      logger.batchSaved();
+      expect(logger.status).to.be.an('object');
+      expect(logger.status.logsProcessed).to.equal(1000);
+      expect(logger.lastCheckpoint).to.equal('1000');
+      done();
+    });
+
+    logger.next();
+  });
+
   it('should done reading logs, if ratelimit reached', (done) => {
     helpers.mocks.logs({ limit: 0 });
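
Note (not part of either patch): the behavioural change in PATCH 1/2 comes down to the cap applied inside getNextLimit. The sketch below is illustrative only; it assumes batchSize and logsBatch are supplied by the caller the way LogsProcessor.prototype.run does, and turns the closure into a standalone function purely to show the new cap.

  // Sketch of the per-request limit after this change: a single call
  // may now request up to 1000 logs instead of 100.
  function getNextLimit(batchSize, logsBatch) {
    var limit = batchSize - logsBatch.length;
    if (limit > 1000) {
      limit = 1000;
    }
    return limit;
  }

  // Example: with a batch size of 5000 and 800 logs already collected,
  // the next request asks for 1000 logs (previously it would ask for 100).
  console.log(getNextLimit(5000, new Array(800))); // 1000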