diff --git a/lib/connection/result/result.js b/lib/connection/result/result.js
index cfbad81db..1bcecf233 100644
--- a/lib/connection/result/result.js
+++ b/lib/connection/result/result.js
@@ -9,8 +9,8 @@ var Chunk = require('./chunk');
 var ResultStream = require('./result_stream');
 var ChunkCache = require('./chunk_cache');
 var Column = require('./column');
-var Parameters = require('../../parameters');
 var StatementType = require('./statement_type');
+const Logger = require('../../logger');
 
 /**
  * Creates a new Result.
@@ -274,6 +274,7 @@ function createChunks(chunkCfgs,
   }
 
   chunks = new Array(chunkCfgs.length);
+  Logger.getInstance().trace(`Downloading ${chunkCfgs.length} chunks`);
 
   // loop over the chunk config objects and build Chunk instances out of them
   startIndex = 0;
diff --git a/test/integration/testLargeResultSet.js b/test/integration/testLargeResultSet.js
index 805dbb34f..9b82518b9 100644
--- a/test/integration/testLargeResultSet.js
+++ b/test/integration/testLargeResultSet.js
@@ -4,11 +4,12 @@
 const assert = require('assert');
 const async = require('async');
 const testUtil = require('./testUtil');
-
-const sourceRowCount = 10000;
+const { configureLogger } = require('../configureLogger');
 
 describe('Large result Set Tests', function () {
+  const sourceRowCount = 10000;
+
   let connection;
   const selectAllFromOrders = `select randstr(1000,random()) from table(generator(rowcount=>${sourceRowCount}))`;
 
@@ -250,3 +251,75 @@
     });
   });
 });
+
+describe('SNOW-743920: Large result set with ~35 chunks', function () {
+  let connection;
+  const tableName = 'test_table';
+  const sourceRowCount = 251002;
+  const generatedRowSize = 350;
+  const createTable = `create or replace table ${tableName} (data string)`;
+  const populateData = `insert into ${tableName} select randstr(${generatedRowSize}, random()) from table (generator(rowcount =>${sourceRowCount}))`;
+  const selectData = `select * from ${tableName}`;
+
+  before(async () => {
+    connection = testUtil.createConnection();
+    await testUtil.connectAsync(connection);
+    // setting ROWS_PER_RESULTSET causes invalid, not encoded chunks from GCP
+    // await testUtil.executeCmdAsync(connection, 'alter session set ROWS_PER_RESULTSET = 1000000');
+    await testUtil.executeCmdAsync(connection, 'alter session set USE_CACHED_RESULT = false;');
+    await testUtil.executeCmdAsync(connection, createTable);
+    await testUtil.executeCmdAsync(connection, populateData);
+    configureLogger('TRACE');
+  });
+
+  after(async () => {
+    configureLogger('ERROR');
+    await testUtil.dropTablesIgnoringErrorsAsync(connection, [tableName]);
+    await testUtil.destroyConnectionAsync(connection);
+  });
+
+  it('fetch result set with many chunks without streaming', done => {
+    connection.execute({
+      sqlText: selectData,
+      complete: function (err, _, rows) {
+        if (err) {
+          done(err);
+        } else {
+          try {
+            testUtil.checkError(err);
+            assert.strictEqual(rows.length, sourceRowCount);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        }
+      }
+    });
+  });
+
+  it('fetch result set with many chunks with streaming', done => {
+    const rows = [];
+    connection.execute({
+      sqlText: selectData,
+      streamResult: true,
+      complete: function (err, stmt) {
+        if (err) {
+          done(err);
+        } else {
+          stmt.streamRows()
+            .on('error', err => done(err))
+            .on('data', row => rows.push(row))
+            .on('end', () => {
+              try {
+                testUtil.checkError(err);
+                assert.strictEqual(rows.length, sourceRowCount);
+                done();
+              } catch (e) {
+                done(e);
+              }
+            });
+        }
+      }
+    });
+  });
+});