SNOW-743920: Add test fetching many chunks
sfc-gh-dprzybysz committed Jul 4, 2023
1 parent 72f5f09 commit 361cc18
Showing 2 changed files with 77 additions and 3 deletions.
lib/connection/result/result.js (3 changes: 2 additions & 1 deletion)
@@ -9,8 +9,8 @@ var Chunk = require('./chunk');
 var ResultStream = require('./result_stream');
 var ChunkCache = require('./chunk_cache');
 var Column = require('./column');
-var Parameters = require('../../parameters');
 var StatementType = require('./statement_type');
+const Logger = require('../../logger');
 
 /**
  * Creates a new Result.
@@ -274,6 +274,7 @@ function createChunks(chunkCfgs,
   }
 
   chunks = new Array(chunkCfgs.length);
+  Logger.getInstance().trace(`Downloading ${chunkCfgs.length} chunks`);
 
   // loop over the chunk config objects and build Chunk instances out of them
   startIndex = 0;
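The new trace line only appears when the driver's log level is at least TRACE; the test below flips that via the repo's configureLogger test helper. For a consumer of the published snowflake-sdk package, a minimal sketch of surfacing the same message (the connection options are placeholders, and the exact trace output depends on the log level configured):

const snowflake = require('snowflake-sdk');

// Raise the driver log level so trace messages such as
// "Downloading <n> chunks" are emitted while results are fetched.
snowflake.configure({ logLevel: 'trace' });

const connection = snowflake.createConnection({
  account: 'myaccount',   // placeholder
  username: 'myuser',     // placeholder
  password: 'mypassword'  // placeholder
});

connection.connect(err => {
  if (err) {
    throw err;
  }
  connection.execute({
    sqlText: 'select randstr(1000, random()) from table(generator(rowcount => 100000))',
    complete: (err, stmt, rows) => {
      // A result this size spans multiple chunks, so the trace log
      // reports how many the driver is about to download.
      if (!err) {
        console.log(`fetched ${rows.length} rows`);
      }
    }
  });
});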
test/integration/testLargeResultSet.js (77 changes: 75 additions & 2 deletions)
@@ -4,11 +4,12 @@
 const assert = require('assert');
 const async = require('async');
 const testUtil = require('./testUtil');
-
-const sourceRowCount = 10000;
+const { configureLogger } = require('../configureLogger');
 
 describe('Large result Set Tests', function ()
 {
+  const sourceRowCount = 10000;
+
   let connection;
   const selectAllFromOrders = `select randstr(1000,random()) from table(generator(rowcount=>${sourceRowCount}))`;
 
@@ -250,3 +251,75 @@ describe('Large result Set Tests', function ()
     });
   });
 });
+
+describe('SNOW-743920: Large result set with ~35 chunks', function () {
+  let connection;
+  const tableName = 'test_table';
+  const sourceRowCount = 251002;
+  const generatedRowSize = 350;
+  const createTable = `create or replace table ${tableName} (data string)`;
+  const populateData = `insert into ${tableName} select randstr(${generatedRowSize}, random()) from table(generator(rowcount => ${sourceRowCount}))`;
+  const selectData = `select * from ${tableName}`;
+
+  before(async () => {
+    connection = testUtil.createConnection();
+    await testUtil.connectAsync(connection);
+    // setting ROWS_PER_RESULTSET causes GCP to return invalid, non-encoded chunks
+    // await testUtil.executeCmdAsync(connection, 'alter session set ROWS_PER_RESULTSET = 1000000');
+    await testUtil.executeCmdAsync(connection, 'alter session set USE_CACHED_RESULT = false;');
+    await testUtil.executeCmdAsync(connection, createTable);
+    await testUtil.executeCmdAsync(connection, populateData);
+    configureLogger('TRACE');
+  });
+
+  after(async () => {
+    configureLogger('ERROR');
+    await testUtil.dropTablesIgnoringErrorsAsync(connection, [tableName]);
+    await testUtil.destroyConnectionAsync(connection);
+  });
+
+  it('fetch result set with many chunks without streaming', done => {
+    connection.execute({
+      sqlText: selectData,
+      complete: function (err, _, rows) {
+        if (err) {
+          done(err);
+        } else {
+          try {
+            testUtil.checkError(err);
+            assert.strictEqual(rows.length, sourceRowCount);
+            done();
+          } catch (e) {
+            done(e);
+          }
+        }
+      }
+    });
+  });
+
+  it('fetch result set with many chunks with streaming', done => {
+    const rows = [];
+    connection.execute({
+      sqlText: selectData,
+      streamResult: true,
+      complete: function (err, stmt) {
+        if (err) {
+          done(err);
+        } else {
+          stmt.streamRows()
+            .on('error', streamErr => done(streamErr))
+            .on('data', row => rows.push(row))
+            .on('end', () => {
+              try {
+                testUtil.checkError(err);
+                assert.strictEqual(rows.length, sourceRowCount);
+                done();
+              } catch (e) {
+                done(e);
+              }
+            });
+        }
+      }
+    });
+  });
+});
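As a rough sanity check on the "~35 chunks" in the describe name, a back-of-the-envelope estimate (the average chunk size used here is an assumption; chunk sizing is decided server-side, varies across a result set, and is not a documented constant):

// Rough arithmetic only - the ~2.5 MiB average chunk size is hypothetical.
const rowCount = 251002;
const rowSize = 350;                                    // bytes of randstr data per row
const totalBytes = rowCount * rowSize;                  // 87,850,700 bytes, ~84 MiB
const assumedAvgChunkBytes = 2.5 * 1024 * 1024;         // assumed average chunk size
console.log(Math.round(totalBytes / assumedAvgChunkBytes)); // ~34, i.e. roughly 35 chunks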
