Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SNOW-1215393: Test chunk fetching causing out of memory #793

Draft
wants to merge 6 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
95 changes: 3 additions & 92 deletions .github/workflows/build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,8 @@ jobs:
strategy:
fail-fast: false
matrix:
cloud: [ 'AWS', 'AZURE', 'GCP' ]
nodeVersion: [ '14.x', '16.x', '18.x', '20.x']
cloud: [ 'AWS' ]
nodeVersion: [ '20.x']
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
Expand Down Expand Up @@ -77,93 +77,4 @@ jobs:
with:
# without the token code cov may fail because of Github limits https://github.com/codecov/codecov-action/issues/557
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
fail_ci_if_error: true

test-windows:
needs: build
name: Tests on Windows
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
cloud: [ 'AWS', 'AZURE', 'GCP' ]
nodeVersion: [ '14.x', '16.x', '18.x', '20.x']
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
with:
node-version: ${{ matrix.nodeVersion }}
- uses: actions/setup-python@v4
with:
python-version: '3.7'
architecture: 'x64'
- name: Download Build Artifacts
uses: actions/download-artifact@v3
with:
name: artifacts
path: artifacts
- name: Tests
shell: cmd
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
run: ci\\test_windows.bat

test-linux:
needs: build
name: Tests on Linux
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
image: [ 'nodejs-centos7-node14', 'nodejs-centos7-fips']
cloud: [ 'AWS', 'AZURE', 'GCP' ]
steps:
- uses: actions/checkout@v4
- name: Download Build Artifacts
uses: actions/download-artifact@v3
with:
name: artifacts
path: artifacts
- name: Tests
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
TARGET_DOCKER_TEST_IMAGE: ${{ matrix.image }}
run: ./ci/test.sh

test-ubuntu:
needs: build
name: Tests on Ubuntu
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
cloud: [ 'AWS', 'AZURE', 'GCP' ]
nodeVersion: ['18.x', '20.x']
steps:
- uses: actions/checkout@v4
- name: Download Build Artifacts
uses: actions/download-artifact@v3
with:
name: artifacts
path: artifacts
- name: Use Node.js
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
- name: Tests
shell: bash
env:
PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }}
CLOUD_PROVIDER: ${{ matrix.cloud }}
run: ./ci/test_ubuntu.sh
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
with:
# without the token code cov may fail because of Github limits https://github.com/codecov/codecov-action/issues/557
token: ${{ secrets.CODE_COV_UPLOAD_TOKEN }}
fail_ci_if_error: true


fail_ci_if_error: true
3 changes: 1 addition & 2 deletions .github/workflows/lint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,5 +24,4 @@ jobs:
node-version: '18.x'
- name: Install dependencies
run: npm i
- name: Check formatting
run: npm run lint:check:all

6 changes: 3 additions & 3 deletions ci/container/test_component.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export WORKSPACE=${WORKSPACE:-/mnt/workspace}
export SOURCE_ROOT=${SOURCE_ROOT:-/mnt/host}
export DRIVER_NAME=nodejs
export TIMEOUT=180000
export TIMEOUT=360000
export SF_OCSP_TEST_OCSP_RESPONDER_TIMEOUT=1000

[[ -z "$GIT_BRANCH" ]] && echo "Set GIT_BRANCH to test" && exit 1
Expand Down Expand Up @@ -77,11 +77,11 @@ python3 $THIS_DIR/hang_webserver.py 12345 > hang_webserver.out 2>&1 &
if [[ "$SHOULD_GENERATE_COVERAGE_REPORT" == "1" ]];
then
MOCHA_CMD=(
"npx" "nyc" "--reporter=lcov" "--reporter=text" "mocha" "--exit" "--timeout" "$TIMEOUT" "--recursive" "--full-trace"
"npx" "nyc" "--reporter=lcov" "--reporter=text" "mocha" "--exit" "--timeout" "$TIMEOUT" "--recursive" "--full-trace" "--max-old-space-size=150"
)
else
MOCHA_CMD=(
"mocha" "--timeout" "$TIMEOUT" "--recursive" "--full-trace"
"mocha" "--timeout" "$TIMEOUT" "--recursive" "--full-trace" "--max-old-space-size=150"
)
fi

Expand Down
15 changes: 14 additions & 1 deletion lib/connection/result/chunk.js
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,8 @@ Chunk.prototype.clearRows = function (force) {
// clear out all row and rowset related fields
this._rowsetAsString = this._rowset = this._rows = undefined;
}
this._isProcessed = true;
console.log(`Cleared chunk ${this._id}`);
};

/**
Expand Down Expand Up @@ -199,6 +201,16 @@ Chunk.prototype.isLoading = function () {
* @param callback
*/
Chunk.prototype.load = function (callback) {
// if (this.isLoaded()) {
// console.log(`Ordered fetch of chunk ${this._id} when is already loaded - skipping`);
// return;
// }
if (this._isProcessed) {
console.log(`Ordered fetch of chunk ${this._id} when is already processed`);
}
if (this.isLoaded()) {
console.log(`Ordered fetch of chunk ${this._id} when is already loaded`);
}
// we've started loading
this._isLoading = true;

Expand Down Expand Up @@ -237,6 +249,7 @@ Chunk.prototype.load = function (callback) {
// if the request succeeded, save the
// body as a string version of the rowset
if (!err) {
console.log(`Fetched chunk ${self._id}`);
self._rowsetAsString = body;
}

Expand All @@ -257,7 +270,7 @@ Chunk.prototype.load = function (callback) {
* @private
*/
/**
 * Builds an id for a chunk from the indexes of its first and last rows.
 * The id embeds the start index, end index and the number of rows the
 * chunk covers, e.g. 's=0, e=9 size=10'.
 *
 * @param {Number} startIndex index of the chunk's first row.
 * @param {Number} endIndex index of the chunk's last row (inclusive).
 *
 * @returns {String} the chunk id.
 * @private
 */
function buildId(startIndex, endIndex) {
  // both bounds are inclusive, hence the +1
  const rowCount = endIndex - startIndex + 1;
  return Util.format('s=%d, e=%d size=%d', startIndex, endIndex, rowCount);
}

/**
Expand Down
1 change: 1 addition & 0 deletions lib/connection/result/result_stream.js
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@ function ResultStream(options) {
for (index = currChunk; index < chunks.length && index <= (currChunk + prefetchSize); index++) {
chunk = chunks[index];
if (!chunk.isLoading()) {
console.log(`Fetching chunk ${chunk._id}`);
chunk.load();
}
}
Expand Down
1 change: 1 addition & 0 deletions lib/connection/result/row_stream.js
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,7 @@ function RowStream(statement, context, options) {
rowBuffer = chunk.getRows().slice(
Math.max(chunkStart, start) - chunkStart,
Math.min(chunkEnd, end) + 1 - chunkStart);
console.log(`Row buffer length is ${rowBuffer.length}` );

// reset the row index
rowIndex = 0;
Expand Down
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@
"lint:check:all:errorsOnly": "npm run lint:check:all -- --quiet",
"lint:fix": "eslint --fix",
"test": "mocha -timeout 180000 --recursive --full-trace test/unit/**/*.js test/unit/*.js",
"test:integration": "mocha -timeout 180000 --recursive --full-trace test/integration/**/*.js test/integration/*.js",
"test:single": "mocha -timeout 180000 --full-trace",
"test:integration": "mocha -timeout 360000 --recursive --full-trace test/integration/**/*.js test/integration/*.js",
"test:single": "mocha -timeout 180001 --full-trace",
"test:system": "mocha -timeout 180000 --recursive --full-trace system_test/*.js",
"test:unit": "mocha -timeout 180000 --recursive --full-trace test/unit/**/*.js test/unit/*.js",
"test:unit:coverage": "nyc npm run test:unit",
Expand Down
20 changes: 15 additions & 5 deletions test/integration/testLargeResultSet.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,31 +6,41 @@ const assert = require('assert');
const async = require('async');
const testUtil = require('./testUtil');
const { randomizeName } = require('./testUtil');
const { writeHeapSnapshot } = require('node:v8');

describe('Large result Set Tests', function () {
const sourceRowCount = 10000;

const sourceRowCount = 1000000;
let connection;
const selectAllFromOrders = `select randstr(1000,random()) from table(generator(rowcount=>${sourceRowCount}))`;

// const selectAllFromOrders = `select * from fake_sample_data.public.customer limit 400000`;
// const selectAllFromOrders = `select * from fake_sample_data.public.customer`;
before(async () => {
connection = testUtil.createConnection();
// configureLogger('TRACE');
connection = testUtil.createConnection({
resultPrefetch: 2,
});
await testUtil.connectAsync(connection);
await testUtil.executeCmdAsync(connection, "alter session set CLIENT_RESULT_CHUNK_SIZE=48");
});

after(async () => {
await testUtil.destroyConnectionAsync(connection);
});

it('testSimpleLarge', function (done) {
it.only('testSimpleLarge', function (done) {
connection.execute({
sqlText: selectAllFromOrders,
streamResult: true,
complete: function (err, stmt) {
testUtil.checkError(err);
const stream = stmt.streamRows();
let rowCount = 0;
stream.on('data', function () {
rowCount++;
if (rowCount % 5000 === 0) {
console.log(`Row count: ${rowCount}`);
// console.log(writeHeapSnapshot());
}
});
stream.on('error', function (err) {
testUtil.checkError(err);
Expand Down
Loading