Skip to content

Commit

Permalink
Merge branch 'master' into centos7node
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-ext-simba-lf authored Sep 21, 2023
2 parents 542383a + 48d2049 commit 837470a
Show file tree
Hide file tree
Showing 7 changed files with 316 additions and 4 deletions.
1 change: 1 addition & 0 deletions .npmignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ snowflake-sdk*.tgz
coverage
system_test/
scripts/
samples/
ci/
.github/
.eslintrc.js
Expand Down
5 changes: 1 addition & 4 deletions lib/global_config.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,8 @@ const mkdirp = require('mkdirp');
const Util = require('./util');
const Errors = require('./errors');
const Logger = require('./logger');
const vm = require('vm');
const { XMLParser, XMLValidator } = require("fast-xml-parser");

const VM_CONTEXT = vm.createContext() // create a new context so VM does not have to make a new one for each conversion

let insecureConnect = false;

/**
Expand Down Expand Up @@ -173,7 +170,7 @@ const rest = {
exports.rest = rest;

// The default JSON parser
exports.jsonColumnVariantParser = rawColumnValue => vm.runInContext("(" + rawColumnValue + ")", VM_CONTEXT);
exports.jsonColumnVariantParser = rawColumnValue => new Function(`return (${rawColumnValue});`)();

/**
* Updates the value of the 'jsonColumnVariantParser' parameter.
Expand Down
2 changes: 2 additions & 0 deletions lib/http/base.js
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ HttpClient.prototype.request = function (options)
// that we receive from GCS, so let's get response as arraybuffer and unzip it outside axios
// issue in axios about case insensitive content-encoding is marked as won't fix: https://github.com/axios/axios/issues/4280
// for all other responses we manually parse jsons or other structures from the server so they need to be text
                // TODO SNOW-917244 we can get rid of this logic when axios > 1.5.0 is released, as it should contain the fix https://github.com/axios/axios/issues/5890
responseType: options.url.includes('storage.googleapis.com') ? 'arraybuffer' : 'text',
};

Expand Down Expand Up @@ -109,6 +110,7 @@ HttpClient.prototype.request = function (options)
// we request that GCS returns body as arraybuffer, not text
// when it is GZIPped then we have to unzip it
// otherwise we should convert arraybuffer to string
          // TODO SNOW-917244 we can get rid of this logic when axios > 1.5.0 is released, as it should contain the fix https://github.com/axios/axios/issues/5890
try {
if (response.headers['content-encoding'] === 'GZIP') {
const unzippedData = zlib.gunzipSync(response.data).toString('utf-8');
Expand Down
54 changes: 54 additions & 0 deletions samples/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
********************************************************************************
NodeJS Driver - Samples
********************************************************************************

Install
======================================================================

In directory samples run `npm i`.

Test
======================================================================

Prepare for tests
----------------------------------------------------------------------

Specify env variables:

```
export SNOWFLAKE_TEST_USER=<your_user>
export SNOWFLAKE_TEST_PASSWORD=<your_password>
export SNOWFLAKE_TEST_ACCOUNT=<your_account>
export SNOWFLAKE_TEST_WAREHOUSE=<your_warehouse>
export SNOWFLAKE_TEST_DATABASE=<your_database>
export SNOWFLAKE_TEST_SCHEMA=<your_schema>
export SNOWFLAKE_TEST_PROTOCOL=<your_snowflake_protocol>
export SNOWFLAKE_TEST_HOST=<your_snowflake_host>
export SNOWFLAKE_TEST_PORT=<your_snowflake_port>
```

Run test to compare json parser
----------------------------------------------------------------------

By default, the test creates a table with 300000 rows of sample variant data (json format)
and measures the time and number of blocks while retrieving the results using two different
methods to extract data.
1. Streaming results: `stream.on('readable', ...)`
2. Events results: `stream.on('data', ...)`
```
npm run jsonParserComparison
```
The test can be started with parameters:
- number of rows in the table, default=300000
- number of selected rows, default=300000
- the parser to test, if given as the last parameter: Function, vm, better-eval, JSON; default: all

Example:
```
npm run jsonParserComparison 300000 300000 Function
```

or
```
npm run jsonParserComparison 300000 300000 JSON
```
56 changes: 56 additions & 0 deletions samples/helpers.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
const snowflake = require('snowflake-sdk');
exports.executeQuery = async function (connection, query, binds) {
await new Promise((resolve, reject) => {
connection.execute({
sqlText: query,
binds: binds,
complete: function (err, stmt, rows) {
if (!err) {
resolve(rows);
} else {
reject(err);
}
}
});
});
};

exports.connectUsingEnv = async () => {
const snowflakeTestProtocol = process.env.SNOWFLAKE_TEST_PROTOCOL;
const snowflakeTestHost = process.env.SNOWFLAKE_TEST_HOST;
const snowflakeTestPort = process.env.SNOWFLAKE_TEST_PORT;
const snowflakeTestAccount = process.env.SNOWFLAKE_TEST_ACCOUNT;
const snowflakeTestUser = process.env.SNOWFLAKE_TEST_USER;
const snowflakeTestDatabase = process.env.SNOWFLAKE_TEST_DATABASE;
const snowflakeTestWarehouse = process.env.SNOWFLAKE_TEST_WAREHOUSE;
const snowflakeTestSchema = process.env.SNOWFLAKE_TEST_SCHEMA;
const snowflakeTestPassword = process.env.SNOWFLAKE_TEST_PASSWORD;
const snowflakeTestRole = process.env.SNOWFLAKE_TEST_ROLE;

const connection = snowflake.createConnection({
account: snowflakeTestAccount,
username: snowflakeTestUser,
password: snowflakeTestPassword,
role: snowflakeTestRole,
database: snowflakeTestDatabase,
schema: snowflakeTestSchema,
warehouse: snowflakeTestWarehouse,
host: snowflakeTestHost,
port: snowflakeTestPort,
protocol: snowflakeTestProtocol
});

return new Promise((resolve, reject) => {
connection.connect(
function (err, conn) {
if (err) {
console.error('Unable to connect: ' + err.message);
reject(new Error(err.message));
} else {
console.log('Successfully connected to Snowflake');
resolve(conn);
}
}
);
});
};
188 changes: 188 additions & 0 deletions samples/jsonParserComparison.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,188 @@
const snowflake = require('snowflake-sdk');
const helpers = require('./helpers');
const blocked = require('blocked-at');

/**
 * Benchmarks JSON parser implementations (Function, vm, better-eval, JSON.parse)
 * plugged in as the driver's jsonColumnVariantParser. For each parser it loads
 * sample variant rows, streams them back with two extraction styles, and prints
 * the query time plus event-loop blocking statistics (via blocked-at).
 *
 * CLI arguments (all optional):
 *   process.argv[2] - number of rows to insert, default 300000
 *   process.argv[3] - number of rows to select, default 300000
 *   process.argv[4] - parser(s) to test: Function, vm, better-eval, JSON; default all
 */
async function run() {
  const defaultRowCount = 300000;
  const rowCountToInsert = process.argv[2];
  const rowCountToFetch = process.argv[3];
  const choosenParser = process.argv[4];
  console.log('Started with arguments: ');
  console.log(`Inserted rows amount: ${rowCountToInsert} - default ${defaultRowCount}`);
  console.log(`Selected rows amount: ${rowCountToFetch} - default ${defaultRowCount}`);
  console.log(`Selected json parse : ${choosenParser} - default all of Function, vm, better-eval, JSON`);

  const rowCount = rowCountToInsert || defaultRowCount;
  const selectLimit = rowCountToFetch || defaultRowCount;
  const testVariantTempName = 'testJsonTempTable000';

  // Temp table holding the raw JSON text that each test case copies into a variant column.
  const createTempTableWithJsonData = `CREATE OR REPLACE TABLE ${testVariantTempName} (value string)
  AS select parse_json('{
    "_id": "6501c357397b66ce47719212",
    "index": 0,
    "guid": "e7e0e5d8-82b4-47f7-a2ab-68588c93d81e",
    "isActive": false,
    "balance": "$2,611.69",
    "picture": "http://placehold.it/32x32",
    "age": 21,
    "eyeColor": "blue",
    "name": "Joanna Atkinson",
    "gender": "female",
    "company": "AQUAZURE",
    "email": "[email protected]",
    "phone": "+1 (925) 582-3869",
    "address": "395 Karweg Place, Garnet, Mississippi, 9481",
    "registered": "2017-05-18T11:16:33 -02:00",
    "latitude": 21.372656,
    "longitude": -24.488326,
    "tags": [
      "aliquip",
      "aliqua",
      "magna",
      "pariatur",
      "cillum",
      "esse",
      "nisi"
    ],
    "friends": [
      {
        "id": 0,
        "name": "Davis Blake"
      },
      {
        "id": 1,
        "name": "Raymond Jefferson"
      },
      {
        "id": 2,
        "name": "Hoffman Roberts"
      }
    ],
    "greeting": "Hello, Joanna Atkinson! You have 3 unread messages.",
    "favoriteFruit": "apple"
  }')
  from table(generator(rowcount=>${rowCount}))`;
  const createTableWithVariant = (tableName) => `create or replace table ${tableName}(colA variant)`;

  const dropTableWithVariant = (tableName) => `drop table if exists ${tableName}`;
  const dropTempTable = `drop table if exists ${testVariantTempName}`;

  const insertVariant = (tableName) => `insert into ${tableName}
  select parse_json(value)
  from ${testVariantTempName}`;
  const selectCountVariant = (tableName) => `select count(colA) from ${(tableName)}`;

  // Parsers under test; each entry is plugged into snowflake.configure as
  // jsonColumnVariantParser before its run.
  const testCases = [];
  if (!choosenParser || choosenParser.toString().includes('Function')) {
    // Fix: the constructed Function must be invoked (trailing "()") to yield the
    // parsed value; the original pushed the function object itself.
    testCases.push({parser: 'Function', jsonColumnVariantParser: (rawColumnValue) => new Function(`return (${rawColumnValue})`)()});
  }
  if (!choosenParser || choosenParser.toString().includes('better-eval')) {
    // NOTE(review): verify the better-eval API — `.call('(…)')` passes the code
    // string as `this` with no argument; it likely should be
    // require('better-eval')('(' + rawColumnValue + ')'). Confirm before relying
    // on this test case's results.
    testCases.push({parser: 'betterEval', jsonColumnVariantParser: (rawColumnValue) => require('better-eval').call('(' + rawColumnValue + ')')});
  }
  if (!choosenParser || choosenParser.toString().includes('vm')) {
    testCases.push({parser: 'vm', jsonColumnVariantParser: rawColumnValue => require('vm').runInNewContext('(' + rawColumnValue + ')')});
  }
  // The eval lib contains a vulnerability, so we decided to stop using it.
  // if (!process.argv[4] || process.argv[4].toString().contains('eval')) {
  //   testCases.push({parser: 'eval', jsonColumnVariantParser: rawColumnValue => eval('(' + rawColumnValue + ')')})
  // };
  if (!choosenParser || choosenParser.toString().includes('JSON')) {
    testCases.push({parser: 'JSON', jsonColumnVariantParser: rawColumnValue => JSON.parse(rawColumnValue)});
  }

  /**
   * Runs one test case: configures the parser, loads the sample data, streams
   * it back using extractFunction, and prints timing / blocking stats. Tables
   * are dropped in `finally` regardless of outcome.
   */
  const execute = async ({parser, jsonColumnVariantParser}, extractFunction) => {
    console.log(`\nTest for parser: [${parser}] extracting by ${extractFunction.name}`);
    const testVariantTableName = `testVariantTable000${parser}`;
    const connection = await helpers.connectUsingEnv();
    return new Promise(async (resolve, reject) => {
      // Fix: the awaited setup queries ran bare inside the async executor, so a
      // failure became an unhandled rejection instead of rejecting this promise.
      try {
        snowflake.configure({
          jsonColumnVariantParser: jsonColumnVariantParser
        });

        await helpers.executeQuery(connection, createTempTableWithJsonData);
        await helpers.executeQuery(connection, createTableWithVariant(testVariantTableName));
        await helpers.executeQuery(connection, insertVariant(testVariantTableName));
        await helpers.executeQuery(connection, selectCountVariant(testVariantTableName));

        const queryTimeLabel = parser + 'SelectTime';
        let avgBlock = 0;
        let minBlock = 999999999999999;
        let maxBlock = 0;
        let blockCount = 0;
        // Record every event-loop stall observed while rows are streamed/parsed.
        blocked((time) => {
          blockCount++;
          avgBlock += time;
          minBlock = minBlock > time ? time : minBlock;
          maxBlock = maxBlock < time ? time : maxBlock;
        });

        console.time(queryTimeLabel);
        const streamResult = true;
        connection.execute({
          streamResult: streamResult,
          sqlText: `select *
                    from IDENTIFIER(?) LIMIT ${selectLimit}`,
          binds: [testVariantTableName],
          complete: function (err, stmt) {
            const stream = stmt.streamRows();
            extractFunction(stream);
            stream.on('end', function () {
              console.log('parser: ' + parser);
              console.log('streamResult: ' + streamResult);
              console.log('row count: ' + selectLimit);
              console.timeEnd(queryTimeLabel);
              console.log('average block time: ' + avgBlock / blockCount);
              console.log('minimum block time: ' + minBlock);
              console.log('maximum block time: ' + maxBlock);
              console.log('block call count: ' + blockCount);
              resolve();
            });
            stream.on('error', function (err) {
              console.log(err);
              reject(err);
            });
          }
        });
      } catch (err) {
        reject(err);
      }
    })
      .finally(async () => {
        await helpers.executeQuery(connection, dropTableWithVariant(testVariantTableName));
        await helpers.executeQuery(connection, dropTempTable);
      });
  };

  // Counts rows via 'data' events.
  function extractOnData(stream) {
    let count = 0;
    stream.on('data', function () {
      count++;
      if (count % 10000 === 0) {
        console.log(`Parsed rows: ${count}`);
      }
    });
  }

  // Counts rows by reading from the stream on 'readable' events.
  function extractOnStream(stream) {
    let count = 0;
    stream.on('readable', function () {
      while ((stream.read()) !== null) {
        count++;
        if (count % 10000 === 0) {
          console.log(`Parsed rows: ${count}`);
        }
      }
    });
  }

  // Run the test cases strictly one after another (each with both extraction styles).
  testCases.reduce((promise, nextParser) => {
    return promise
      .then(() => {
        return execute(nextParser, extractOnData);
      })
      .then(() => {
        return execute(nextParser, extractOnStream);
      });
  }, Promise.resolve());
}

run();

14 changes: 14 additions & 0 deletions samples/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"name": "sample",
"version": "0.0.1",
"description": "Snowflake Node.js driver samples",
"dependencies": {
"better-eval": "^1.3.0",
"blocked-at": "^1.2.0",
"snowflake-sdk": "^1.8.0",
"vm": "^0.1.0"
},
"scripts": {
"jsonParserComparison": "node jsonParserComparison.js"
}
}

0 comments on commit 837470a

Please sign in to comment.