Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
…nector-nodejs into SNOW-728803-request-id-sqlText
  • Loading branch information
sfc-gh-ext-simba-lf committed Sep 21, 2023
2 parents 3870a87 + 319adc8 commit 84d6dd3
Show file tree
Hide file tree
Showing 7 changed files with 344 additions and 11 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ jobs:
strategy:
fail-fast: false
matrix:
image: [ 'nodejs-centos7-node14']
image: [ 'nodejs-centos7-node14', 'nodejs-centos7-fips']
cloud: [ 'AWS', 'AZURE', 'GCP' ]
steps:
- uses: actions/checkout@v1
Expand Down
1 change: 1 addition & 0 deletions .npmignore
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ snowflake-sdk*.tgz
coverage
system_test/
scripts/
samples/
ci/
.github/
.eslintrc.js
Expand Down
40 changes: 30 additions & 10 deletions ci/image/Dockerfile.nodejs-centos7-fips-test
Original file line number Diff line number Diff line change
Expand Up @@ -35,29 +35,49 @@ SHELL [ "/usr/bin/scl", "enable", "devtoolset-8"]

# node-fips environment variables
ENV NODE_HOME $HOME/node
ENV NODEJS_VERSION 14.0.0
ENV FIPSDIR $HOME/install-openssl-fips
ENV OPENSSL_VERSION 2.0.16
ENV NODEJS_VERSION 18.17.0
ENV OPENSSL_VERSION 3.0.8
ENV PKG_CONFIG_PATH "/usr/local/lib64/pkgconfig"
ENV LD_LIBRARY_PATH "${LD_LIBRARY_PATH}:/usr/local/lib64"
ENV OPENSSL_CONF /usr/local/ssl/openssl.cnf
ENV FIPSCONF /usr/local/ssl/fipsmodule.cnf
ENV OPENSSL_MODULES=/usr/local/lib64/ossl-modules

# Install OpenSSL
# Install OpenSSL
RUN cd $HOME
RUN curl https://www.openssl.org/source/openssl-fips-$OPENSSL_VERSION.tar.gz -o $HOME/openssl-fips-$OPENSSL_VERSION.tar.gz
RUN curl https://www.openssl.org/source/openssl-$OPENSSL_VERSION.tar.gz -o $HOME/openssl-fips-$OPENSSL_VERSION.tar.gz
RUN tar -xvf $HOME/openssl-fips-$OPENSSL_VERSION.tar.gz
RUN mv openssl-fips-$OPENSSL_VERSION $HOME/openssl-fips
RUN mv openssl-$OPENSSL_VERSION $HOME/openssl-fips
RUN cd $HOME/openssl-fips


# Install OpenSSL dependencies
RUN yum -y install perl-IPC-Cmd
RUN yum -y install perl-Digest-SHA
RUN yum -y install openssl-devel

# You must run ONLY these commands when building the FIPS version of OpenSSL
RUN cd $HOME/openssl-fips && ./config && make && make install

RUN cd $HOME/openssl-fips && ./config enable-fips && make && make install

# Enable FIPS by editing the openssl.cnf file
RUN sed -i "s/openssl_conf = openssl_init/nodejs_conf = openssl_init/g" $OPENSSL_CONF
RUN sed -i "s/# .include fipsmodule.cnf/.include ${FIPSCONF//\//\\/}/g" $OPENSSL_CONF
RUN sed -i 's/# fips = fips_sect/fips = fips_sect/g' $OPENSSL_CONF
RUN sed -i 's/# activate = 1/activate = 1/g' $OPENSSL_CONF
RUN sed -i '55ialg_section = algorithm_sect' $OPENSSL_CONF
RUN sed -i '75idefault_properties = fips=yes' $OPENSSL_CONF
RUN sed -i '75i[algorithm_sect]' $OPENSSL_CONF

# Download and build NodeJS
RUN git clone --branch v$NODEJS_VERSION https://github.com/nodejs/node.git $NODE_HOME
RUN gcc --version
RUN g++ --version
RUN cd $NODE_HOME && ./configure --openssl-fips=$FIPSDIR && make -j2 &> /dev/null && make install
RUN cd $NODE_HOME && ./configure --shared-openssl --shared-openssl-libpath=/usr/local/lib64 --shared-openssl-includes=/usr/local/include/openssl --openssl-is-fips && make -j2 &> /dev/null && make install
# Should be $NODEJS_VERSION
RUN node --version
# Should be $OPENSSL_VERSION
RUN node -p "process.versions.openssl"
# Should be 1 (FIPS is enabled by default)
RUN node -p 'crypto.getFips()'

# workspace
RUN mkdir -p /home/user
Expand Down
54 changes: 54 additions & 0 deletions samples/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
********************************************************************************
NodeJS Driver - Samples
********************************************************************************

Install
======================================================================

In the `samples` directory, run `npm i`.

Test
======================================================================

Prepare for tests
----------------------------------------------------------------------

Specify env variables:

```
export SNOWFLAKE_TEST_USER=<your_user>
export SNOWFLAKE_TEST_PASSWORD=<your_password>
export SNOWFLAKE_TEST_ACCOUNT=<your_account>
export SNOWFLAKE_TEST_WAREHOUSE=<your_warehouse>
export SNOWFLAKE_TEST_DATABASE=<your_database>
export SNOWFLAKE_TEST_SCHEMA=<your_schema>
export SNOWFLAKE_TEST_PROTOCOL=<your_snowflake_protocol>
export SNOWFLAKE_TEST_HOST=<your_snowflake_host>
export SNOWFLAKE_TEST_PORT=<your_snowflake_port>
```

Run test to compare json parser
----------------------------------------------------------------------

By default, the test creates a table with 300000 rows of sample variant data (json format)
and measures the time and number of blocks while retrieving the results using two different
methods to extract data.
1. Streaming results: `stream.on('readable', ...)`
2. Events results: `stream.on('data', ...)`
```
npm run jsonParserComparison
```
The test can be started with parameters:
- number of rows in the table, default=300000
- number of selected rows, default=300000
- run only the chosen parser, if given as the last parameter: Function, vm, better-eval, JSON; default: all

Example:
```
npm run jsonParserComparison 300000 300000 Function
```

or
```
npm run jsonParserComparison 300000 300000 JSON
```
56 changes: 56 additions & 0 deletions samples/helpers.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
const snowflake = require('snowflake-sdk');
exports.executeQuery = async function (connection, query, binds) {
await new Promise((resolve, reject) => {
connection.execute({
sqlText: query,
binds: binds,
complete: function (err, stmt, rows) {
if (!err) {
resolve(rows);
} else {
reject(err);
}
}
});
});
};

exports.connectUsingEnv = async () => {
const snowflakeTestProtocol = process.env.SNOWFLAKE_TEST_PROTOCOL;
const snowflakeTestHost = process.env.SNOWFLAKE_TEST_HOST;
const snowflakeTestPort = process.env.SNOWFLAKE_TEST_PORT;
const snowflakeTestAccount = process.env.SNOWFLAKE_TEST_ACCOUNT;
const snowflakeTestUser = process.env.SNOWFLAKE_TEST_USER;
const snowflakeTestDatabase = process.env.SNOWFLAKE_TEST_DATABASE;
const snowflakeTestWarehouse = process.env.SNOWFLAKE_TEST_WAREHOUSE;
const snowflakeTestSchema = process.env.SNOWFLAKE_TEST_SCHEMA;
const snowflakeTestPassword = process.env.SNOWFLAKE_TEST_PASSWORD;
const snowflakeTestRole = process.env.SNOWFLAKE_TEST_ROLE;

const connection = snowflake.createConnection({
account: snowflakeTestAccount,
username: snowflakeTestUser,
password: snowflakeTestPassword,
role: snowflakeTestRole,
database: snowflakeTestDatabase,
schema: snowflakeTestSchema,
warehouse: snowflakeTestWarehouse,
host: snowflakeTestHost,
port: snowflakeTestPort,
protocol: snowflakeTestProtocol
});

return new Promise((resolve, reject) => {
connection.connect(
function (err, conn) {
if (err) {
console.error('Unable to connect: ' + err.message);
reject(new Error(err.message));
} else {
console.log('Successfully connected to Snowflake');
resolve(conn);
}
}
);
});
};
188 changes: 188 additions & 0 deletions samples/jsonParserComparison.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,188 @@
const snowflake = require('snowflake-sdk');
const helpers = require('./helpers');
const blocked = require('blocked-at');

/**
 * Benchmarks different JSON/variant column parsers against the driver.
 *
 * For each selected parser it creates a temp table of sample JSON, copies it
 * into a variant table, then streams the rows back twice (via 'data' events
 * and via 'readable'/read()), printing elapsed time and event-loop blocking
 * statistics collected by the `blocked-at` package.
 *
 * CLI arguments (all optional):
 *   argv[2] - number of rows to insert (default 300000)
 *   argv[3] - number of rows to select (default 300000)
 *   argv[4] - restrict to one parser: Function, vm, better-eval or JSON
 */
async function run() {
  const defaultRowCount = 300000;
  const rowCountToInsert = process.argv[2];
  const rowCountToFetch = process.argv[3];
  const chosenParser = process.argv[4];
  console.log('Started with arguments: ');
  console.log(`Inserted rows amount: ${rowCountToInsert} - default ${defaultRowCount}`);
  console.log(`Selected rows amount: ${rowCountToFetch} - default ${defaultRowCount}`);
  console.log(`Selected json parse : ${chosenParser} - default all of Function, vm, better-eval, JSON`);

  const rowCount = rowCountToInsert || defaultRowCount;
  const selectLimit = rowCountToFetch || defaultRowCount;
  const testVariantTempName = 'testJsonTempTable000';

  // One fixed sample document replicated rowCount times via the generator.
  const createTempTableWithJsonData = `CREATE OR REPLACE TABLE ${testVariantTempName} (value string)
    AS select parse_json('{
      "_id": "6501c357397b66ce47719212",
      "index": 0,
      "guid": "e7e0e5d8-82b4-47f7-a2ab-68588c93d81e",
      "isActive": false,
      "balance": "$2,611.69",
      "picture": "http://placehold.it/32x32",
      "age": 21,
      "eyeColor": "blue",
      "name": "Joanna Atkinson",
      "gender": "female",
      "company": "AQUAZURE",
      "email": "[email protected]",
      "phone": "+1 (925) 582-3869",
      "address": "395 Karweg Place, Garnet, Mississippi, 9481",
      "registered": "2017-05-18T11:16:33 -02:00",
      "latitude": 21.372656,
      "longitude": -24.488326,
      "tags": [
        "aliquip",
        "aliqua",
        "magna",
        "pariatur",
        "cillum",
        "esse",
        "nisi"
      ],
      "friends": [
        {
          "id": 0,
          "name": "Davis Blake"
        },
        {
          "id": 1,
          "name": "Raymond Jefferson"
        },
        {
          "id": 2,
          "name": "Hoffman Roberts"
        }
      ],
      "greeting": "Hello, Joanna Atkinson! You have 3 unread messages.",
      "favoriteFruit": "apple"
    }')
    from table(generator(rowcount=>${rowCount}))`;
  const createTableWithVariant = (tableName) => `create or replace table ${tableName}(colA variant)`;

  const dropTableWithVariant = (tableName) => `drop table if exists ${tableName}`;
  const dropTempTable = `drop table if exists ${testVariantTempName}`;

  const insertVariant = (tableName) => `insert into ${tableName}
    select parse_json(value)
    from ${testVariantTempName}`;
  const selectCountVariant = (tableName) => `select count(colA) from ${(tableName)}`;

  // Build the list of parsers to benchmark; no argument means "all".
  const testCases = [];
  if (!chosenParser || chosenParser.toString().includes('Function')) {
    testCases.push({ parser: 'Function', jsonColumnVariantParser: (rawColumnValue) => new Function(`return (${rawColumnValue})`) });
  }
  if (!chosenParser || chosenParser.toString().includes('better-eval')) {
    testCases.push({ parser: 'betterEval', jsonColumnVariantParser: (rawColumnValue) => require('better-eval').call('(' + rawColumnValue + ')') });
  }
  if (!chosenParser || chosenParser.toString().includes('vm')) {
    testCases.push({ parser: 'vm', jsonColumnVariantParser: (rawColumnValue) => require('vm').runInNewContext('(' + rawColumnValue + ')') });
  }
  // eval lib contains vulnerability so we decide to resign using it
  // if (!process.argv[4] || process.argv[4].toString().contains('eval')) {
  //   testCases.push({parser: 'eval', jsonColumnVariantParser: rawColumnValue => eval('(' + rawColumnValue + ')')})
  // };
  if (!chosenParser || chosenParser.toString().includes('JSON')) {
    testCases.push({ parser: 'JSON', jsonColumnVariantParser: (rawColumnValue) => JSON.parse(rawColumnValue) });
  }

  /**
   * Runs one benchmark: configures the driver with the given parser, loads
   * the sample data, streams it back with `extractFunction`, prints timing
   * and blocking stats, and always drops the helper tables afterwards.
   *
   * NOTE(review): the setup queries are awaited before the Promise wrapping
   * `connection.execute` is created — the original used an async Promise
   * executor, which silently lost rejections from those awaits.
   */
  const execute = async ({ parser, jsonColumnVariantParser }, extractFunction) => {
    console.log(`\nTest for parser: [${parser}] extracting by ${extractFunction.name}`);
    const testVariantTableName = `testVariantTable000${parser}`;
    const connection = await helpers.connectUsingEnv();
    try {
      snowflake.configure({
        jsonColumnVariantParser: jsonColumnVariantParser
      });

      await helpers.executeQuery(connection, createTempTableWithJsonData);
      await helpers.executeQuery(connection, createTableWithVariant(testVariantTableName));
      await helpers.executeQuery(connection, insertVariant(testVariantTableName));
      await helpers.executeQuery(connection, selectCountVariant(testVariantTableName));

      const queryTimeLabel = parser + 'SelectTime';
      let avgBlock = 0;
      let minBlock = 999999999999999;
      let maxBlock = 0;
      let blockCount = 0;
      // Collect event-loop blocking stats while the rows are streamed.
      blocked((time) => {
        blockCount++;
        avgBlock += time;
        minBlock = Math.min(minBlock, time);
        maxBlock = Math.max(maxBlock, time);
      });

      console.time(queryTimeLabel);
      const streamResult = true;
      await new Promise((resolve, reject) => {
        connection.execute({
          streamResult: streamResult,
          sqlText: `select *
                    from IDENTIFIER(?) LIMIT ${selectLimit}`,
          binds: [testVariantTableName],
          complete: function (err, stmt) {
            // The original ignored `err` and crashed on streamRows().
            if (err) {
              reject(err);
              return;
            }
            const stream = stmt.streamRows();
            extractFunction(stream);
            stream.on('end', function () {
              console.log('parser: ' + parser);
              console.log('streamResult: ' + streamResult);
              console.log('row count: ' + selectLimit);
              console.timeEnd(queryTimeLabel);
              console.log('average block time: ' + avgBlock / blockCount);
              console.log('minimum block time: ' + minBlock);
              console.log('maximum block time: ' + maxBlock);
              console.log('block call count: ' + blockCount);
              resolve();
            });
            stream.on('error', function (streamErr) {
              console.log(streamErr);
              reject(streamErr);
            });
          }
        });
      });
    } finally {
      // Clean up the tables even when the benchmark failed.
      await helpers.executeQuery(connection, dropTableWithVariant(testVariantTableName));
      await helpers.executeQuery(connection, dropTempTable);
    }
  };

  // Consume the stream via 'data' events, logging progress every 10000 rows.
  function extractOnData(stream) {
    let count = 0;
    stream.on('data', function () {
      count++;
      if (count % 10000 === 0) {
        console.log(`Parsed rows: ${count}`);
      }
    });
  }

  // Consume the stream via 'readable' + read(), logging progress every 10000 rows.
  function extractOnStream(stream) {
    let count = 0;
    stream.on('readable', function () {
      while ((stream.read()) !== null) {
        count++;
        if (count % 10000 === 0) {
          console.log(`Parsed rows: ${count}`);
        }
      }
    });
  }

  // Run the parsers strictly one after another so the timings don't overlap.
  // (The original built the same chain with reduce() but never awaited it,
  // so run() resolved before any benchmark finished.)
  for (const testCase of testCases) {
    await execute(testCase, extractOnData);
    await execute(testCase, extractOnStream);
  }
}

// Surface any failure instead of leaving a floating, unhandled promise.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});

14 changes: 14 additions & 0 deletions samples/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"name": "sample",
"version": "0.0.1",
"description": "Snowflake Node.js driver samples",
"dependencies": {
"better-eval": "^1.3.0",
"blocked-at": "^1.2.0",
"snowflake-sdk": "^1.8.0",
"vm": "^0.1.0"
},
"scripts": {
"jsonParserComparison": "node jsonParserComparison.js"
}
}

0 comments on commit 84d6dd3

Please sign in to comment.