issue 366 - Replace better-eval with vm and add option to configure parser (#536)

* Replace better-eval with vm

* issue366 - Add test case for variant column type

* issue 366 - Remove 'vm' from package.json as it's already part of node

* issue 366 - Rename test

* issue 366 - Make array const

* issue 366 - Wrapped lines in else clause

* issue 366 - Put variant test in its own describe and placed create/drop table in the before/after functions

* issue 366 - Temp revert wrapping test in describe

* issue 366 - Temp revert else clause

* issue 366 - Temp revert const array

* issue 366 - Temp revert name change

* issue 366 - Temp revert remove vm

* issue 366 - Remove vm

* issue 366 - Rename test

* issue 366 - Make array const

* issue 366 - Wrap in else clause

* issue 366 - Wrap variant test in its own describe

* Require async used by the variant test

* Parse JSON one row at a time

* issue 366 - Add option to configure json/xml parser

* issue 366 - Fix typo in the logLevel error code constant

* issue 366 - Add test case for xml data type

* issue 366 - Add tests for snowflake.configure options

* issue 366 - Fix ocsp variable

* Return to default configuration after test

* Return to default configuration after test

* issue 366 - nit newline

* Run lint on new test file

* Move require up

* issue 366 - Keep old behaviour of xml parser

* issue 366 - Log and throw error in the default xml parser

* issue 366 - Add expected signature for setting a custom parser

* issue 366 - Make new variables const

* Wrap variant test cases in their own describe

* issue 366 - Wrap the configure test cases in their own describe

* issue 366 - Refactor calling the test cases

* issue 366 - Fixed the custom xml parser in the test case

* issue 366 - Create test with custom parser

* issue 366 - Move require up

* issue 366 - Add logging and throw for both JSON/XML parser errors
sfc-gh-ext-simba-lf authored Jun 28, 2023
1 parent 01b2eb4 commit 0926a0f
Showing 10 changed files with 555 additions and 42 deletions.
23 changes: 9 additions & 14 deletions lib/connection/result/column.js
@@ -5,11 +5,11 @@
var Util = require('../../util');
var Errors = require('../../errors');
var BigNumber = require('bignumber.js');
const GlobalConfig = require('../../global_config');
const Logger = require('../../logger');
var SfTimestamp = require('./sf_timestamp');
var SqlTypes = require('./data_types').SqlTypes;
var bigInt = require('big-integer');
var { XMLParser, XMLValidator } = require("fast-xml-parser");
var betterEval = require("better-eval");

var NULL_UPPERCASE = 'NULL';

@@ -552,23 +552,18 @@ function convertRawVariant(rawColumnValue, column, context)
{
try
{
ret = betterEval("(" + rawColumnValue + ")");
ret = GlobalConfig.jsonColumnVariantParser(rawColumnValue);
}
catch (parseError)
catch (jsonParseError)
{
// check if raw string is in XML format
// ensure each tag is enclosed and all attributes and elements are valid
if (XMLValidator.validate(rawColumnValue) === true)
try
{
// use XML parser
ret = new XMLParser().parse(rawColumnValue);
ret = GlobalConfig.xmlColumnVariantParser(rawColumnValue);
}
else
catch (xmlParseError)
{
// TODO: log the error

// throw the error
throw parseError;
Logger.getInstance().debug("Variant cannot be parsed neither as JSON: %s nor as XML: %s", jsonParseError.message, xmlParseError.message);
throw new Errors.VariantParseError(jsonParseError, xmlParseError);
}
}
}
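
For readability, the variant parse path in convertRawVariant after this hunk is applied looks roughly as follows. This is a sketch reconstructed from the diff above, not a verbatim copy of the file; the Util.exists guard and the return handling are assumptions, only the parse/fallback chain is taken from the hunk.

function convertRawVariant(rawColumnValue, column, context)
{
  var ret;
  if (Util.exists(rawColumnValue))  // assumed guard around the parse logic
  {
    try
    {
      // configurable JSON parser; defaults to a vm-based eval (see lib/global_config.js)
      ret = GlobalConfig.jsonColumnVariantParser(rawColumnValue);
    }
    catch (jsonParseError)
    {
      try
      {
        // fall back to the configurable XML parser
        ret = GlobalConfig.xmlColumnVariantParser(rawColumnValue);
      }
      catch (xmlParseError)
      {
        // neither parser succeeded: log both failures and raise a combined error
        Logger.getInstance().debug("Variant cannot be parsed neither as JSON: %s nor as XML: %s",
          jsonParseError.message, xmlParseError.message);
        throw new Errors.VariantParseError(jsonParseError, xmlParseError);
      }
    }
  }
  return ret;
}
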
2 changes: 2 additions & 0 deletions lib/constants/error_messages.js
@@ -20,6 +20,8 @@ exports[402002] = 'Request to S3/Blob failed.';
exports[403001] = 'Invalid logLevel. The specified value must be one of these five levels: error, warn, debug, info and trace.';
exports[403002] = 'Invalid insecureConnect option. The specified value must be a boolean.';
exports[403003] = 'Invalid OCSP mode. The specified value must be FAIL_CLOSED, FAIL_OPEN, or INSECURE_MODE.';
exports[403004] = 'Invalid custom JSON parser. The specified value must be a function.';
exports[403005] = 'Invalid custom XML parser. The specified value must be a function.';

// 404001
exports[404001] = 'Connection options must be specified.';
21 changes: 20 additions & 1 deletion lib/core.js
@@ -171,7 +171,7 @@ function Core(options)
{
// check that the specified value is a valid tag
Errors.checkArgumentValid(LoggerCore.isValidLogTag(logTag),
ErrorCodes.ERR_GLOGAL_CONFIGURE_INVALID_LOG_LEVEL);
ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_LOG_LEVEL);

Logger.getInstance().configure(
{
@@ -188,6 +188,7 @@ function Core(options)

GlobalConfig.setInsecureConnect(insecureConnect);
}

let ocspFailOpen = options.ocspFailOpen;
if (Util.exists(ocspFailOpen))
{
@@ -196,6 +197,24 @@ function Core(options)

GlobalConfig.setOcspFailOpen(ocspFailOpen);
}

let jsonColumnVariantParser = options.jsonColumnVariantParser;
if (Util.exists(jsonColumnVariantParser))
{
Errors.checkArgumentValid(Util.isFunction(jsonColumnVariantParser),
ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_JSON_PARSER);

GlobalConfig.setJsonColumnVariantParser(jsonColumnVariantParser);
}

let xmlColumnVariantParser = options.xmlColumnVariantParser;
if (Util.exists(xmlColumnVariantParser))
{
Errors.checkArgumentValid(Util.isFunction(xmlColumnVariantParser),
ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_XML_PARSER);

GlobalConfig.setXmlColumnVariantParser(xmlColumnVariantParser);
}
}
};
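
The hunk above wires the two new snowflake.configure options through the same validate-then-set pattern used for insecureConnect and ocspFailOpen. A minimal usage sketch, mirroring the parsers exercised in the tests further down (the parser bodies are illustrative choices, not the driver defaults):

const snowflake = require('snowflake-sdk');
const { XMLParser } = require('fast-xml-parser');

// Any non-function value is rejected with ERR_GLOBAL_CONFIGURE_INVALID_JSON_PARSER (403004)
// or ERR_GLOBAL_CONFIGURE_INVALID_XML_PARSER (403005).
snowflake.configure({
  jsonColumnVariantParser: rawColumnValue => JSON.parse(rawColumnValue),
  xmlColumnVariantParser: rawColumnValue => new XMLParser().parse(rawColumnValue)
});
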

17 changes: 17 additions & 0 deletions lib/errors.js
@@ -25,6 +25,8 @@ codes.ERR_LARGE_RESULT_SET_RESPONSE_FAILURE = 402002;
codes.ERR_GLOBAL_CONFIGURE_INVALID_LOG_LEVEL = 403001;
codes.ERR_GLOBAL_CONFIGURE_INVALID_INSECURE_CONNECT = 403002;
codes.ERR_GLOBAL_CONFIGURE_INVALID_OCSP_MODE = 403003;
codes.ERR_GLOBAL_CONFIGURE_INVALID_JSON_PARSER = 403004;
codes.ERR_GLOBAL_CONFIGURE_INVALID_XML_PARSER = 403005;

// 404001
codes.ERR_CONN_CREATE_MISSING_OPTIONS = 404001;
@@ -378,6 +380,21 @@ exports.createOCSPError = function (errorCode)
);
};

/**
* Creates a new error by combining the error messages from the json parser and xml parser
*
* @param {Object} jsonParseError contains the JSON parse error message
* @param {Object} xmlParseError contains the XML parse error message
* @returns {Error}
*/
exports.VariantParseError = function (jsonParseError, xmlParseError)
{
var errMessage = `VariantParseError: Variant cannot be parsed neither as JSON nor as XML:\n` +
` - JSON parse error message: ${jsonParseError.message}\n` +
` - XML parse error message: ${xmlParseError.message}`;
return new Error(errMessage);
};

/**
* Determines if a given error is an InternalAssertError.
*
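
The combined error built by the VariantParseError factory above reads as shown below; the two parser messages here are hypothetical, only the message format comes from the diff.

const err = Errors.VariantParseError(
  new Error('Unexpected token a'),     // hypothetical JSON parser failure
  new Error('Invalid Tag expression')  // hypothetical XML validator failure
);
console.log(err.message);
// VariantParseError: Variant cannot be parsed neither as JSON nor as XML:
//  - JSON parse error message: Unexpected token a
//  - XML parse error message: Invalid Tag expression
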
61 changes: 61 additions & 0 deletions lib/global_config.js
@@ -9,6 +9,10 @@ const mkdirp = require('mkdirp');
const Util = require('./util');
const Errors = require('./errors');
const Logger = require('./logger');
const vm = require('vm');
const { XMLParser, XMLValidator } = require("fast-xml-parser");

const VM_CONTEXT = vm.createContext() // create a new context so VM does not have to make a new one for each conversion

let insecureConnect = false;

@@ -51,6 +55,16 @@ exports.setOcspFailOpen = function (value)
ocspFailOpen = value;
};

/**
* Returns the value of the 'ocspFailOpen' parameter.
*
* @param {boolean} value
*/
exports.getOcspFailOpen = function ()
{
return ocspFailOpen;
};

const ocspModes = {
FAIL_CLOSED: 'FAIL_CLOSED',
FAIL_OPEN: 'FAIL_OPEN',
@@ -157,3 +171,50 @@ const rest = {
HTTPS_PROTOCOL: 'https'
};
exports.rest = rest;

// The default JSON parser
exports.jsonColumnVariantParser = rawColumnValue => vm.runInContext("(" + rawColumnValue + ")", VM_CONTEXT);

/**
* Updates the value of the 'jsonColumnVariantParser' parameter.
*
* @param {function: (rawColumnValue: string) => any} value
*/
exports.setJsonColumnVariantParser = function (value)
{
// validate input
Errors.assertInternal(Util.isFunction(value));

exports.jsonColumnVariantParser = value;
};

// The default XML parser
exports.xmlColumnVariantParser = rawColumnValue =>
{
// check if raw string is in XML format
// ensure each tag is enclosed and all attributes and elements are valid
// XMLValidator.validate returns true if valid, returns an error if invalid
var validateResult = XMLValidator.validate(rawColumnValue);
if (validateResult === true)
{
// use XML parser
return new XMLParser().parse(rawColumnValue);
}
else
{
throw new Error(validateResult.err.msg);
}
};

/**
* Updates the value of the 'xmlColumnVariantParser' parameter.
*
* @param {function: (rawColumnValue: string) => any} value
*/
exports.setXmlColumnVariantParser = function (value)
{
// validate input
Errors.assertInternal(Util.isFunction(value));

exports.xmlColumnVariantParser = value;
};
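
A note on the default JSON parser defined above: it evaluates the raw column text inside a single reused vm context instead of calling JSON.parse, because the variant text handled by the driver can contain non-strict-JSON tokens such as -Infinity and undefined (the testVariant case below expects exactly these values). A standalone sketch of the difference, not driver code:

const vm = require('vm');

const raw = '{a: 1, b: [1, 2, 3, -Infinity, undefined], c: {a: 1}}';
const ctx = vm.createContext();  // reused across conversions, like VM_CONTEXT above

const parsed = vm.runInContext('(' + raw + ')', ctx);
console.log(parsed.b);           // [ 1, 2, 3, -Infinity, undefined ]

// JSON.parse(raw) would throw here: unquoted keys, -Infinity and undefined are not valid JSON.
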
9 changes: 9 additions & 0 deletions lib/logger/core.js
@@ -46,6 +46,15 @@ var LOG_LEVELS =
LOG_LEVEL_TRACE
];

const LOG_LEVEL_TAGS = {
ERROR: LOG_LEVEL_ERROR.tag,
WARN: LOG_LEVEL_WARN.tag,
INFO: LOG_LEVEL_INFO.tag,
DEBUG: LOG_LEVEL_DEBUG.tag,
TRACE: LOG_LEVEL_TRACE.tag,
};
exports.LOG_LEVEL_TAGS = LOG_LEVEL_TAGS;

// create two maps, one in which the key is the log level and the value is the
// corresponding log level object, and another in which the key is the log tag
// and the value is the corresponding log level
1 change: 0 additions & 1 deletion package.json
@@ -12,7 +12,6 @@
"async": "^3.2.3",
"aws-sdk": "^2.878.0",
"axios": "^0.27.2",
"better-eval": "^1.3.0",
"big-integer": "^1.6.43",
"bignumber.js": "^2.4.0",
"binascii": "0.0.2",
141 changes: 115 additions & 26 deletions test/integration/testDataType.js
@@ -2,6 +2,8 @@
* Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved.
*/
var async = require('async');
const GlobalConfig = require('./../../lib/global_config');
const snowflake = require('./../../lib/snowflake');
var testUtil = require('./testUtil');
const sharedStatement = require('./sharedStatements');
var bigInt = require("big-integer");
@@ -28,10 +30,13 @@ describe('Test DataType', function ()
var dropTableWithTime = 'drop table if exists testTime';
var dropTableWithTimestamp = 'drop table if exists testTimestamp';
var dropTableWithBoolean = 'drop table if exists testBoolean';
const truncateTableWithVariant = 'truncate table if exists testVariant;'
var insertDouble = 'insert into testDouble values(123.456)';
var insertLargeNumber = 'insert into testNumber values (12345678901234567890123456789012345678)';
var insertRegularSizedNumber = 'insert into testNumber values (100000001)';
var insertVariant = 'insert into testVariant select parse_json(\'{a : 1 , b :[1 , 2 , 3, -Infinity, undefined], c : {a : 1}}\')';
const insertVariantJSON = 'insert into testVariant select parse_json(\'{a : 1 , b :[1 , 2 , 3, -Infinity, undefined], c : {a : 1}}\')';
const insertVariantJSONForCustomParser = 'insert into testVariant select parse_json(\'{a : 1 , b :[1 , 2 , 3], c : {a : 1}}\')';
const insertVariantXML = 'insert into testVariant select parse_xml(\'<root><a>1</a><b>1</b><c><a>1</a></c></root>\')';
var insertArray = 'insert into testArray select parse_json(\'["a", 1]\')';
var insertDate = 'insert into testDate values(to_date(\'2012-11-11\'))';
var insertTime = 'insert into testTime values(to_time(\'12:34:56.789789789\'))';
@@ -216,32 +221,116 @@ describe('Test DataType', function ()

describe('testSemiStructuredDataType', function ()
{
it('testVariant', function (done)
describe('testVariant', function ()
{
async.series(
[
function (callback)
{
testUtil.executeCmd(connection, createTableWithVariant, callback);
},
function (callback)
{
testUtil.executeCmd(connection, insertVariant, callback);
},
function (callback)
{
testUtil.executeQueryAndVerify(
connection,
selectVariant,
[{'COLA': {a: 1, b: [1, 2, 3, -Infinity, undefined], c: {a: 1}}}],
callback,
null,
true,
false
);
}],
done
);
before(async () =>
{
await testUtil.executeCmdAsync(connection, createTableWithVariant);
});

after(async () =>
{
await testUtil.executeCmdAsync(connection, dropTableWithVariant);
});

afterEach(async () =>
{
await testUtil.executeCmdAsync(connection, truncateTableWithVariant);
});

it('testJSON', function (done)
{
async.series(
[
function (callback)
{
testUtil.executeCmd(connection, insertVariantJSON, callback);
},
function (callback)
{
testUtil.executeQueryAndVerify(
connection,
selectVariant,
[{ 'COLA': { a: 1, b: [1, 2, 3, -Infinity, undefined], c: { a: 1 } } }],
callback,
null,
true,
false
);
}],
done
);
});

it('testXML', function (done)
{
async.series(
[
function (callback)
{
testUtil.executeCmd(connection, insertVariantXML, callback);
},
function (callback)
{
testUtil.executeQueryAndVerify(
connection,
selectVariant,
[{ 'COLA': { root: { a: 1, b: 1, c: { a: 1 } } } }],
callback,
null,
true,
false
);
}],
done
);
});

describe('testCustomParser', function ()
{
let originalParserConfig;

before(() =>
{
originalParserConfig = {
jsonColumnVariantParser: GlobalConfig.jsonColumnVariantParser,
xmlColumnVariantParser: GlobalConfig.xmlColumnVariantParser
}
});

after(() =>
{
snowflake.configure(originalParserConfig);
});

it('testJSONCustomParser', function (done)
{
async.series(
[
function (callback)
{
snowflake.configure({
jsonColumnVariantParser: rawColumnValue => JSON.parse(rawColumnValue)
})
testUtil.executeCmd(connection, insertVariantJSONForCustomParser, callback);
},
function (callback)
{
testUtil.executeQueryAndVerify(
connection,
selectVariant,
[{ 'COLA': { a: 1, b: [1, 2, 3,], c: { a: 1 } } }],
callback
);
}
],
done
);
});

// TODO SNOW - 830291: add custom xml parser test
//it('testXMLCustomParser', function (done) {});
});
});

it('testArray', function (done)
144 changes: 144 additions & 0 deletions test/integration/testLargeResultSet.js
@@ -2,6 +2,7 @@
* Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved.
*/
const assert = require('assert');
const async = require('async');
const testUtil = require('./testUtil');

const sourceRowCount = 10000;
@@ -105,4 +106,147 @@ describe('Large result Set Tests', function ()
}
});
});

describe('Large Result Set Tests For Variant Column Type', function ()
{
const createTempTable = 'create or replace table testVariantTemp(value string)';
const createTableWithVariant = 'create or replace table testVariantTable(colA variant)';
const dropTableWithVariant = 'drop table if exists testVariantTable';
const dropTempTable = 'drop table if exists testVariantTemp';

before(async () =>
{
await testUtil.executeCmdAsync(connection, createTableWithVariant);
await testUtil.executeCmdAsync(connection, createTempTable);
});

after(async () =>
{
await testUtil.executeCmdAsync(connection, dropTableWithVariant);
await testUtil.executeCmdAsync(connection, dropTempTable);
});

it('testSelectOnVariantColumnForLargeResultSets', function (done)
{
const insertTemp = 'insert into testVariantTemp values (?)';
const insertVariant = 'insert into testVariantTable select parse_json(value) from testVariantTemp';
const selectVariant = 'select * from testVariantTable';

const arrJSON = [];
for (let i = 0; i < sourceRowCount; i++)
{
const sampleJSON = {
"root":
{
"key":
[
{
"key1": i,
"key2": "value2",
"key3": "value3",
"key4": "value4",
"key5":
{
"key":
[
{ "key1": "value1", "key2": "value2" },
{ "key1": "value1", "key2": "value2" },
{ "key1": "value1", "key2": "value2" },
{ "key1": "value1", "key2": "value2" }
]
},
"key6":
[
{ "key1": "value1", "key": "value" },
{ "key1": "value1", "key": "value" },
{ "key1": "value1", "key": "value" },
{ "key1": "value1", "key": "value" },
{ "key1": "value1", "key": "value" },
{ "key1": "value1", "key": "value" },
{ "key1": "value1", "key": "value" }
]
},
]
}
};
arrJSON.push([JSON.stringify(sampleJSON)]);
}

async.series([
function (callback)
{
connection.execute({
sqlText: insertTemp,
binds: arrJSON,
complete: function (err, stmt)
{
if (err)
{
callback(err);
}
else
{
try
{
assert.strictEqual(stmt.getNumUpdatedRows(), sourceRowCount);
callback();
}
catch (err)
{
callback(err);
}
}
}
});
},
function (callback)
{
connection.execute({
sqlText: insertVariant,
complete: (err) => callback(err)
})
},
function (callback)
{
connection.execute({
sqlText: selectVariant,
streamResult: true,
complete: function (err, stmt)
{
if (err)
{
callback(err);
}
else
{
var stream = stmt.streamRows();
var rowCount = 0;
stream.on('data', function ()
{
rowCount++;
});
stream.on('error', function (err)
{
callback(err);
});
stream.on('end', function ()
{
try
{
assert.strictEqual(rowCount, sourceRowCount);
callback();
}
catch (err)
{
callback(err);
}
});
}
}
});
}],
done
);
});
});
});
178 changes: 178 additions & 0 deletions test/unit/snowflake_config_test.js
@@ -0,0 +1,178 @@
/*
* Copyright (c) 2023 Snowflake Computing Inc. All rights reserved.
*/

const assert = require('assert');
const snowflake = require('./../../lib/snowflake');
const ErrorCodes = require('./../../lib/errors').codes;
const Logger = require('./../../lib/logger');
const GlobalConfig = require('./../../lib/global_config');

const LOG_LEVEL_TAGS = require('./../../lib/logger/core').LOG_LEVEL_TAGS;

describe('Snowflake Configure Tests', function () {
let originalConfig;

before(function () {
originalConfig = {
logLevel: Logger.getInstance().getLevelTag(),
insecureConnect: GlobalConfig.isInsecureConnect(),
ocspFailOpen: GlobalConfig.getOcspFailOpen(),
jsonColumnVariantParser: GlobalConfig.jsonColumnVariantParser,
xmlColumnVariantParser: GlobalConfig.xmlColumnVariantParser
};
});

after(function () {
snowflake.configure(originalConfig);
});

describe('Test invalid arguments', function () {
const negativeTestCases =
[
{
name: 'invalid logLevel',
options: { logLevel: 'unsupported' },
errorCode: ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_LOG_LEVEL
},
{
name: 'invalid insecureConnect',
options: { insecureConnect: 'unsupported' },
errorCode: ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_INSECURE_CONNECT
},
{
name: 'invalid ocspMode',
options: { ocspFailOpen: 'unsupported' },
errorCode: ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_OCSP_MODE
},
{
name: 'invalid json parser',
options: { jsonColumnVariantParser: 'unsupported' },
errorCode: ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_JSON_PARSER
},
{
name: 'invalid xml parser',
options: { xmlColumnVariantParser: 'unsupported' },
errorCode: ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_XML_PARSER
},
];

negativeTestCases.forEach(testCase => {
it(testCase.name, function () {
let error;

try {
snowflake.configure(testCase.options);
} catch (err) {
error = err;
} finally {
assert.ok(error);
assert.strictEqual(error.code, testCase.errorCode);
}
});
});
});

describe('Test valid arguments', function () {
const testCases =
[
{
name: 'logLevel error',
options:
{
logLevel: LOG_LEVEL_TAGS.ERROR
}
},
{
name: 'logLevel warn',
options:
{
logLevel: LOG_LEVEL_TAGS.WARN
}
},
{
name: 'logLevel debug',
options:
{
logLevel: LOG_LEVEL_TAGS.DEBUG
}
},
{
name: 'logLevel info',
options:
{
logLevel: LOG_LEVEL_TAGS.INFO
}
},
{
name: 'logLevel trace',
options:
{
logLevel: LOG_LEVEL_TAGS.TRACE
}
},
{
name: 'insecureConnect false',
options:
{
insecureConnect: false
}
},
{
name: 'insecureConnect true',
options:
{
insecureConnect: true
}
},
{
name: 'ocspFailOpen false',
options:
{
ocspFailOpen: false
}
},
{
name: 'ocspFailOpen true',
options:
{
ocspFailOpen: true
}
},
{
name: 'json parser',
options:
{
jsonColumnVariantParser: rawColumnValue => require('vm').runInNewContext('(' + rawColumnValue + ')')
}
},
{
name: 'xml parser',
options:
{
xmlColumnVariantParser: rawColumnValue => new (require("fast-xml-parser")).XMLParser().parse(rawColumnValue)
}
},
];

testCases.forEach(testCase => {
it(testCase.name, function () {
snowflake.configure(testCase.options);
Object.keys(testCase.options).forEach(function (key) {
const ref = testCase.options[key];
let val;
if (key == 'logLevel') {
val = Logger.getInstance().getLevelTag();
} else if (key == 'insecureConnect') {
val = GlobalConfig.isInsecureConnect();
} else if (key == 'ocspFailOpen') {
val = GlobalConfig.getOcspFailOpen();
} else {
val = GlobalConfig[key];
}
assert.strictEqual(val, ref);
});
});
});
});
});
