From 7207966445a76e443e70cd6d6f58e32fc6da688a Mon Sep 17 00:00:00 2001 From: Piotr Bulawa Date: Thu, 21 Dec 2023 11:32:25 +0100 Subject: [PATCH] SNOW-984450: Set ESLint rules to error (#733) --- .eslintrc.js | 10 +- lib/agent/cert_util.js | 10 +- lib/agent/check.js | 8 +- lib/agent/https_ocsp_agent.js | 9 +- lib/agent/ocsp_response_cache.js | 18 +- lib/agent/socket_util.js | 4 +- lib/authentication/auth_default.js | 8 +- lib/authentication/auth_keypair.js | 50 +- lib/authentication/auth_oauth.js | 8 +- lib/authentication/auth_okta.js | 48 +- lib/authentication/auth_web.js | 10 +- lib/authentication/authentication.js | 22 +- lib/browser.js | 2 +- lib/connection/bind_uploader.js | 35 +- lib/connection/connection.js | 47 +- lib/connection/connection_config.js | 84 ++-- lib/connection/connection_context.js | 12 +- lib/connection/result/chunk.js | 32 +- lib/connection/result/chunk_cache.js | 15 +- lib/connection/result/column.js | 94 ++-- lib/connection/result/data_types.js | 18 +- lib/connection/result/doubly_linked_list.js | 2 +- lib/connection/result/result.js | 192 +++---- lib/connection/result/result_stream.js | 22 +- lib/connection/result/row_stream.js | 34 +- lib/connection/result/sf_timestamp.js | 44 +- lib/connection/statement.js | 160 +++--- lib/core.js | 50 +- lib/errors.js | 34 +- lib/file_transfer_agent/azure_util.js | 13 +- lib/file_transfer_agent/encrypt_util.js | 95 ++-- .../file_compression_type.js | 6 +- .../file_transfer_agent.js | 276 +++++----- lib/file_transfer_agent/file_util.js | 46 +- lib/file_transfer_agent/gcs_util.js | 103 ++-- lib/file_transfer_agent/local_util.js | 33 +- .../remote_storage_util.js | 108 ++-- lib/file_transfer_agent/s3_util.js | 4 +- lib/global_config.js | 2 +- lib/http/base.js | 11 +- lib/http/browser.js | 8 +- lib/http/node.js | 7 +- lib/logger.js | 4 +- lib/logger/browser.js | 35 +- lib/logger/node.js | 40 +- lib/parameters.js | 26 +- lib/secret_detector.js | 24 +- lib/services/sf.js | 120 +++-- lib/snowflake.js | 12 +- lib/url_util.js | 2 +- lib/util.js | 36 +- samples/jsonParserComparison.js | 3 - system_test/testLongQuery.js | 16 +- system_test/testProxy.js | 12 +- system_test/testSnowflakeSupportWhs.js | 476 ------------------ system_test/testSystemGetObjects.js | 62 +-- .../testSystemSetWhSnowflakeSupportFlag.js | 42 +- system_test/testToken.js | 18 +- .../ocsp_mock/https_ocsp_mock_agent.js | 6 +- test/integration/testArrayBind.js | 222 ++++---- test/integration/testBind.js | 49 +- test/integration/testCancel.js | 10 +- test/integration/testConcurrent.js | 44 +- test/integration/testConnection.js | 108 ++-- test/integration/testConnectionNegative.js | 14 +- test/integration/testDataType.js | 80 +-- test/integration/testLargeResultSet.js | 17 +- test/integration/testManualConnection.js | 8 +- test/integration/testMultiStatement.js | 24 +- test/integration/testOcsp.js | 26 +- test/integration/testPutGet.js | 5 +- test/integration/testPutSmallFiles.js | 74 +-- test/integration/testStatement.js | 65 +-- test/integration/testStreamRows.js | 81 +-- test/integration/testUpdatedRows.js | 76 +-- .../authentication/authentication_test.js | 155 +++--- .../configuration_finding_test.js | 2 +- .../unit/connection/connection_config_test.js | 36 +- .../connection/result/result_stream_test.js | 16 +- test/unit/connection/result/result_test.js | 27 +- .../connection/result/result_test_binary.js | 17 +- .../connection/result/result_test_boolean.js | 12 +- .../connection/result/result_test_common.js | 26 +- .../connection/result/result_test_date.js | 10 +- 
.../result/result_test_duplicated_columns.js | 1 - .../connection/result/result_test_number.js | 8 +- .../connection/result/result_test_time.js | 13 +- .../result/result_test_timestamp.js | 10 +- .../connection/result/result_test_variant.js | 8 +- .../connection/result/sf_timestamp_test.js | 16 +- test/unit/connection/statement_test.js | 26 +- test/unit/errors_test.js | 22 +- .../file_transfer_agent/encrypt_util_test.js | 52 +- test/unit/file_transfer_agent/gcs_test.js | 114 ++--- test/unit/file_transfer_agent/s3_test.js | 52 +- .../large_result_set/testLargeResultSet.js | 2 +- test/unit/logger/node_test.js | 2 +- test/unit/mock/mock_http_client.js | 24 +- test/unit/mock/mock_test_util.js | 36 +- test/unit/mock/statement_fetch_as_string.js | 64 +-- test/unit/mock/statement_stream_result.js | 28 +- test/unit/secret_detector_test.js | 84 ++-- test/unit/snowflake_test.js | 198 ++++---- test/unit/url_util_test.js | 4 +- test/unit/util_test.js | 38 +- 105 files changed, 2092 insertions(+), 2642 deletions(-) delete mode 100644 system_test/testSnowflakeSupportWhs.js diff --git a/.eslintrc.js b/.eslintrc.js index 88aa1c9e0..344045e93 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -23,7 +23,7 @@ module.exports = { 'arrow-spacing': ['error'], 'block-spacing': ['error'], 'brace-style': ['error', '1tbs'], - 'camelcase': ['warn'], + 'camelcase': ['error'], 'comma-spacing': ['error'], 'curly': ['error', 'all'], 'eqeqeq': ['error', 'always', { 'null': 'ignore' }], @@ -40,12 +40,12 @@ module.exports = { 'no-loss-of-precision': ['error'], 'no-mixed-spaces-and-tabs': ['error'], 'no-prototype-builtins': ['error'], - 'no-redeclare': ['warn'], - 'no-undef': ['warn'], - 'no-unused-vars': ['warn'], + 'no-redeclare': ['error'], + 'no-undef': ['error'], + 'no-unused-vars': ['error'], 'no-useless-catch': ['error'], 'no-useless-escape': ['error'], - 'no-var': ['warn'], + 'no-var': ['error'], 'object-curly-spacing': ['error', 'always'], 'prefer-const': ['error'], 'quotes': ['error', 'single'], diff --git a/lib/agent/cert_util.js b/lib/agent/cert_util.js index 6ead4873c..f8cdac82e 100644 --- a/lib/agent/cert_util.js +++ b/lib/agent/cert_util.js @@ -49,7 +49,7 @@ const MIN_CACHE_WARMUP_TIME_IN_MILLISECONDS = 18000000; * @returns {*} */ exports.buildCertId = function (cert) { - var issuer = cert.issuerCertificate; + let issuer = cert.issuerCertificate; cert = cert.raw; try { @@ -62,8 +62,8 @@ exports.buildCertId = function (cert) { return null; // if we encountered an error during decoding, return null } - var tbsCert = cert.tbsCertificate; - var tbsIssuer = issuer.tbsCertificate; + const tbsCert = cert.tbsCertificate; + const tbsIssuer = issuer.tbsCertificate; const certID = { hashAlgorithm: { @@ -93,7 +93,7 @@ function sha1(data) { * @returns {{cert: *, issuer: *}} */ exports.decode = function (cert) { - var issuer = cert.issuerCertificate; + let issuer = cert.issuerCertificate; cert = cert.raw; // note: this block might throw an error @@ -193,7 +193,7 @@ const toUTCString = function (epochInMilliSeconds) { * @param raws */ const findResponder = function (issuer, certs, raws) { - var issuerKey = issuer.tbsCertificate.subjectPublicKeyInfo; + let issuerKey = issuer.tbsCertificate.subjectPublicKeyInfo; issuerKey = ocsp.utils.toPEM( rfc5280.SubjectPublicKeyInfo.encode(issuerKey, 'der'), 'PUBLIC KEY'); if (certs.length > 0) { diff --git a/lib/agent/check.js b/lib/agent/check.js index c753dd903..27f497c69 100644 --- a/lib/agent/check.js +++ b/lib/agent/check.js @@ -78,7 +78,7 @@ function getResponse(uri, req, cb) { const 
chunks = []; response.on('readable', function () { - var chunk = response.read(); + const chunk = response.read(); if (!chunk) { return; } @@ -169,16 +169,16 @@ module.exports = function check(options, cb, mock) { } function setOcspResponderUrl(uri) { - var parsedUrl = require('url').parse(process.env.SF_OCSP_RESPONSE_CACHE_SERVER_URL); + let parsedUrl = require('url').parse(process.env.SF_OCSP_RESPONSE_CACHE_SERVER_URL); - var targetUrl; + let targetUrl; if (parsedUrl.port) { targetUrl = `${parsedUrl.protocol}//${parsedUrl.hostname}:${parsedUrl.port}/retry`; } else { targetUrl = `${parsedUrl.protocol}//${parsedUrl.hostname}/retry`; } - var b64data = req.data.toString('base64'); + const b64data = req.data.toString('base64'); parsedUrl = require('url').parse(uri); process.env.SF_OCSP_RESPONDER_URL = targetUrl + '/' + parsedUrl.hostname + '/' + b64data; diff --git a/lib/agent/https_ocsp_agent.js b/lib/agent/https_ocsp_agent.js index 6454f2689..f7b2cd2f8 100644 --- a/lib/agent/https_ocsp_agent.js +++ b/lib/agent/https_ocsp_agent.js @@ -2,9 +2,8 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../util'); -var HttpsAgent = require('https').Agent; -var SocketUtil = require('./socket_util'); +const HttpsAgent = require('https').Agent; +const SocketUtil = require('./socket_util'); /** * Creates a new HttpsOcspAgent. @@ -15,7 +14,7 @@ var SocketUtil = require('./socket_util'); * @constructor */ function HttpsOcspAgent(options) { - var agent = HttpsAgent.apply(this, arguments); + const agent = HttpsAgent.apply(this, options); agent.createConnection = function (port, host, options) { // make sure the 'options' variables references the argument that actually // contains the options @@ -34,7 +33,7 @@ function HttpsOcspAgent(options) { } // call super - var socket = HttpsAgent.prototype.createConnection.apply(this, arguments); + const socket = HttpsAgent.prototype.createConnection.apply(this, arguments); // secure the socket and return it return SocketUtil.secureSocket(socket, host, null); diff --git a/lib/agent/ocsp_response_cache.js b/lib/agent/ocsp_response_cache.js index 37cdbf713..24e16d4fd 100644 --- a/lib/agent/ocsp_response_cache.js +++ b/lib/agent/ocsp_response_cache.js @@ -25,7 +25,7 @@ const status = { // validate input const sizeLimit = GlobalConfig.getOcspResponseCacheSizeLimit(); // ocsp cache max age in second -var maxAgeSec = GlobalConfig.getOcspResponseCacheMaxAge(); +let maxAgeSec = GlobalConfig.getOcspResponseCacheMaxAge(); Errors.assertInternal(Util.number.isPositiveInteger(sizeLimit)); Errors.assertInternal(Util.number.isPositiveInteger(maxAgeSec)); @@ -33,12 +33,12 @@ Errors.assertInternal(Util.number.isPositiveInteger(maxAgeSec)); const cacheDir = GlobalConfig.mkdirCacheDir(); const cacheFileName = path.join(cacheDir, 'ocsp_response_cache.json'); // create a cache to store the responses, dynamically changes in size -var cache; +let cache; // JSON object with previous cache's responses -var prevCacheObj; +let prevCacheObj; // Cache updated time, in seconds, initialized as current time. 
// Will be updated when load from local cache file or refresh by downloading -var cacheUpdateTimeSec = Date.now() / 1000; +let cacheUpdateTimeSec = Date.now() / 1000; function deleteCache() { try { @@ -305,7 +305,7 @@ function OcspResponseCache() { * @returns {Object} */ function validateCacheEntry(certIdBase64, ocspResponseBase64) { - var err; + let err; if (ocspResponseBase64.length !== 2) { Logger.getInstance() .debug('OCSP cache value doesn\'t consist of two elements. Ignored.'); @@ -337,13 +337,13 @@ function OcspResponseCache() { function updateCache(jsonObject) { // Get the size of cache retrieved from the cache server - var responseCacheSize = Object.keys(jsonObject).length; + const responseCacheSize = Object.keys(jsonObject).length; // Check if there are previous entries to append if (prevCacheObj) { // Count overlap between previous cache and response cache // And delete entry if expired - var cacheOverlapCount = 0; + let cacheOverlapCount = 0; for (const entry in jsonObject) { if (entryExists(prevCacheObj, entry)) { cacheOverlapCount++; @@ -352,10 +352,10 @@ function OcspResponseCache() { } // Count entries from previous cache - var prevCacheSize = Object.keys(prevCacheObj).length; + const prevCacheSize = Object.keys(prevCacheObj).length; // New cache size = previous cache size + response cache size - overlap between the two caches - var newCacheSize = prevCacheSize + responseCacheSize - cacheOverlapCount; + const newCacheSize = prevCacheSize + responseCacheSize - cacheOverlapCount; // Create cache using new cache size if it doesn't exceed the upper limit cache = new SimpleCache({ maxSize: newCacheSize < sizeLimit ? newCacheSize : sizeLimit }); diff --git a/lib/agent/socket_util.js b/lib/agent/socket_util.js index 1964388e2..8949bf69f 100644 --- a/lib/agent/socket_util.js +++ b/lib/agent/socket_util.js @@ -178,7 +178,7 @@ exports.canEarlyExitForOCSP = canEarlyExitForOCSP; */ function validateCertChain(cert, cb) { // walk up the certificate chain and collect all the certificates in an array - var certs = []; + const certs = []; while (cert && cert.issuerCertificate && (cert.fingerprint !== cert.issuerCertificate.fingerprint)) { certs.push(cert); @@ -187,7 +187,7 @@ function validateCertChain(cert, cb) { // create an array to store any errors encountered // while validating the certificate chain - var errors = new Array(certs.length); + const errors = new Array(certs.length); /** * Called for every certificate as we traverse the certificate chain and diff --git a/lib/authentication/auth_default.js b/lib/authentication/auth_default.js index ddfb27261..7440b9502 100644 --- a/lib/authentication/auth_default.js +++ b/lib/authentication/auth_default.js @@ -10,9 +10,7 @@ * @returns {Object} * @constructor */ -function auth_default(password) { - var password = password; - +function AuthDefault(password) { /** * Update JSON body with password. * @@ -24,9 +22,9 @@ function auth_default(password) { body['data']['PASSWORD'] = password; }; - this.authenticate = async function (authenticator, serviceName, account, username) { + this.authenticate = async function () { return; }; } -module.exports = auth_default; +module.exports = AuthDefault; diff --git a/lib/authentication/auth_keypair.js b/lib/authentication/auth_keypair.js index c439f0d50..9b80c25ec 100644 --- a/lib/authentication/auth_keypair.js +++ b/lib/authentication/auth_keypair.js @@ -2,7 +2,7 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. 
*/ -var util = require('../util'); +const util = require('../util'); /** * Creates a key-pair authenticator. @@ -17,23 +17,19 @@ var util = require('../util'); * @returns {Object} * @constructor */ -function auth_keypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwtmod, filesystem) { - var crypto = typeof cryptomod !== 'undefined' ? cryptomod : require('crypto'); - var jwt = typeof jwtmod !== 'undefined' ? jwtmod : require('jsonwebtoken'); - var fs = typeof filesystem !== 'undefined' ? filesystem : require('fs'); +function AuthKeypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwtmod, filesystem) { + const crypto = typeof cryptomod !== 'undefined' ? cryptomod : require('crypto'); + const jwt = typeof jwtmod !== 'undefined' ? jwtmod : require('jsonwebtoken'); + const fs = typeof filesystem !== 'undefined' ? filesystem : require('fs'); - var privateKey = privateKey; - var privateKeyPath = privateKeyPath; - var privateKeyPass = privateKeyPass; + let jwtToken; - var jwtToken; - - var LIFETIME = 120; // seconds - var ALGORITHM = 'RS256'; - var ISSUER = 'iss'; - var SUBJECT = 'sub'; - var EXPIRE_TIME = 'exp'; - var ISSUE_TIME = 'iat'; + const LIFETIME = 120; // seconds + const ALGORITHM = 'RS256'; + const ISSUER = 'iss'; + const SUBJECT = 'sub'; + const EXPIRE_TIME = 'exp'; + const ISSUE_TIME = 'iat'; /** * Update JSON body with token. @@ -56,9 +52,9 @@ function auth_keypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwt */ function loadPrivateKey(privateKeyPath, privateKeyPass) { // Load private key file - var privateKeyFile = fs.readFileSync(privateKeyPath); + const privateKeyFile = fs.readFileSync(privateKeyPath); - var privateKeyObject; + let privateKeyObject; // For encrypted private key if (privateKeyPass) { @@ -76,7 +72,7 @@ function auth_keypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwt }); } - var privateKey = privateKeyObject.export({ + const privateKey = privateKeyObject.export({ format: 'pem', type: 'pkcs8' }); @@ -93,19 +89,19 @@ function auth_keypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwt */ function calculatePublicKeyFingerprint(privateKey) { // Extract public key object from private key - var pubKeyObject = crypto.createPublicKey({ + const pubKeyObject = crypto.createPublicKey({ key: privateKey, format: 'pem' }); // Obtain public key string - var publicKey = pubKeyObject.export({ + const publicKey = pubKeyObject.export({ format: 'der', type: 'spki' }); // Generate SHA256 hash of public key and encode in base64 - var publicKeyFingerprint = 'SHA256:' + + const publicKeyFingerprint = 'SHA256:' + crypto.createHash('sha256') .update(publicKey, 'utf8') .digest('base64'); @@ -124,7 +120,7 @@ function auth_keypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwt * @returns {null} */ this.authenticate = async function (authenticator, serviceName, account, username) { - var publicKeyFingerprint; + let publicKeyFingerprint; // Use private key if already set in connection string, otherwise use private key file location if (privateKey) { @@ -137,11 +133,11 @@ function auth_keypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwt } // Current time + 120 seconds - var currentTime = Date.now(); - var jwtTokenExp = currentTime + LIFETIME; + const currentTime = Date.now(); + const jwtTokenExp = currentTime + LIFETIME; // Create payload containing jwt token and lifetime span - var payload = { + const payload = { [ISSUER]: util.format('%s.%s.%s', account.toUpperCase(), username.toUpperCase(), 
publicKeyFingerprint), [SUBJECT]: util.format('%s.%s', account.toUpperCase(), username.toUpperCase()), [ISSUE_TIME]: currentTime, @@ -153,4 +149,4 @@ function auth_keypair(privateKey, privateKeyPath, privateKeyPass, cryptomod, jwt }; } -module.exports = auth_keypair; +module.exports = AuthKeypair; diff --git a/lib/authentication/auth_oauth.js b/lib/authentication/auth_oauth.js index 27ae32783..32169acfa 100644 --- a/lib/authentication/auth_oauth.js +++ b/lib/authentication/auth_oauth.js @@ -10,9 +10,7 @@ * @returns {Object} * @constructor */ -function auth_oauth(token) { - var token = token; - +function AuthOauth(token) { /** * Update JSON body with token. * @@ -24,9 +22,9 @@ function auth_oauth(token) { body['data']['TOKEN'] = token; }; - this.authenticate = async function (authenticator, serviceName, account, username) { + this.authenticate = async function () { return; }; } -module.exports = auth_oauth; +module.exports = AuthOauth; diff --git a/lib/authentication/auth_okta.js b/lib/authentication/auth_okta.js index 22ec4bfc4..8732e1038 100644 --- a/lib/authentication/auth_okta.js +++ b/lib/authentication/auth_okta.js @@ -2,8 +2,8 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. */ -var util = require('../util'); -var rest = require('../global_config').rest; +const util = require('../util'); +const rest = require('../global_config').rest; /** * Creates an okta authenticator. @@ -18,14 +18,14 @@ var rest = require('../global_config').rest; * @returns {Object} * @constructor */ -function auth_okta(password, region, account, clientType, clientVersion, httpClient) { - var host = util.construct_hostname(region, account); - var port = rest.HTTPS_PORT; - var protocol = rest.HTTPS_PROTOCOL; +function AuthOkta(password, region, account, clientType, clientVersion, httpClient) { + const host = util.constructHostname(region, account); + const port = rest.HTTPS_PORT; + const protocol = rest.HTTPS_PROTOCOL; - var clientAppId = clientType; - var clientAppVersion = clientVersion; - var samlResponse; + const clientAppId = clientType; + const clientAppVersion = clientVersion; + let samlResponse; /** * Update JSON body with saml response. 
@@ -49,8 +49,8 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli * @returns {null} */ this.authenticate = async function (authenticator, serviceName, account, username) { - var ssoUrl; - var tokenUrl; + let ssoUrl; + let tokenUrl; await step1(authenticator, serviceName, account, username).then((response) => { const responseData = response['data']; const success = responseData['success']; @@ -71,9 +71,9 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli step2(authenticator, ssoUrl, tokenUrl); - var oneTimeToken; + let oneTimeToken; await step3(tokenUrl, username, password).then((response) => { - var data = response['data']; + const data = response['data']; if (data['sessionToken']) { oneTimeToken = data['sessionToken']; @@ -82,7 +82,7 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli } }); - var responseHtml; + let responseHtml; await step4(oneTimeToken, ssoUrl).then((response) => { responseHtml = response['data']; }); @@ -102,9 +102,9 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli */ function step1(authenticator, serviceName, account, username) { // Create URL to send POST request to - var url = protocol + '://' + host + '/session/authenticator-request'; + const url = protocol + '://' + host + '/session/authenticator-request'; - var header; + let header; if (serviceName) { header = { 'HTTP_HEADER_SERVICE_NAME': serviceName @@ -112,7 +112,7 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli } // JSON body to send with POST request - var body = { + const body = { 'data': { 'ACCOUNT_NAME': account, 'LOGIN_NAME': username, @@ -162,7 +162,7 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli */ function step3(tokenUrl, username, password) { // JSON body to send with POST request - var body = { + const body = { 'username': username, 'password': password }; @@ -205,8 +205,8 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli * @returns {null} */ function step5(responseHtml) { - var postBackUrl = getPostBackUrlFromHtml(responseHtml); - var fullUrl = util.format('%s://%s:%s', protocol, host, port); + const postBackUrl = getPostBackUrlFromHtml(responseHtml); + const fullUrl = util.format('%s://%s:%s', protocol, host, port); // Validate the post back url come back with the SAML response // contains the same prefix as the Snowflake's server url, which is the @@ -227,9 +227,9 @@ function auth_okta(password, region, account, clientType, clientVersion, httpCli * @returns {String} */ function getPostBackUrlFromHtml(html) { - var index = html.search(' { - var putStmt = 'PUT file://' + fileName + '\'' + stageName + '\' overwrite=true auto_compress=false source_compression=gzip'; - var uploadFileOptions = { + const putStmt = 'PUT file://' + fileName + '\'' + stageName + '\' overwrite=true auto_compress=false source_compression=gzip'; + const uploadFileOptions = { sqlText: putStmt, fileStream: fileData, - complete: function (err, stmt, rows) { + complete: function (err, stmt) { if (err) { Logger.getInstance().debug('err ' + err); reject(err); @@ -92,21 +87,21 @@ function BindUploader(options, services, connectionConfig, requestId) { this.services.sf.isStageCreated = true; } - var fileCount = 0; - var strbuffer = ''; + let fileCount = 0; + let strbuffer = ''; - for (var i = 0; i < bindings.length; i++) { - for (var j = 0; j < bindings[i].length; j++) { + for (let i = 0; i < 
bindings.length; i++) { + for (let j = 0; j < bindings[i].length; j++) { if (j > 0) { strbuffer += ','; } - var value = this.cvsData(bindings[i][j]); + const value = this.cvsData(bindings[i][j]); strbuffer += value; } strbuffer += '\n'; if ((strbuffer.length >= MAX_BUFFER_SIZE) || (i === bindings.length - 1)) { - var fileName = (++fileCount).toString(); + const fileName = (++fileCount).toString(); Logger.getInstance().debug('fileName=' + fileName); await this.uploadFilestream(fileName, strbuffer); strbuffer = ''; diff --git a/lib/connection/connection.js b/lib/connection/connection.js index b84d2fa87..99d9e8bb5 100644 --- a/lib/connection/connection.js +++ b/lib/connection/connection.js @@ -4,17 +4,16 @@ const { v4: uuidv4 } = require('uuid'); const Url = require('url'); const QueryString = require('querystring'); -const GSErrors = require('../constants/gs_errors'); const QueryStatus = require('../constants/query_status'); -var Util = require('../util'); -var Errors = require('../errors'); -var ErrorCodes = Errors.codes; -var EventEmitter = require('events').EventEmitter; -var Statement = require('./statement'); -var Parameters = require('../parameters'); -var Authenticator = require('../authentication/authentication'); -var Logger = require('../logger'); +const Util = require('../util'); +const Errors = require('../errors'); +const ErrorCodes = Errors.codes; +const EventEmitter = require('events').EventEmitter; +const Statement = require('./statement'); +const Parameters = require('../parameters'); +const Authenticator = require('../authentication/authentication'); +const Logger = require('../logger'); const { isOktaAuth } = require('../authentication/authentication'); const PRIVATELINK_URL_SUFFIX = '.privatelink.snowflakecomputing.com'; @@ -30,11 +29,11 @@ function Connection(context) { // validate input Errors.assertInternal(Util.isObject(context)); - var services = context.getServices(); - var connectionConfig = context.getConnectionConfig(); + const services = context.getServices(); + const connectionConfig = context.getConnectionConfig(); // generate an id for the connection - var id = uuidv4(); + const id = uuidv4(); // async max retry and retry pattern from python connector const asyncNoDataMaxRetry = 24; @@ -67,13 +66,13 @@ function Connection(context) { * @returns {boolean} */ this.isTokenValid = function () { - var tokenInfo = services.sf.getConfig().tokenInfo; + const tokenInfo = services.sf.getConfig().tokenInfo; - var sessionTokenExpirationTime = tokenInfo.sessionTokenExpirationTime; - var isSessionValid = sessionTokenExpirationTime > Date.now(); + const sessionTokenExpirationTime = tokenInfo.sessionTokenExpirationTime; + const isSessionValid = sessionTokenExpirationTime > Date.now(); - var masterTokenExpirationTime = tokenInfo.masterTokenExpirationTime; - var isMasterValid = masterTokenExpirationTime > Date.now(); + const masterTokenExpirationTime = tokenInfo.masterTokenExpirationTime; + const isMasterValid = masterTokenExpirationTime > Date.now(); return (isSessionValid && isMasterValid); }; @@ -160,7 +159,7 @@ function Connection(context) { * @returns {null} */ this.setupOcspPrivateLink = function (host) { - var ocspCacheServer = `http://ocsp.${host}/ocsp_response_cache.json`; + const ocspCacheServer = `http://ocsp.${host}/ocsp_response_cache.json`; process.env.SF_OCSP_RESPONSE_CACHE_SERVER_URL = ocspCacheServer; }; @@ -203,9 +202,9 @@ function Connection(context) { // connect to the snowflake service and provide our own callback so that // the connection can be passed in 
when invoking the connection.connect() // callback - var self = this; + const self = this; - var authenticationType = connectionConfig.getAuthenticator(); + const authenticationType = connectionConfig.getAuthenticator(); // check if authentication type is compatible with connect() // external browser and okta are not compatible with connect() due to their usage of async functions @@ -224,7 +223,7 @@ function Connection(context) { connectionConfig.username); // JSON for connection - var body = Authenticator.formAuthJSON(connectionConfig.getAuthenticator(), + const body = Authenticator.formAuthJSON(connectionConfig.getAuthenticator(), connectionConfig.account, connectionConfig.username, connectionConfig.getClientType(), @@ -262,7 +261,7 @@ function Connection(context) { // connect to the snowflake service and provide our own callback so that // the connection can be passed in when invoking the connection.connect() // callback - var self = this; + const self = this; // Get authenticator to use const auth = Authenticator.getAuthenticator(connectionConfig, context.getHttpClient()); @@ -274,7 +273,7 @@ function Connection(context) { connectionConfig.username) .then(() => { // JSON for connection - var body = Authenticator.formAuthJSON(connectionConfig.getAuthenticator(), + const body = Authenticator.formAuthJSON(connectionConfig.getAuthenticator(), connectionConfig.account, connectionConfig.username, connectionConfig.getClientType(), @@ -339,7 +338,7 @@ function Connection(context) { // log out of the snowflake service and provide our own callback so that // the connection can be passed in when invoking the connection.destroy() // callback - var self = this; + const self = this; services.sf.destroy( { callback: function (err) { diff --git a/lib/connection/connection_config.js b/lib/connection/connection_config.js index fd8441f3e..22b1a10fc 100644 --- a/lib/connection/connection_config.js +++ b/lib/connection/connection_config.js @@ -68,7 +68,7 @@ function consolidateHostAndAccount(options) { if (Util.exists(options.account)) { Errors.checkArgumentValid(Util.isString(options.account), ErrorCodes.ERR_CONN_CREATE_INVALID_ACCOUNT); Errors.checkArgumentValid(Util.isCorrectSubdomain(options.account), ErrorCodes.ERR_CONN_CREATE_INVALID_ACCOUNT_REGEX); - options.host = Util.construct_hostname(options.region, options.account); + options.host = Util.constructHostname(options.region, options.account); dotPos = options.account.indexOf('.'); realAccount = options.account; if (dotPos > 0) { @@ -177,15 +177,15 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { Errors.checkArgumentValid(Util.isString(options.accessUrl), ErrorCodes.ERR_CONN_CREATE_INVALID_ACCESS_URL); - var proxyHost = options.proxyHost; - var proxyPort = options.proxyPort; - var proxyUser = options.proxyUser; - var proxyPassword = options.proxyPassword; - var proxyProtocol = options.proxyProtocol || 'http'; - var noProxy = options.noProxy; + const proxyHost = options.proxyHost; + const proxyPort = options.proxyPort; + const proxyUser = options.proxyUser; + const proxyPassword = options.proxyPassword; + const proxyProtocol = options.proxyProtocol || 'http'; + const noProxy = options.noProxy; // if we're running in node and some proxy information is specified - var proxy; + let proxy; if (Util.isNode() && (Util.exists(proxyHost) || Util.exists(proxyPort))) { // check for missing proxyHost Errors.checkArgumentExists(Util.exists(proxyHost), @@ -246,8 +246,8 @@ function ConnectionConfig(options, validateCredentials, 
qaMode, clientInfo) { } } - var serviceName = options.serviceName; - var authenticator = options.authenticator; + const serviceName = options.serviceName; + let authenticator = options.authenticator; // if no value is specified for authenticator, default to Snowflake if (!Util.exists(authenticator)) { @@ -264,34 +264,34 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { browserActionTimeout = WAIT_FOR_BROWSER_ACTION_TIMEOUT; } - var privateKey = options.privateKey; + const privateKey = options.privateKey; if (Util.exists(options.privateKey)) { Errors.checkArgumentValid((Util.isString(privateKey) && Util.isPrivateKey(privateKey)), ErrorCodes.ERR_CONN_CREATE_INVALID_PRIVATE_KEY); } - var privateKeyPath = options.privateKeyPath; + const privateKeyPath = options.privateKeyPath; if (Util.exists(options.privateKeyPath)) { Errors.checkArgumentValid(Util.isString(privateKeyPath), ErrorCodes.ERR_CONN_CREATE_INVALID_PRIVATE_KEY_PATH); } - var privateKeyPass = options.privateKeyPass; + const privateKeyPass = options.privateKeyPass; if (Util.exists(options.privateKeyPass)) { Errors.checkArgumentValid(Util.isString(privateKeyPass), ErrorCodes.ERR_CONN_CREATE_INVALID_PRIVATE_KEY_PASS); } - var token = options.token; + const token = options.token; if (Util.exists(options.token)) { Errors.checkArgumentValid(Util.isString(token), ErrorCodes.ERR_CONN_CREATE_INVALID_OAUTH_TOKEN); } - var warehouse = options.warehouse; - var database = options.database; - var schema = options.schema; - var role = options.role; + const warehouse = options.warehouse; + const database = options.database; + const schema = options.schema; + const role = options.role; // check for invalid warehouse if (Util.exists(warehouse)) { @@ -318,21 +318,21 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { } // check for invalid streamResult - var streamResult = options.streamResult; + const streamResult = options.streamResult; if (Util.exists(streamResult)) { Errors.checkArgumentValid(Util.isBoolean(streamResult), ErrorCodes.ERR_CONN_CREATE_INVALID_STREAM_RESULT); } // check for invalid fetchAsString - var fetchAsString = options.fetchAsString; + const fetchAsString = options.fetchAsString; if (Util.exists(fetchAsString)) { // check that the value is an array Errors.checkArgumentValid(Util.isArray(fetchAsString), ErrorCodes.ERR_CONN_CREATE_INVALID_FETCH_AS_STRING); // check that all the array elements are valid - var invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); + const invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); Errors.checkArgumentValid(invalidValueIndex === -1, ErrorCodes.ERR_CONN_CREATE_INVALID_FETCH_AS_STRING_VALUES, JSON.stringify(fetchAsString[invalidValueIndex])); @@ -344,14 +344,14 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { } // check for invalid clientSessionKeepAlive - var clientSessionKeepAlive = options.clientSessionKeepAlive; + const clientSessionKeepAlive = options.clientSessionKeepAlive; if (Util.exists(clientSessionKeepAlive)) { Errors.checkArgumentValid(Util.isBoolean(clientSessionKeepAlive), ErrorCodes.ERR_CONN_CREATE_INVALID_KEEP_ALIVE); } // check for invalid clientSessionKeepAliveHeartbeatFrequency - var clientSessionKeepAliveHeartbeatFrequency = options.clientSessionKeepAliveHeartbeatFrequency; + let clientSessionKeepAliveHeartbeatFrequency = options.clientSessionKeepAliveHeartbeatFrequency; if (Util.exists(clientSessionKeepAliveHeartbeatFrequency)) { 
Errors.checkArgumentValid(Util.isNumber(clientSessionKeepAliveHeartbeatFrequency), ErrorCodes.ERR_CONN_CREATE_INVALID_KEEP_ALIVE_HEARTBEAT_FREQ); @@ -359,19 +359,19 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { Util.validateClientSessionKeepAliveHeartbeatFrequency(clientSessionKeepAliveHeartbeatFrequency, 14400); } - var jsTreatIntegerAsBigInt = options.jsTreatIntegerAsBigInt; + const jsTreatIntegerAsBigInt = options.jsTreatIntegerAsBigInt; if (Util.exists(jsTreatIntegerAsBigInt)) { Errors.checkArgumentValid(Util.isBoolean(jsTreatIntegerAsBigInt), ErrorCodes.ERR_CONN_CREATE_INVALID_TREAT_INTEGER_AS_BIGINT); } - var gcsUseDownscopedCredential = options.gcsUseDownscopedCredential; + const gcsUseDownscopedCredential = options.gcsUseDownscopedCredential; if (Util.exists(gcsUseDownscopedCredential)) { Errors.checkArgumentValid(Util.isBoolean(gcsUseDownscopedCredential), ErrorCodes.ERR_CONN_CREATE_INVALID_GCS_USE_DOWNSCOPED_CREDENTIAL); } - var clientConfigFile = options.clientConfigFile; + const clientConfigFile = options.clientConfigFile; if (Util.exists(clientConfigFile)) { Errors.checkArgumentValid(Util.isString(clientConfigFile), ErrorCodes.ERR_CONN_CREATE_INVALID_CLIENT_CONFIG_FILE); } @@ -380,10 +380,10 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { this._qaMode = qaMode; // if a client-info argument is specified, validate it - var clientType = 'JavaScript'; - var clientName; - var clientVersion; - var clientEnvironment; + const clientType = 'JavaScript'; + let clientName; + let clientVersion; + let clientEnvironment; if (Util.exists(clientInfo)) { Errors.assertInternal(Util.isObject(clientInfo)); Errors.assertInternal(Util.isString(clientInfo.version)); @@ -397,7 +397,7 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { clientEnvironment.OCSP_MODE = GlobalConfig.getOcspMode(); } - var clientApplication = options.application; + const clientApplication = options.application; if (Util.exists(clientApplication)) { Errors.checkArgumentValid(Util.isString(clientApplication), ErrorCodes.ERR_CONN_CREATE_INVALID_APPLICATION); @@ -409,7 +409,7 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { ErrorCodes.ERR_CONN_CREATE_INVALID_APPLICATION); } - var validateDefaultParameters = false; + let validateDefaultParameters = false; if (Util.exists(options.validateDefaultParameters)) { // check for invalid validateDefaultParameters Errors.checkArgumentValid(Util.isBoolean(options.validateDefaultParameters), @@ -418,7 +418,7 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { validateDefaultParameters = options.validateDefaultParameters; } - var bindThreshold = null; + let bindThreshold = null; if (Util.exists(options.arrayBindingThreshold)) { // check for invalid arrayBindingThreshold Errors.checkArgumentValid(Util.isNumber(options.arrayBindingThreshold), @@ -427,7 +427,7 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { bindThreshold = options.arrayBindingThreshold; } - var forceStageBindError = null; + let forceStageBindError = null; if (Util.exists(options.forceStageBindError)) { // check for invalid forceStageBindError Errors.checkArgumentValid(Util.isNumber(options.forceStageBindError), @@ -759,12 +759,12 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { this.clientConfigFile = options.clientConfigFile; // create the parameters array - var parameters = createParameters(); + const 
parameters = createParameters(); // create a map in which the keys are the parameter names and the values are // the corresponding parameters - var mapParameters = {}; - var index, length, parameter; + const mapParameters = {}; + let index, length, parameter; for (index = 0, length = parameters.length; index < length; index++) { parameter = parameters[index]; mapParameters[parameter.name] = parameter; @@ -774,7 +774,7 @@ function ConnectionConfig(options, validateCredentials, qaMode, clientInfo) { } // for each property in the options object that matches a known parameter name - var propertyName, propertyValue; + let propertyName, propertyValue; for (propertyName in options) { if (Object.prototype.hasOwnProperty.call(options, propertyName) && Object.prototype.hasOwnProperty.call(mapParameters, propertyName)) { @@ -836,9 +836,9 @@ const PARAM_RETRY_SF_MAX_SLEEP_TIME = 'sfRetryMaxSleepTime'; * @returns {Object[]} */ function createParameters() { - var isNonNegativeInteger = Util.number.isNonNegativeInteger.bind(Util.number); - var isPositiveInteger = Util.number.isPositiveInteger.bind(Util.number); - var isNonNegativeNumber = Util.number.isNonNegative.bind(Util.number); + const isNonNegativeInteger = Util.number.isNonNegativeInteger.bind(Util.number); + const isPositiveInteger = Util.number.isPositiveInteger.bind(Util.number); + const isNonNegativeNumber = Util.number.isNonNegative.bind(Util.number); return [ { @@ -976,7 +976,7 @@ ConnectionConfig.prototype.getRetrySfMaxSleepTime = function () { * @private */ ConnectionConfig.prototype._getParameterValue = function (parameterName) { - var parameter = this._mapParameters[parameterName]; + const parameter = this._mapParameters[parameterName]; return parameter ? parameter.value : undefined; }; diff --git a/lib/connection/connection_context.js b/lib/connection/connection_context.js index 7858f5fbe..441f81597 100644 --- a/lib/connection/connection_context.js +++ b/lib/connection/connection_context.js @@ -2,10 +2,10 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../util'); -var Errors = require('../errors'); -var SfService = require('../services/sf'); -var LargeResultSetService = require('../services/large_result_set'); +const Util = require('../util'); +const Errors = require('../errors'); +const SfService = require('../services/sf'); +const LargeResultSetService = require('../services/large_result_set'); /** * Creates a new ConnectionContext. @@ -23,7 +23,7 @@ function ConnectionContext(connectionConfig, httpClient, config) { // if a config object was specified, verify // that it has all the information we need - var sfServiceConfig; + let sfServiceConfig; if (Util.exists(config)) { Errors.assertInternal(Util.isObject(config)); Errors.assertInternal(Util.isObject(config.services)); @@ -33,7 +33,7 @@ function ConnectionContext(connectionConfig, httpClient, config) { } // create a map that contains all the services we'll be using - var services = + const services = { sf: new SfService(connectionConfig, httpClient, sfServiceConfig), largeResultSet: new LargeResultSetService(connectionConfig, httpClient) diff --git a/lib/connection/result/chunk.js b/lib/connection/result/chunk.js index 5aa103371..c96b77f9e 100644 --- a/lib/connection/result/chunk.js +++ b/lib/connection/result/chunk.js @@ -2,9 +2,9 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. 
*/ -var EventEmitter = require('events').EventEmitter; -var Util = require('../../util'); -var Errors = require('../../errors'); +const EventEmitter = require('events').EventEmitter; +const Util = require('../../util'); +const Errors = require('../../errors'); /** * Creates a new Chunk. @@ -104,7 +104,7 @@ Chunk.prototype.getId = function () { * @returns {Boolean} */ Chunk.prototype.overlapsWithWindow = function (start, end) { - var chunkStart = this._startIndex, chunkEnd = this._endIndex; + const chunkStart = this._startIndex, chunkEnd = this._endIndex; // check if the window overlaps with the chunk from the left or // from the right or from both sides @@ -202,14 +202,14 @@ Chunk.prototype.load = function (callback) { // we've started loading this._isLoading = true; - var self = this; + const self = this; /** * Completes the chunk load. * * @param err */ - var completeLoad = function (err) { + const completeLoad = function (err) { // we're done loading self._isLoading = false; @@ -292,7 +292,7 @@ function convertRowsetToRows( * * @returns {Number} */ - var getRowIndex = function () { + const getRowIndex = function () { return this.rowIndex; }; @@ -301,7 +301,7 @@ function convertRowsetToRows( * * @returns {*} */ - var getStatement = function getStatement() { + const getStatement = function getStatement() { return statement; }; @@ -313,9 +313,9 @@ function convertRowsetToRows( * * @returns {*} */ - var getColumnValue = function getColumnValue(columnIdentifier) { + const getColumnValue = function getColumnValue(columnIdentifier) { // resolve the column identifier to the correct column if possible - var column = resolveColumnIdentifierToColumn( + const column = resolveColumnIdentifierToColumn( columns, columnIdentifier, mapColumnNameToIndices); return column ? column.getRowValue(this) : undefined; @@ -329,9 +329,9 @@ function convertRowsetToRows( * * @returns {*} */ - var getColumnValueAsString = function getColumnValueAsString(columnIdentifier) { + const getColumnValueAsString = function getColumnValueAsString(columnIdentifier) { // resolve the column identifier to the correct column if possible - var column = resolveColumnIdentifierToColumn( + const column = resolveColumnIdentifierToColumn( columns, columnIdentifier, mapColumnNameToIndices); return column ? column.getRowValueAsString(this) : undefined; @@ -343,9 +343,9 @@ function convertRowsetToRows( /////////////////////////////////////////////////////////////////////////// // create a new array to store the processed rows - var length = rowset.length; - var rows = new Array(length); - for (var index = 0; index < length; index++) { + const length = rowset.length; + const rows = new Array(length); + for (let index = 0; index < length; index++) { // add a new item to the rows array rows[index] = { @@ -376,7 +376,7 @@ function convertRowsetToRows( */ function resolveColumnIdentifierToColumn( columns, columnIdentifier, mapColumnNameToIndices) { - var columnIndex; + let columnIndex; // if the column identifier is a string, treat it as a column // name and use it to get the index of the specified column diff --git a/lib/connection/result/chunk_cache.js b/lib/connection/result/chunk_cache.js index a91be4d2f..0334c606f 100644 --- a/lib/connection/result/chunk_cache.js +++ b/lib/connection/result/chunk_cache.js @@ -2,7 +2,7 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. 
*/ -var DoublyLinkedList = require('./doubly_linked_list'); +const DoublyLinkedList = require('./doubly_linked_list'); /** * An LRU cache used to store chunks. @@ -26,15 +26,12 @@ function ChunkCache(capacity) { * @param chunk */ ChunkCache.prototype.put = function (chunk) { - var map; - var list; - var chunkId; - var evictedNode; - var evictedChunk; + let evictedNode; + let evictedChunk; - map = this._map; - list = this._list; - chunkId = chunk.getId(); + const map = this._map; + const list = this._list; + const chunkId = chunk.getId(); // if we already have the chunk in the cache, remove the corresponding node // from the list diff --git a/lib/connection/result/column.js b/lib/connection/result/column.js index f47bf140d..4be9cecab 100644 --- a/lib/connection/result/column.js +++ b/lib/connection/result/column.js @@ -2,16 +2,16 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../../util'); -var Errors = require('../../errors'); -var BigNumber = require('bignumber.js'); +const Util = require('../../util'); +const Errors = require('../../errors'); +const BigNumber = require('bignumber.js'); const GlobalConfig = require('../../global_config'); const Logger = require('../../logger'); -var SfTimestamp = require('./sf_timestamp'); -var SqlTypes = require('./data_types').SqlTypes; -var bigInt = require('big-integer'); +const SfTimestamp = require('./sf_timestamp'); +const SqlTypes = require('./data_types').SqlTypes; +const bigInt = require('big-integer'); -var NULL_UPPERCASE = 'NULL'; +const NULL_UPPERCASE = 'NULL'; /** * Creates a new Column. @@ -24,11 +24,11 @@ var NULL_UPPERCASE = 'NULL'; * @constructor */ function Column(options, index, statementParameters, resultVersion) { - var name = options.overriddenName || options.name; - var nullable = options.nullable; - var scale = options.scale; - var type = options.type; - var precision = options.precision; + const name = options.overriddenName || options.name; + const nullable = options.nullable; + const scale = options.scale; + const type = options.type; + const precision = options.precision; /** * Returns the name of this column. 
@@ -109,10 +109,10 @@ function Column(options, index, statementParameters, resultVersion) { this.isObject = createFnIsColumnOfType(type, SqlTypes.isObject, SqlTypes); this.isArray = createFnIsColumnOfType(type, SqlTypes.isArray, SqlTypes); - var convert; - var toString; - var toValue; - var format; + let convert; + let toString; + let toValue; + let format; if (this.isNumber()) { const integerAs = statementParameters['JS_TREAT_INTEGER_AS_BIGINT']; @@ -177,7 +177,7 @@ function Column(options, index, statementParameters, resultVersion) { } // create a private context to pass to the extract function - var context = + const context = { convert: convert, toValue: toValue, @@ -238,7 +238,7 @@ function createFnIsColumnOfType(columnType, columnComparisonFn, scope) { * * @returns {Object} */ -function convertRawNumber(rawColumnValue, column, context) { +function convertRawNumber(rawColumnValue) { return { raw: rawColumnValue, processed: Number(rawColumnValue) @@ -255,7 +255,7 @@ function convertRawNumber(rawColumnValue, column, context) { * @param context * @returns {{processed: bigInt.BigInteger, raw: *}} */ -function convertRawBigInt(rawColumnValue, column, context) { +function convertRawBigInt(rawColumnValue) { return { raw: rawColumnValue, processed: bigInt(rawColumnValue) @@ -272,8 +272,8 @@ function convertRawBigInt(rawColumnValue, column, context) { * * @returns {Boolean} */ -function convertRawBoolean(rawColumnValue, column, context) { - var ret; +function convertRawBoolean(rawColumnValue) { + let ret; if ((rawColumnValue === '1') || (rawColumnValue === 'TRUE')) { ret = true; @@ -312,10 +312,10 @@ function convertRawDate(rawColumnValue, column, context) { * @returns {Object} */ function convertRawTime(rawColumnValue, column, context) { - var columnScale = column.getScale(); + const columnScale = column.getScale(); // the values might be big so use BigNumber to do arithmetic - var valFracSecsBig = + const valFracSecsBig = new BigNumber(rawColumnValue).times(Math.pow(10, columnScale)); return convertRawTimestampHelper( @@ -335,10 +335,10 @@ function convertRawTime(rawColumnValue, column, context) { * @returns {Date} */ function convertRawTimestampLtz(rawColumnValue, column, context) { - var columnScale = column.getScale(); + const columnScale = column.getScale(); // the values might be big so use BigNumber to do arithmetic - var valFracSecsBig = + const valFracSecsBig = new BigNumber(rawColumnValue).times(Math.pow(10, columnScale)); // create a new snowflake date @@ -359,10 +359,10 @@ function convertRawTimestampLtz(rawColumnValue, column, context) { * @returns {Date} */ function convertRawTimestampNtz(rawColumnValue, column, context) { - var columnScale = column.getScale(); + const columnScale = column.getScale(); // the values might be big so use BigNumber to do arithmetic - var valFracSecsBig = + const valFracSecsBig = new BigNumber(rawColumnValue).times(Math.pow(10, columnScale)); // create a new snowflake date @@ -383,17 +383,17 @@ function convertRawTimestampNtz(rawColumnValue, column, context) { * @returns {Date} */ function convertRawTimestampTz(rawColumnValue, column, context) { - var valFracSecsBig; - var valFracSecsWithTzBig; - var timezoneBig; - var timezone; - var timestampAndTZIndex; + let valFracSecsBig; + let valFracSecsWithTzBig; + let timezoneBig; + let timezone; + let timestampAndTZIndex; // compute the scale factor - var columnScale = column.getScale(); - var scaleFactor = Math.pow(10, columnScale); + const columnScale = column.getScale(); + const scaleFactor = 
Math.pow(10, columnScale); - var resultVersion = context.resultVersion; + const resultVersion = context.resultVersion; if (resultVersion === '0' || resultVersion === undefined) { // the values might be big so use BigNumber to do arithmetic valFracSecsBig = @@ -458,14 +458,14 @@ function convertRawTimestampHelper( timezone, format) { // compute the scale factor - var scaleFactor = Math.pow(10, scale); + const scaleFactor = Math.pow(10, scale); // split the value into epoch seconds + nanoseconds; for example, // 1365148923.123456789 will be split into 1365148923 (epoch seconds) // and 123456789 (nano seconds) - var valSecBig = epochFracSecsBig.dividedBy(scaleFactor).integerValue(BigNumber.ROUND_FLOOR); - var fractionsBig = epochFracSecsBig.minus(valSecBig.times(scaleFactor)); - var valSecNanoBig = fractionsBig.times(Math.pow(10, 9 - scale)); + const valSecBig = epochFracSecsBig.dividedBy(scaleFactor).integerValue(BigNumber.ROUND_FLOOR); + const fractionsBig = epochFracSecsBig.minus(valSecBig.times(scaleFactor)); + const valSecNanoBig = fractionsBig.times(Math.pow(10, 9 - scale)); // create a new snowflake date from the information return new SfTimestamp( @@ -485,7 +485,7 @@ function convertRawTimestampHelper( * * @returns {Object | Array} */ -function convertRawVariant(rawColumnValue, column, context) { +function convertRawVariant(rawColumnValue) { // if the input is a non-empty string, convert it to a json object if (Util.string.isNotNullOrEmpty(rawColumnValue)) { try { @@ -512,11 +512,11 @@ function convertRawVariant(rawColumnValue, column, context) { */ function convertRawBinary(rawColumnValue, column, context) { // Ensure the format is valid. - var format = context.format.toUpperCase(); + const format = context.format.toUpperCase(); Errors.assertInternal(format === 'HEX' || format === 'BASE64'); // Decode hex string sent by GS. - var buffer = Buffer.from(rawColumnValue, 'HEX'); + const buffer = Buffer.from(rawColumnValue, 'HEX'); if (format === 'HEX') { buffer.toStringSf = function () { @@ -672,12 +672,12 @@ function toStringFromBinary(columnValue) { * @returns {*} */ function extractFromRow(row, context, asString) { - var map = row._arrayProcessedColumns; - var values = row.values; + const map = row._arrayProcessedColumns; + const values = row.values; // get the value - var columnIndex = this.getIndex(); - var ret = values[columnIndex]; + const columnIndex = this.getIndex(); + let ret = values[columnIndex]; // if we want the value as a string, and the column is of type variant, and we // haven't already processed the value before, we don't need to process the @@ -695,7 +695,7 @@ function extractFromRow(row, context, asString) { // use the appropriate extraction function depending on whether // we want the value or a string representation of the value - var extractFn = !asString ? context.toValue : context.toString; + const extractFn = !asString ? context.toValue : context.toString; ret = extractFn(ret); } diff --git a/lib/connection/result/data_types.js b/lib/connection/result/data_types.js index 6306838bd..724e41605 100644 --- a/lib/connection/result/data_types.js +++ b/lib/connection/result/data_types.js @@ -2,10 +2,10 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var Util = require('../../util'); -var Errors = require('../../errors'); +const Util = require('../../util'); +const Errors = require('../../errors'); -var sqlTypes = +const sqlTypes = { values: { @@ -172,7 +172,7 @@ var sqlTypes = } }; -var nativeTypes = +const nativeTypes = { values: { @@ -209,8 +209,8 @@ var nativeTypes = Errors.assertInternal(Util.isArray(nativeTypes)); // find the index of the first invalid value - var invalidValueIndex = -1; - for (var index = 0, length = nativeTypes.length; index < length; index++) { + let invalidValueIndex = -1; + for (let index = 0, length = nativeTypes.length; index < length; index++) { if (!this.isValidValue(nativeTypes[index])) { invalidValueIndex = index; break; @@ -221,10 +221,10 @@ var nativeTypes = } }; -var sqlTypeValues = sqlTypes.values; -var nativeTypeValues = nativeTypes.values; +const sqlTypeValues = sqlTypes.values; +const nativeTypeValues = nativeTypes.values; -var MAP_SQL_TO_NATIVE = {}; +const MAP_SQL_TO_NATIVE = {}; MAP_SQL_TO_NATIVE[sqlTypeValues.TEXT] = nativeTypeValues.STRING; MAP_SQL_TO_NATIVE[sqlTypeValues.BINARY] = nativeTypeValues.BUFFER; MAP_SQL_TO_NATIVE[sqlTypeValues.BOOLEAN] = nativeTypeValues.BOOLEAN; diff --git a/lib/connection/result/doubly_linked_list.js b/lib/connection/result/doubly_linked_list.js index 691d37adc..cf95fb4f2 100644 --- a/lib/connection/result/doubly_linked_list.js +++ b/lib/connection/result/doubly_linked_list.js @@ -44,7 +44,7 @@ DoublyLinkedList.prototype.getTail = function () { */ DoublyLinkedList.prototype.insertEnd = function (value) { // create a node from the specified value - var node = new Node(value, null, null); + const node = new Node(value, null, null); // if there are no element in the list yet if (this._length === 0) { diff --git a/lib/connection/result/result.js b/lib/connection/result/result.js index eca247c4e..77cb73fe4 100644 --- a/lib/connection/result/result.js +++ b/lib/connection/result/result.js @@ -2,14 +2,13 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. 
*/ -var EventEmitter = require('events').EventEmitter; -var Util = require('../../util'); -var Errors = require('../../errors'); -var Chunk = require('./chunk'); -var ResultStream = require('./result_stream'); -var ChunkCache = require('./chunk_cache'); -var Column = require('./column'); -var StatementType = require('./statement_type'); +const EventEmitter = require('events').EventEmitter; +const Util = require('../../util'); +const Errors = require('../../errors'); +const Chunk = require('./chunk'); +const ResultStream = require('./result_stream'); +const Column = require('./column'); +const StatementType = require('./statement_type'); const ColumnNamesCreator = require('./unique_column_name_creator'); const RowMode = require('../../constants/row_mode'); const Logger = require('../../logger'); @@ -21,17 +20,14 @@ const Logger = require('../../logger'); * @constructor */ function Result(options) { - var data; - var chunkHeaders; - var parametersMap; - var parametersArray; - var length; - var index; - var parameter; - var mapColumnNameToIndices; - var columns; - var column; - var version; + let chunkHeaders; + let length; + let index; + let parameter; + let mapColumnNameToIndices; + let columns; + let column; + let version; // assert that options is a valid object that contains a response, statement, // services and connection config @@ -46,7 +42,7 @@ function Result(options) { this._services = options.services; this._connectionConfig = options.connectionConfig; - data = options.response.data; + const data = options.response.data; this._queryId = data.queryId; this._version = version = String(data.version); // don't rely on the version being a number @@ -73,8 +69,8 @@ function Result(options) { this._sessionState = createSessionState(data); // convert the parameters array to a map - parametersMap = {}; - parametersArray = data.parameters || []; + const parametersMap = {}; + const parametersArray = data.parameters || []; for (index = 0, length = parametersArray.length; index < length; index++) { parameter = parametersArray[index]; parametersMap[parameter.name] = parameter.value; @@ -86,8 +82,8 @@ function Result(options) { // TODO: add timezone related information to columns // create columns from the rowtype array returned in the result - var rowtype = data.rowtype; - var numColumns = rowtype.length; + const rowtype = data.rowtype; + const numColumns = rowtype.length; this._columns = columns = new Array(numColumns); @@ -149,49 +145,13 @@ Util.inherits(Result, EventEmitter); * @param response */ Result.prototype.refresh = function (response) { - var chunks = this._chunks; - var chunkCfgs = response.data.chunks; - for (var index = 0, length = chunks.length; index < length; index++) { + const chunks = this._chunks; + const chunkCfgs = response.data.chunks; + for (let index = 0, length = chunks.length; index < length; index++) { chunks[index].setUrl(chunkCfgs[index].url); } }; -/** - * TODO - * - * @param chunks - * @param capacity - * - * @returns {ChunkCache} - */ -function createChunkCache(chunks, capacity) { - var chunkCache; - var onLoadComplete; - var index; - var length; - - // create a chunk cache - chunkCache = new ChunkCache(capacity); - - // every time a chunk is loaded, add it to the cache - // TODO: should the caching be based on most recently 'used' or most recently - // 'loaded'? 
- onLoadComplete = function (err, chunk) { - if (!err) { - chunkCache.put(chunk); - } - }; - - // subscribe to the 'loadcomplete' event on all the chunks - for (index = 0, length = chunks.length; index < length; index++) { - chunks[index].on('loadcomplete', onLoadComplete); - } - - // TODO: do we need to unsubscribe from the loadcomplete event at some point? - - return chunkCache; -} - /** * Creates a session state object from the values of the current role, current * warehouse, etc., returned in the result response. @@ -201,11 +161,11 @@ function createChunkCache(chunks, capacity) { * @returns {Object} */ function createSessionState(responseData) { - var currentRole = responseData.finalRoleName; - var currentWarehouse = responseData.finalWarehouseName; - var currentDatabaseProvider = responseData.databaseProvider; - var currentDatabase = responseData.finalDatabaseName; - var currentSchema = responseData.finalSchemaName; + const currentRole = responseData.finalRoleName; + const currentWarehouse = responseData.finalWarehouseName; + const currentDatabaseProvider = responseData.databaseProvider; + const currentDatabase = responseData.finalDatabaseName; + const currentSchema = responseData.finalSchemaName; return { getCurrentRole: function () { @@ -251,11 +211,9 @@ function createChunks(chunkCfgs, resultVersion, statement, services) { - var chunks; - var startIndex; - var length; - var index; - var chunkCfg; + let startIndex; + let index; + let chunkCfg; // if we don't have any chunks, or if some records were returned inline, // fabricate a config object for the first chunk @@ -269,12 +227,12 @@ function createChunks(chunkCfgs, }); } - chunks = new Array(chunkCfgs.length); + const chunks = new Array(chunkCfgs.length); Logger.getInstance().trace(`Downloading ${chunkCfgs.length} chunks`); // loop over the chunk config objects and build Chunk instances out of them startIndex = 0; - length = chunkCfgs.length; + const length = chunkCfgs.length; for (index = 0; index < length; index++) { chunkCfg = chunkCfgs[index]; @@ -324,23 +282,23 @@ Result.prototype.fetchRows = function (options) { // if no value was specified for the start index or if the specified start // index is negative, default to 0, otherwise truncate the fractional part - var start = options.startIndex; + let start = options.startIndex; start = (!Util.isNumber(start) || (start < 0)) ? 0 : Math.floor(start); // if no value was specified for the end index or if the end index is larger // than the row index of the last row, default to the index of the last row, // otherwise truncate the fractional part - var returnedRows = this.getReturnedRows(); - var end = options.endIndex; + const returnedRows = this.getReturnedRows(); + let end = options.endIndex; end = (!Util.isNumber(end) || (end >= returnedRows)) ? 
returnedRows - 1 : Math.floor(end); // create an EventEmitter that will be returned to the // caller to track progress of the fetch-rows operation - var operation = new EventEmitter(); + const operation = new EventEmitter(); // define a function to asynchronously complete the operation - var asyncComplete = function (err, continueCallback) { + const asyncComplete = function (err, continueCallback) { process.nextTick(function () { operation.emit('complete', err, continueCallback); }); @@ -354,12 +312,12 @@ Result.prototype.fetchRows = function (options) { return operation; } - var connectionConfig = this._connectionConfig; + const connectionConfig = this._connectionConfig; // create a context object to store the state of the operation; we could store // the state in the operation itself, but it would be good to keep this state // private - var context = + const context = { maxNumRowsToProcess: end - start + 1, numRowsProcessed: 0, @@ -369,7 +327,7 @@ Result.prototype.fetchRows = function (options) { // identify the chunks needed to get the requested rows, and create a stream // to read their contents - var resultStream = new ResultStream( + const resultStream = new ResultStream( { chunks: findOverlappingChunks(this._chunks, start, end), prefetchSize: connectionConfig.getResultPrefetch() @@ -392,34 +350,36 @@ Result.prototype.fetchRows = function (options) { * * @param {Object} chunk */ - var processChunk = function (chunk) { + const processChunk = function (chunk) { // get all the rows in the current chunk that overlap with the requested // window - var chunkStart = chunk.getStartIndex(); - var chunkEnd = chunk.getEndIndex(); - var rows = chunk.getRows().slice( + const chunkStart = chunk.getStartIndex(); + const chunkEnd = chunk.getEndIndex(); + const rows = chunk.getRows().slice( Math.max(chunkStart, start) - chunkStart, Math.min(chunkEnd, end) + 1 - chunkStart); - var rowIndex = 0; - var rowsLength = rows.length; + let rowIndex = 0; + const rowsLength = rows.length; // create a function that can be called to batch-process rows - var processRows = function () { + const processRows = function () { // get the start position and start time - var startIndex = rowIndex; - var startTime = Date.now(); + const startIndex = rowIndex; + const startTime = Date.now(); + const each = options.each; + + let stoppedProcessingRows; - var each = options.each; while (rowIndex < rowsLength) { // invoke the each() callback on the current row - var ret = each(rows[rowIndex++]); + const ret = each(rows[rowIndex++]); context.numRowsProcessed++; // if the callback returned false, stop processing rows if (ret === false) { - var stoppedProcessingRows = true; + stoppedProcessingRows = true; break; } @@ -473,16 +433,16 @@ Result.prototype.fetchRows = function (options) { * @returns {Array} */ function findOverlappingChunks(chunks, windowStart, windowEnd) { - var overlappingChunks = []; + const overlappingChunks = []; if (chunks.length !== 0) { // get the index of the first chunk that overlaps with the specified window - var index = findFirstOverlappingChunk(chunks, windowStart, windowEnd); + let index = findFirstOverlappingChunk(chunks, windowStart, windowEnd); // iterate over the chunks starting with the first overlapping chunk and // keep going until there's no overlap - for (var length = chunks.length; index < length; index++) { - var chunk = chunks[index]; + for (let length = chunks.length; index < length; index++) { + const chunk = chunks[index]; if (chunk.overlapsWithWindow(windowStart, windowEnd)) { 
overlappingChunks.push(chunk); } else { @@ -506,22 +466,20 @@ function findOverlappingChunks(chunks, windowStart, windowEnd) { * @returns {number} */ function findFirstOverlappingChunk(chunks, windowStartIndex, windowEndIndex) { - var helper = function (chunks, + const helper = function (chunks, chunkIndexLeft, chunkIndexRight, windowStartIndex, windowEndIndex) { - var result; - var chunkIndexMiddle; - var middleChunk; - var middleChunkEndIndex; + let result; + let middleChunkEndIndex; // initialize the return value to -1 result = -1; // compute the index of the middle chunk and get the middle chunk - chunkIndexMiddle = Math.floor((chunkIndexLeft + chunkIndexRight) / 2); - middleChunk = chunks[chunkIndexMiddle]; + const chunkIndexMiddle = Math.floor((chunkIndexLeft + chunkIndexRight) / 2); + const middleChunk = chunks[chunkIndexMiddle]; // if we have two or fewer chunks if ((chunkIndexMiddle === chunkIndexLeft) || @@ -613,15 +571,15 @@ Result.prototype.getColumns = function () { * @returns {Object} */ Result.prototype.getColumn = function (columnIdentifier) { - var columnIndex; + let columnIndex; // if the column identifier is a string, treat it as a column // name and use it to get the index of the specified column if (Util.isString(columnIdentifier)) { // if a valid column name was specified, get the index of the first column // with the specified name - if (Object.prototype.hasOwnProperty.call(mapColumnNameToIndices, columnIdentifier)) { - columnIndex = mapColumnNameToIndices[columnIdentifier][0]; + if (Object.prototype.hasOwnProperty.call(this._mapColumnNameToIndices, columnIdentifier)) { + columnIndex = this._mapColumnNameToIndices[columnIdentifier][0]; } } else if (Util.isNumber(columnIdentifier)) { // if the column identifier is a number, treat it as a column index @@ -671,18 +629,18 @@ Result.prototype.getReturnedRows = function () { Result.prototype.getNumUpdatedRows = function () { // initialize if necessary if (!this._numUpdatedRows) { - var numUpdatedRows = -1; + let numUpdatedRows = -1; // the updated-rows metric only applies to dml's - var statementTypeId = this._statementTypeId; + const statementTypeId = this._statementTypeId; if (StatementType.isDml(statementTypeId)) { if (StatementType.isInsert(statementTypeId) || StatementType.isUpdate(statementTypeId) || StatementType.isDelete(statementTypeId) || StatementType.isMerge(statementTypeId) || StatementType.isMultiTableInsert(statementTypeId)) { - var chunks = this._chunks; - var columns = this._columns; + const chunks = this._chunks; + const columns = this._columns; // if the statement is a dml, the result should be small, // meaning we only have one chunk @@ -690,11 +648,11 @@ Result.prototype.getNumUpdatedRows = function () { // add up the values in all the columns numUpdatedRows = 0; - var rows = chunks[0].getRows(); - for (var rowIndex = 0, rowsLength = rows.length; + const rows = chunks[0].getRows(); + for (let rowIndex = 0, rowsLength = rows.length; rowIndex < rowsLength; rowIndex++) { - var row = rows[rowIndex]; - for (var colIndex = 0, colsLength = columns.length; + const row = rows[rowIndex]; + for (let colIndex = 0, colsLength = columns.length; colIndex < colsLength; colIndex++) { numUpdatedRows += Number( row.getColumnValue(columns[colIndex].getId())); diff --git a/lib/connection/result/result_stream.js b/lib/connection/result/result_stream.js index 5869d8271..82fb675c4 100644 --- a/lib/connection/result/result_stream.js +++ b/lib/connection/result/result_stream.js @@ -2,9 +2,9 @@ * Copyright (c) 2015-2021 
Snowflake Computing Inc. All rights reserved. */ -var EventEmitter = require('events').EventEmitter; -var Util = require('../../util'); -var Errors = require('../../errors'); +const EventEmitter = require('events').EventEmitter; +const Util = require('../../util'); +const Errors = require('../../errors'); /** * Creates a stream-like object that can be used to read the contents of an @@ -25,8 +25,8 @@ function ResultStream(options) { // options should be an object Errors.assertInternal(Util.isObject(options)); - var chunks = options.chunks; - var prefetchSize = options.prefetchSize; + const chunks = options.chunks; + const prefetchSize = options.prefetchSize; // chunks should be an array Errors.assertInternal(Util.isArray(chunks)); @@ -35,9 +35,9 @@ function ResultStream(options) { Errors.assertInternal(Util.isNumber(prefetchSize) && (prefetchSize >= 0)); // Current chunk being streamed. Start with the first chunk. - var currChunk = 0; + let currChunk = 0; - var self = this; + const self = this; /** * Called when a chunk fires a 'loadcomplete' event. @@ -45,7 +45,7 @@ function ResultStream(options) { * @param {Error} err * @param {Chunk} chunk */ - var onLoadComplete = function (err, chunk) { + const onLoadComplete = function (err, chunk) { // unsubscribe from the 'loadcomplete' event chunk.removeListener('loadcomplete', onLoadComplete); @@ -70,7 +70,7 @@ function ResultStream(options) { * chunks to load, a 'close' event is fired on the stream to notify * subscribers that all the chunks have been successfully read. */ - var doLoad = function () { + const doLoad = function () { // All chunks were loaded, we're done if (currChunk >= chunks.length) { self.asyncClose(); @@ -80,7 +80,7 @@ function ResultStream(options) { chunks[currChunk].on('loadcomplete', onLoadComplete); // Fire off requests to load all the chunks in the buffer that aren't already loading - var chunk, index, length; + let chunk, index; for (index = currChunk; index < chunks.length && index <= (currChunk + prefetchSize); index++) { chunk = chunks[index]; if (!chunk.isLoading()) { @@ -112,7 +112,7 @@ Util.inherits(ResultStream, EventEmitter); ResultStream.prototype.asyncClose = function () { // schedule an operation to close the stream in // the next tick of the event loop - var self = this; + const self = this; process.nextTick(function () { close(self); }); diff --git a/lib/connection/result/row_stream.js b/lib/connection/result/row_stream.js index 8bd85b08d..0c72627d5 100644 --- a/lib/connection/result/row_stream.js +++ b/lib/connection/result/row_stream.js @@ -86,7 +86,7 @@ function RowStream(statement, context, options) { /** * Initializes this stream. */ - var init = function init() { + const init = function init() { // the stream has now been initialized initialized = true; @@ -99,12 +99,12 @@ function RowStream(statement, context, options) { // if no value was specified for the end index or if the end index is // larger than the row index of the last row, default to the index of the // last row, otherwise truncate the fractional part - var returnedRows = context.result.getReturnedRows(); + const returnedRows = context.result.getReturnedRows(); end = (!Util.isNumber(end) || (end >= returnedRows)) ? 
returnedRows - 1 : Math.floor(end); // find all the chunks that overlap with the specified range - var overlappingChunks = context.result.findOverlappingChunks(start, end); + const overlappingChunks = context.result.findOverlappingChunks(start, end); // if no chunks overlap or start is greater than end, we're done if ((overlappingChunks.length === 0) || (start > end)) { @@ -127,9 +127,9 @@ function RowStream(statement, context, options) { /** * Processes the row buffer. */ - var processRowBuffer = function processRowBuffer() { + const processRowBuffer = function processRowBuffer() { // get the row to add to the read queue - var row = rowBuffer[rowIndex++]; + let row = rowBuffer[rowIndex++]; // if we just read the last row in the row buffer, clear the row buffer and // reset the row index so that we load the next chunk in the result stream @@ -184,15 +184,15 @@ function RowStream(statement, context, options) { * * @param {Chunk} chunk */ - var onResultStreamData = function onResultStreamData(chunk) { + const onResultStreamData = function onResultStreamData(chunk) { // unsubscribe from the result stream's 'data' and 'close' events resultStream.removeListener('data', onResultStreamData); resultStream.removeListener('close', onResultStreamClose); // get all the rows in the chunk that overlap with the requested window, // and use the resulting array as the new row buffer - var chunkStart = chunk.getStartIndex(); - var chunkEnd = chunk.getEndIndex(); + const chunkStart = chunk.getStartIndex(); + const chunkEnd = chunk.getEndIndex(); rowBuffer = chunk.getRows().slice( Math.max(chunkStart, start) - chunkStart, Math.min(chunkEnd, end) + 1 - chunkStart); @@ -217,7 +217,7 @@ function RowStream(statement, context, options) { * @param err * @param continueCallback */ - var onResultStreamClose = function onResultStreamClose(err, continueCallback) { + const onResultStreamClose = function onResultStreamClose(err, continueCallback) { // if the error is retryable and // the result stream hasn't been closed too many times if (isResultStreamErrorRetryable(err) && @@ -243,7 +243,7 @@ function RowStream(statement, context, options) { * * @param {Error} [err] */ - var close = function (err) { + const close = function (err) { // if we have a result stream, stop listening to events on it if (resultStream) { resultStream.removeListener('data', onResultStreamData); @@ -270,7 +270,7 @@ function RowStream(statement, context, options) { /** * Called when we're ready to read the next row in the result. 
*/ - var readNextRow = function readNextRow() { + const readNextRow = function readNextRow() { // if we have a row buffer, process it if (rowBuffer) { processRowBuffer(); @@ -289,7 +289,7 @@ function RowStream(statement, context, options) { * * @param {Error} err */ - var emitError = function emitError(err) { + const emitError = function emitError(err) { self.emit('error', Errors.externalize(err)); }; } @@ -330,11 +330,11 @@ function isResultStreamErrorRetryable(error) { * @returns {Object} */ function buildMapColumnExtractFnNames(columns, fetchAsString) { - var fnNameGetColumnValue = 'getColumnValue'; - var fnNameGetColumnValueAsString = 'getColumnValueAsString'; + const fnNameGetColumnValue = 'getColumnValue'; + const fnNameGetColumnValueAsString = 'getColumnValueAsString'; - var index, length, column; - var mapColumnIdToExtractFnName = {}; + let index, length, column; + const mapColumnIdToExtractFnName = {}; // if no native types need to be retrieved as strings, extract values normally if (!Util.exists(fetchAsString)) { @@ -345,7 +345,7 @@ function buildMapColumnExtractFnNames(columns, fetchAsString) { } else { // build a map that contains all the native types that need to be // retrieved as strings when extracting column values from rows - var nativeTypesMap = {}; + const nativeTypesMap = {}; for (index = 0, length = fetchAsString.length; index < length; index++) { nativeTypesMap[fetchAsString[index].toUpperCase()] = true; } diff --git a/lib/connection/result/sf_timestamp.js b/lib/connection/result/sf_timestamp.js index 88adeb95c..3ec990670 100644 --- a/lib/connection/result/sf_timestamp.js +++ b/lib/connection/result/sf_timestamp.js @@ -2,14 +2,14 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Moment = require('moment-timezone'); -var Util = require('../../util'); +const Moment = require('moment-timezone'); +const Util = require('../../util'); /** * An array of tag mappings to convert a sql format to a moment.js format. If * the 2nd element is empty, special code is needed. 
*/ -var CONST_TAGS = +const CONST_TAGS = [ // proper mappings ['YYYY', 'YYYY'], @@ -61,7 +61,7 @@ function SfTimestamp(epochSeconds, nanoSeconds, scale, timezone, format) { this.format = format; // compute the epoch milliseconds and create a moment object from them - var moment = Moment((epochSeconds * 1000) + (nanoSeconds / 1000000)); + let moment = Moment((epochSeconds * 1000) + (nanoSeconds / 1000000)); // set the moment's timezone if (Util.isString(timezone)) { @@ -85,26 +85,26 @@ SfTimestamp.prototype.toString = function () { return this._valueAsString; } - var moment = this.moment; - var nanoSeconds = this.nanoSeconds; - var scale = this.scale; - var formatSql = this.format; + const moment = this.moment; + const nanoSeconds = this.nanoSeconds; + let scale = this.scale; + const formatSql = this.format; // get an upper-case version of the input sql format - var formatSqlUpper = formatSql.toUpperCase(); + const formatSqlUpper = formatSql.toUpperCase(); - var tags = CONST_TAGS; + const tags = CONST_TAGS; // iterate over the format string - var length = formatSql.length; - var formatMoment = ''; - for (var pos = 0; pos < length;) { - var tag = null; - var out = null; + const length = formatSql.length; + let formatMoment = ''; + for (let pos = 0; pos < length;) { + let tag = null; + let out = null; // at each position, check if there's a tag at that position; if so, use // 'out' as the replacement - for (var index = 0; index < tags.length; index++) { + for (let index = 0; index < tags.length; index++) { if (formatSqlUpper.substr(pos).indexOf(tags[index][0]) === 0) { tag = tags[index][0]; out = tags[index][1]; @@ -130,9 +130,9 @@ SfTimestamp.prototype.toString = function () { out = moment.format('ZZ').substr(3); } else if (tag === 'FF') { // if 'FF' is followed by a digit, use the digit as the scale - var digit = null; + let digit = null; if (pos + tag.length < length) { - var matches = formatSql[pos + tag.length].match(/[0-9]/); + const matches = formatSql[pos + tag.length].match(/[0-9]/); if (matches) { digit = matches[0]; } @@ -146,7 +146,7 @@ SfTimestamp.prototype.toString = function () { if (scale > 0) { // divide the nanoSeconds to get the requested number of // meaningful digits - var scaled = Math.floor(nanoSeconds / Math.pow(10, 9 - scale)); + const scaled = Math.floor(nanoSeconds / Math.pow(10, 9 - scale)); // pad with the appropriate number of leading zeros out = (new Array(9).join('0') + scaled).substr(-scale); @@ -175,9 +175,9 @@ SfTimestamp.prototype.toString = function () { */ SfTimestamp.prototype.toSfDate = function () { // create a Date from the moment - var date = this.moment.toDate(); + const date = this.moment.toDate(); - var self = this; + const self = this; date.getEpochSeconds = function () { return self.epochSeconds; @@ -213,7 +213,7 @@ SfTimestamp.prototype.toSfDate = function () { * @returns {Object} */ SfTimestamp.prototype.toSfTime = function () { - var self = this; + const self = this; return { getMidnightSeconds: function () { return self.epochSeconds; diff --git a/lib/connection/statement.js b/lib/connection/statement.js index 360b7eca9..7e76dec09 100644 --- a/lib/connection/statement.js +++ b/lib/connection/statement.js @@ -4,28 +4,28 @@ const { v4: uuidv4 } = require('uuid'); -var Url = require('url'); -var QueryString = require('querystring'); -var EventEmitter = require('events').EventEmitter; -var Util = require('../util'); -var Result = require('./result/result'); -var Parameters = require('../parameters'); -var RowStream = 
require('./result/row_stream'); -var Errors = require('../errors'); -var ErrorCodes = Errors.codes; -var Logger = require('../logger'); -var NativeTypes = require('./result/data_types').NativeTypes; -var file_transfer_agent = require('.././file_transfer_agent/file_transfer_agent'); -var Bind = require('./bind_uploader'); +const Url = require('url'); +const QueryString = require('querystring'); +const EventEmitter = require('events').EventEmitter; +const Util = require('../util'); +const Result = require('./result/result'); +const Parameters = require('../parameters'); +const RowStream = require('./result/row_stream'); +const Errors = require('../errors'); +const ErrorCodes = Errors.codes; +const Logger = require('../logger'); +const NativeTypes = require('./result/data_types').NativeTypes; +const FileTransferAgent = require('.././file_transfer_agent/file_transfer_agent'); +const Bind = require('./bind_uploader'); const RowMode = require('./../constants/row_mode'); -var states = +const states = { FETCHING: 'fetching', COMPLETE: 'complete' }; -var statementTypes = +const statementTypes = { ROW_PRE_EXEC: 'ROW_PRE_EXEC', ROW_POST_EXEC: 'ROW_POST_EXEC', @@ -41,7 +41,7 @@ const queryCodes = { exports.createContext = function ( options, services, connectionConfig) { // create a statement context for a pre-exec statement - var context = createContextPreExec( + const context = createContextPreExec( options, services, connectionConfig); context.type = statementTypes.FILE_PRE_EXEC; @@ -58,7 +58,7 @@ exports.createContext = function ( function createStatement( statementOptions, context, services, connectionConfig) { // call super - BaseStatement.apply(this, arguments); + BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); } /** @@ -74,7 +74,7 @@ exports.createStatementPreExec = function ( options, services, connectionConfig) { Logger.getInstance().debug('--createStatementPreExec'); // create a statement context for a pre-exec statement - var context = createContextPreExec( + const context = createContextPreExec( options, services, connectionConfig); if (options.sqlText && (Util.isPutCommand(options.sqlText) || Util.isGetCommand(options.sqlText))) { @@ -86,9 +86,9 @@ exports.createStatementPreExec = function ( options, context, services, connectionConfig); } - var numBinds = countBinding(context.binds); + const numBinds = countBinding(context.binds); Logger.getInstance().debug('numBinds = %d', numBinds); - var threshold = Parameters.getValue(Parameters.names.CLIENT_STAGE_ARRAY_BINDING_THRESHOLD); + let threshold = Parameters.getValue(Parameters.names.CLIENT_STAGE_ARRAY_BINDING_THRESHOLD); if (connectionConfig.getbindThreshold()) { threshold = connectionConfig.getbindThreshold(); } @@ -152,7 +152,7 @@ exports.createStatementPostExec = function ( ErrorCodes.ERR_CONN_FETCH_RESULT_INVALID_QUERY_ID); // check for invalid complete callback - var complete = statementOptions.complete; + const complete = statementOptions.complete; if (Util.exists(complete)) { Errors.checkArgumentValid(Util.isFunction(complete), ErrorCodes.ERR_CONN_FETCH_RESULT_INVALID_COMPLETE); @@ -165,14 +165,14 @@ exports.createStatementPostExec = function ( } // check for invalid fetchAsString - var fetchAsString = statementOptions.fetchAsString; + const fetchAsString = statementOptions.fetchAsString; if (Util.exists(fetchAsString)) { // check that the value is an array Errors.checkArgumentValid(Util.isArray(fetchAsString), ErrorCodes.ERR_CONN_FETCH_RESULT_INVALID_FETCH_AS_STRING); // check that all the array 
elements are valid - var invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); + const invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); Errors.checkArgumentValid(invalidValueIndex === -1, ErrorCodes.ERR_CONN_FETCH_RESULT_INVALID_FETCH_AS_STRING_VALUES, JSON.stringify(fetchAsString[invalidValueIndex])); @@ -188,7 +188,7 @@ exports.createStatementPostExec = function ( Errors.assertInternal(Util.isObject(connectionConfig)); // create a statement context - var statementContext = createStatementContext(); + const statementContext = createStatementContext(); statementContext.queryId = statementOptions.queryId; statementContext.complete = complete; @@ -280,7 +280,7 @@ function createContextPreExec( } // check for invalid complete callback - var complete = statementOptions.complete; + const complete = statementOptions.complete; if (Util.exists(complete)) { Errors.checkArgumentValid(Util.isFunction(complete), ErrorCodes.ERR_CONN_EXEC_STMT_INVALID_COMPLETE); @@ -293,14 +293,14 @@ function createContextPreExec( } // check for invalid fetchAsString - var fetchAsString = statementOptions.fetchAsString; + const fetchAsString = statementOptions.fetchAsString; if (Util.exists(fetchAsString)) { // check that the value is an array Errors.checkArgumentValid(Util.isArray(fetchAsString), ErrorCodes.ERR_CONN_EXEC_STMT_INVALID_FETCH_AS_STRING); // check that all the array elements are valid - var invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); + const invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); Errors.checkArgumentValid(invalidValueIndex === -1, ErrorCodes.ERR_CONN_EXEC_STMT_INVALID_FETCH_AS_STRING_VALUES, JSON.stringify(fetchAsString[invalidValueIndex])); @@ -319,14 +319,14 @@ function createContextPreExec( } // if binds are specified - var binds = statementOptions.binds; + const binds = statementOptions.binds; if (Util.exists(binds)) { // make sure the specified value is an array Errors.checkArgumentValid(Util.isArray(binds), ErrorCodes.ERR_CONN_EXEC_STMT_INVALID_BINDS); // make sure everything in the binds array is stringifiable - for (var index = 0, length = binds.length; index < length; index++) { + for (let index = 0, length = binds.length; index < length; index++) { Errors.checkArgumentValid(JSON.stringify(binds[index]) !== undefined, ErrorCodes.ERR_CONN_EXEC_STMT_INVALID_BIND_VALUES, binds[index]); } @@ -349,7 +349,7 @@ function createContextPreExec( } // create a statement context - var statementContext = createStatementContext(); + const statementContext = createStatementContext(); statementContext.sqlText = statementOptions.sqlText; statementContext.complete = complete; @@ -416,7 +416,7 @@ function BaseStatement( // TODO: add the parameters map to the statement context - var statement = this; + const statement = this; /** * Returns this statement's SQL text. @@ -548,7 +548,7 @@ function BaseStatement( context.refresh = function (callback) { // pick the appropriate function to get the result based on whether we // have the query id or request id (we should have at least one) - var sendRequestFn = context.queryId ? + const sendRequestFn = context.queryId ? 
sendRequestPostExec : sendRequestPreExec; // the current result error might be transient, @@ -573,7 +573,7 @@ function BaseStatement( context.onStatementRequestComp = async function (err, body) { // if we already have a result or a result error, we invoked the complete // callback once, so don't invoke it again - var suppressComplete = context.result || context.resultError; + const suppressComplete = context.result || context.resultError; // clear the previous result error context.resultError = null; @@ -618,7 +618,7 @@ function BaseStatement( * * @param {Object} body */ - context.onStatementRequestSucc = function (body) { + context.onStatementRequestSucc = function () { }; } @@ -633,7 +633,7 @@ Util.inherits(BaseStatement, EventEmitter); function invokeStatementComplete(statement, context) { // find out if the result will be streamed; // if a value is not specified, get it from the connection - var streamResult = context.streamResult; + let streamResult = context.streamResult; if (!Util.exists(streamResult)) { streamResult = context.connectionConfig.getStreamResult(); } @@ -649,7 +649,7 @@ function invokeStatementComplete(statement, context) { process.nextTick(function () { // aggregate all the rows into an array and pass this // array to the complete callback as the last argument - var rows = []; + const rows = []; statement.streamRows() .on('readable', function () { // read only when data is available @@ -686,7 +686,7 @@ function RowStatementPreExec( connectionConfig) { Logger.getInstance().debug('RowStatementPreExec'); // call super - BaseStatement.apply(this, arguments); + BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); // add the result request headers to the context context.resultRequestHeaders = buildResultRequestHeadersRow(); @@ -788,7 +788,7 @@ function createOnStatementRequestSuccRow(statement, context) { function FileStatementPreExec( statementOptions, context, services, connectionConfig) { // call super - BaseStatement.apply(this, arguments); + BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); // add the result request headers to the context context.resultRequestHeaders = buildResultRequestHeadersFile(); @@ -801,11 +801,11 @@ function FileStatementPreExec( context.onStatementRequestSucc = async function (body) { context.fileMetadata = body; - var fta = new file_transfer_agent(context); + const fta = new FileTransferAgent(context); await fta.execute(); // build a result from the response - var result = fta.result(); + const result = fta.result(); // init result and meta body.data.rowset = result.rowset; @@ -869,7 +869,7 @@ function StageBindingStatementPreExec( * * @param {Object} body */ - context.onStatementRequestSucc = function (body) { + context.onStatementRequestSucc = function () { //do nothing }; @@ -883,8 +883,8 @@ function StageBindingStatementPreExec( */ this.StageBindingRequest = async function (options, context, services, connectionConfig) { try { - var bindUploaderRequestId = uuidv4(); - var bind = new Bind.BindUploader(options, services, connectionConfig, bindUploaderRequestId); + const bindUploaderRequestId = uuidv4(); + const bind = new Bind.BindUploader(options, services, connectionConfig, bindUploaderRequestId); context.bindStage = Bind.GetStageName(bindUploaderRequestId); await bind.Upload(context.binds); return createRowStatementPreExec( @@ -933,7 +933,7 @@ Util.inherits(StageBindingStatementPreExec, BaseStatement); function StatementPostExec( statementOptions, context, services, 
connectionConfig) { // call super - BaseStatement.apply(this, arguments); + BaseStatement.apply(this, [statementOptions, context, services, connectionConfig]); // add the result request headers to the context context.resultRequestHeaders = buildResultRequestHeadersRow(); @@ -1077,14 +1077,14 @@ function createFnStreamRows(statement, context) { } // check for invalid fetchAsString - var fetchAsString = options.fetchAsString; + const fetchAsString = options.fetchAsString; if (Util.exists(fetchAsString)) { // check that the value is an array Errors.checkArgumentValid(Util.isArray(fetchAsString), ErrorCodes.ERR_STMT_STREAM_ROWS_INVALID_FETCH_AS_STRING); // check that all the array elements are valid - var invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); + const invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString); Errors.checkArgumentValid(invalidValueIndex === -1, ErrorCodes.ERR_STMT_STREAM_ROWS_INVALID_FETCH_AS_STRING_VALUES, JSON.stringify(fetchAsString[invalidValueIndex])); @@ -1119,10 +1119,10 @@ function endFetchRows(options, statement, context) { * @param {Object} context */ function fetchRowsFromResult(options, statement, context) { - var numInterrupts = 0; + let numInterrupts = 0; // forward to the result to get a FetchRowsOperation object - var operation = context.result.fetchRows(options); + const operation = context.result.fetchRows(options); // subscribe to the operation's 'complete' event operation.on('complete', function (err, continueCallback) { @@ -1159,8 +1159,8 @@ function fetchRowsFromResult(options, statement, context) { * @param {Function} callback */ function sendCancelStatement(statementContext, statement, callback) { - var url; - var json; + let url; + let json; // use different rest endpoints based on whether the query id is available if (statementContext.queryId) { @@ -1197,10 +1197,10 @@ function sendCancelStatement(statementContext, statement, callback) { */ function sendRequestPreExec(statementContext, onResultAvailable) { // get the request headers - var headers = statementContext.resultRequestHeaders; + const headers = statementContext.resultRequestHeaders; // build the basic json for the request - var json = + const json = { disableOfflineChunks: false, }; @@ -1261,10 +1261,10 @@ function sendRequestPreExec(statementContext, onResultAvailable) { this.sendRequest = function (statementContext, onResultAvailable) { // get the request headers - var headers = statementContext.resultRequestHeaders; + const headers = statementContext.resultRequestHeaders; // build the basic json for the request - var json = + const json = { disableOfflineChunks: false, sqlText: statementContext.sqlText @@ -1292,7 +1292,7 @@ this.sendRequest = function (statementContext, onResultAvailable) { json.queryContextDTO = statementContext.services.sf.getQueryContextDTO(); } - var options = + let options = { method: 'POST', headers: headers, @@ -1309,12 +1309,12 @@ this.sendRequest = function (statementContext, onResultAvailable) { statementContext, headers, onResultAvailable) }; - var sf = statementContext.services.sf; + const sf = statementContext.services.sf; // clone the options options = Util.apply({}, options); - return new Promise((resolve, reject) => { + return new Promise((resolve) => { resolve(sf.postAsync(options)); }); }; @@ -1328,15 +1328,15 @@ this.sendRequest = function (statementContext, onResultAvailable) { * @returns {Object} */ function buildBindsMap(bindsArray) { - var bindsMap = {}; - var isArrayBinding = bindsArray.length > 0 && 
Util.isArray(bindsArray[0]); - var singleArray = isArrayBinding ? bindsArray[0] : bindsArray; + const bindsMap = {}; + const isArrayBinding = bindsArray.length > 0 && Util.isArray(bindsArray[0]); + const singleArray = isArrayBinding ? bindsArray[0] : bindsArray; - for (var index = 0, length = singleArray.length; index < length; index++) { - var value = singleArray[index]; + for (let index = 0, length = singleArray.length; index < length; index++) { + let value = singleArray[index]; // pick the appropriate logical data type based on the bind value - var type; + let type; if (Util.isBoolean(value)) { type = 'BOOLEAN'; } else if (Util.isObject(value) || Util.isArray(value)) { @@ -1369,8 +1369,8 @@ function buildBindsMap(bindsArray) { } } else { value = []; - for (var rowIndex = 0; rowIndex < bindsArray.length; rowIndex++) { - var value0 = bindsArray[rowIndex][index]; + for (let rowIndex = 0; rowIndex < bindsArray.length; rowIndex++) { + let value0 = bindsArray[rowIndex][index]; if (value0 !== null && !Util.isString(value0)) { if (value0 instanceof Date) { value0 = value0.toJSON(); @@ -1402,7 +1402,7 @@ function buildBindsMap(bindsArray) { */ function sendRequestPostExec(statementContext, onResultAvailable) { // get the request headers - var headers = statementContext.resultRequestHeaders; + const headers = statementContext.resultRequestHeaders; // use the snowflake service to issue the request sendSfRequest(statementContext, @@ -1431,23 +1431,23 @@ function sendRequestPostExec(statementContext, onResultAvailable) { * appended to the url if the request is retried. */ function sendSfRequest(statementContext, options, appendQueryParamOnRetry) { - var sf = statementContext.services.sf; - var connectionConfig = statementContext.connectionConfig; + const sf = statementContext.services.sf; + const connectionConfig = statementContext.connectionConfig; // clone the options options = Util.apply({}, options); // get the original url and callback - var urlOrig = options.url; - var callbackOrig = options.callback; + const urlOrig = options.url; + const callbackOrig = options.callback; - var numRetries = 0; - var maxNumRetries = connectionConfig.getRetrySfMaxNumRetries(); - var sleep = connectionConfig.getRetrySfStartingSleepTime(); + let numRetries = 0; + const maxNumRetries = connectionConfig.getRetrySfMaxNumRetries(); + let sleep = connectionConfig.getRetrySfStartingSleepTime(); let lastStatusCodeForRetry; // create a function to send the request - var sendRequest = function () { + const sendRequest = function () { // if this is a retry and a query parameter should be appended to the url on // retry, update the url if ((numRetries > 0) && appendQueryParamOnRetry) { @@ -1478,7 +1478,7 @@ function sendSfRequest(statementContext, options, appendQueryParamOnRetry) { // use exponential backoff with decorrelated jitter to compute the // next sleep time. 
- var cap = connectionConfig.getRetrySfMaxSleepTime(); + const cap = connectionConfig.getRetrySfMaxSleepTime(); sleep = Util.nextSleepTime(1, cap, sleep); Logger.getInstance().debug( @@ -1508,7 +1508,7 @@ function sendSfRequest(statementContext, options, appendQueryParamOnRetry) { */ function buildResultRequestCallback( statementContext, headers, onResultAvailable) { - var callback = async function (err, body) { + const callback = async function (err, body) { if (err) { await onResultAvailable.call(null, err, null); } else { @@ -1575,8 +1575,8 @@ function countBinding(binds) { return 0; } Logger.getInstance().debug('-- binds.length= %d', binds.length); - var count = 0; - for (var index = 0; index < binds.length; index++) { + let count = 0; + for (let index = 0; index < binds.length; index++) { if (binds[index] != null && Util.isArray(binds[index])) { count += binds[index].length; } @@ -1585,13 +1585,13 @@ function countBinding(binds) { } function hasNextResult(statement, context) { - return function (options) { + return function () { return (context.multiResultIds != null && context.multiCurId + 1 < context.multiResultIds.length); }; } function createNextReuslt(statement, context) { - return function (options) { + return function () { if (hasNextResult(statement, context)) { context.multiCurId++; context.queryId = context.multiResultIds[context.multiCurId]; diff --git a/lib/core.js b/lib/core.js index dda28a541..f2d089550 100644 --- a/lib/core.js +++ b/lib/core.js @@ -2,17 +2,17 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('./util'); -var Errors = require('./errors'); -var ErrorCodes = Errors.codes; -var Connection = require('./connection/connection'); -var ConnectionConfig = require('./connection/connection_config'); -var ConnectionContext = require('./connection/connection_context'); -var GenericPool = require('generic-pool'); -var Logger = require('./logger'); -var LoggerCore = require('./logger/core'); -var DataTypes = require('./connection/result/data_types'); -var GlobalConfig = require('./global_config'); +const Util = require('./util'); +const Errors = require('./errors'); +const ErrorCodes = Errors.codes; +const Connection = require('./connection/connection'); +const ConnectionConfig = require('./connection/connection_config'); +const ConnectionContext = require('./connection/connection_context'); +const GenericPool = require('generic-pool'); +const Logger = require('./logger'); +const LoggerCore = require('./logger/core'); +const DataTypes = require('./connection/result/data_types'); +const GlobalConfig = require('./global_config'); /** * Creates a new instance of the Snowflake core module. @@ -33,7 +33,7 @@ function Core(options) { Logger.setInstance(new (options.loggerClass)()); // if a connection class is specified, it must be an object or function - var connectionClass = options.connectionClass; + let connectionClass = options.connectionClass; if (Util.exists(connectionClass)) { Errors.assertInternal( Util.isObject(connectionClass) || Util.isFunction(connectionClass)); @@ -42,9 +42,9 @@ function Core(options) { connectionClass = Connection; } - var qaMode = options.qaMode; - var clientInfo = options.client; - var ocspModes = GlobalConfig.ocspModes; + const qaMode = options.qaMode; + const clientInfo = options.client; + const ocspModes = GlobalConfig.ocspModes; /** * Creates a new Connection instance. 
@@ -54,7 +54,7 @@ function Core(options) { * * @returns {Object} */ - var createConnection = function createConnection(connectionOptions, config) { + const createConnection = function createConnection(connectionOptions, config) { // create a new ConnectionConfig and skip credential-validation if a config // object has been specified; this is because if a config object has been // specified, we're trying to deserialize a connection and the account name, @@ -63,20 +63,20 @@ function Core(options) { // Alternatively, if the connectionOptions includes token information then we will use that // instead of the username/password - var validateCredentials = !config && (connectionOptions && !connectionOptions.sessionToken); - var connectionConfig = + const validateCredentials = !config && (connectionOptions && !connectionOptions.sessionToken); + const connectionConfig = new ConnectionConfig(connectionOptions, validateCredentials, qaMode, clientInfo); // if an http client was specified in the options passed to the module, use // it, otherwise create a new HttpClient - var httpClient = options.httpClient || + const httpClient = options.httpClient || new options.httpClientClass(connectionConfig); return new connectionClass( new ConnectionContext(connectionConfig, httpClient, config)); }; - var instance = + const instance = { ocspModes: ocspModes, /** @@ -121,7 +121,7 @@ function Core(options) { ErrorCodes.ERR_CONN_DESERIALIZE_INVALID_CONFIG_TYPE); // try to json-parse serializedConfig - var config; + let config; try { config = JSON.parse(serializedConnection); } finally { @@ -160,7 +160,7 @@ function Core(options) { }); } - var insecureConnect = options.insecureConnect; + const insecureConnect = options.insecureConnect; if (Util.exists(insecureConnect)) { // check that the specified value is a boolean Errors.checkArgumentValid(Util.isBoolean(insecureConnect), @@ -218,7 +218,7 @@ function Core(options) { } // add some read-only constants - var nativeTypeValues = DataTypes.NativeTypes.values; + const nativeTypeValues = DataTypes.NativeTypes.values; Object.defineProperties(instance, { STRING: { value: nativeTypeValues.STRING }, @@ -267,7 +267,7 @@ function Core(options) { */ this.destroy = function (connection) { return new Promise((resolve) => { - connection.destroy(function (err, conn) { + connection.destroy(function (err) { if (err) { Logger.getInstance().error('Unable to disconnect: ' + err.message); } @@ -296,7 +296,7 @@ function Core(options) { * * @returns {Object} */ - var createPool = function createPool(connectionOptions, poolOptions) { + const createPool = function createPool(connectionOptions, poolOptions) { const connectionPool = GenericPool.createPool( new ConnectionFactory(connectionOptions), poolOptions diff --git a/lib/errors.js b/lib/errors.js index c1cfb4252..bb7384da8 100644 --- a/lib/errors.js +++ b/lib/errors.js @@ -2,10 +2,10 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('./util'); -var errorMessages = require('./constants/error_messages'); +const Util = require('./util'); +const errorMessages = require('./constants/error_messages'); -var codes = {}; +const codes = {}; // 400001 codes.ERR_INTERNAL_ASSERT_FAILED = 400001; @@ -173,7 +173,7 @@ exports.codes = codes; * A map in which the keys are the error codes and the values are the * corresponding SQL-states. 
*/ -var errCodeToSqlState = exports.mapErrorCodeToSqlState = +const errCodeToSqlState = exports.mapErrorCodeToSqlState = { 405501: '08002', 405502: '08002', @@ -185,7 +185,7 @@ var errCodeToSqlState = exports.mapErrorCodeToSqlState = /** * An enumeration of all the different types of errors we create. */ -var errorTypes = +const errorTypes = { // internal synchronous errors InternalAssertError: 'InternalAssertError', @@ -391,7 +391,7 @@ exports.createOCSPError = function (errorCode) { * @returns {Error} */ exports.VariantParseError = function (jsonParseError, xmlParseError) { - var errMessage = 'VariantParseError: Variant cannot be parsed neither as JSON nor as XML:\n' + + const errMessage = 'VariantParseError: Variant cannot be parsed neither as JSON nor as XML:\n' + ` - JSON parse error message: ${jsonParseError.message}\n` + ` - XML parse error message: ${xmlParseError.message}`; return new Error(errMessage); @@ -522,25 +522,25 @@ function createError(name, options) { // TODO: this code is a bit of a mess and needs to be cleaned up // create a new error - var error = new Error(); + const error = new Error(); // set its name error.name = name; // set the error code - var code; + let code; error.code = code = options.code; // if no error message was specified in the options - var message = options.message; + let message = options.message; if (!message) { // use the error code to get the error message template - var messageTemplate = errorMessages[code]; + const messageTemplate = errorMessages[code]; // if some error message arguments were specified, substitute them into the // error message template to get the full error message, otherwise just use // the error message template as the error message - var messageArgs = options.messageArgs; + let messageArgs = options.messageArgs; if (messageArgs) { messageArgs = messageArgs.slice(); messageArgs.unshift(messageTemplate); @@ -553,7 +553,7 @@ function createError(name, options) { // if no sql state was specified in the options, use the error code to try to // get the appropriate sql state - var sqlState = options.sqlState; + let sqlState = options.sqlState; if (!sqlState) { sqlState = errCodeToSqlState[code]; } @@ -574,8 +574,8 @@ function createError(name, options) { // if the error is not synchronous, add an externalize() method if (!options.synchronous) { - error.externalize = function (errorCode, errorMessageArgs, sqlState) { - var propNames = + error.externalize = function () { + const propNames = [ 'name', 'code', @@ -589,10 +589,10 @@ function createError(name, options) { 'stack' ]; - var externalizedError = new Error(); + const externalizedError = new Error(); - var propName, propValue; - for (var index = 0, length = propNames.length; index < length; index++) { + let propName, propValue; + for (let index = 0, length = propNames.length; index < length; index++) { propName = propNames[index]; propValue = this[propName]; if (Util.exists(propValue)) { diff --git a/lib/file_transfer_agent/azure_util.js b/lib/file_transfer_agent/azure_util.js index 987dedd4f..41ab158a8 100644 --- a/lib/file_transfer_agent/azure_util.js +++ b/lib/file_transfer_agent/azure_util.js @@ -26,7 +26,7 @@ function AzureLocation(containerName, path) { * @returns {Object} * @constructor */ -function azure_util(azure, filestream) { +function AzureUtil(azure, filestream) { const AZURE = typeof azure !== 'undefined' ? azure : require('@azure/storage-blob'); const fs = typeof filestream !== 'undefined' ? 
filestream : require('fs'); @@ -158,11 +158,10 @@ function azure_util(azure, filestream) { * @param {String} fileStream * @param {Object} meta * @param {Object} encryptionMetadata - * @param {Number} maxConcurrency * * @returns {null} */ - this.uploadFileStream = async function (fileStream, meta, encryptionMetadata, maxConcurrency) { + this.uploadFileStream = async function (fileStream, meta, encryptionMetadata) { const azureMetadata = { 'sfcdigest': meta['SHA256_DIGEST'] }; @@ -224,14 +223,12 @@ function azure_util(azure, filestream) { /** * Download the file blob then write the file. * - * @param {String} dataFile * @param {Object} meta - * @param {Object} encryptionMetadata - * @param {Number} maxConcurrency + * @param {Object} fullDstPath * * @returns {null} */ - this.nativeDownloadFile = async function (meta, fullDstPath, maxConcurrency) { + this.nativeDownloadFile = async function (meta, fullDstPath) { const stageInfo = meta['stageInfo']; const client = meta['client']; const azureLocation = this.extractContainerNameAndPath(stageInfo['location']); @@ -286,4 +283,4 @@ function azure_util(azure, filestream) { } } -module.exports = azure_util; +module.exports = AzureUtil; diff --git a/lib/file_transfer_agent/encrypt_util.js b/lib/file_transfer_agent/encrypt_util.js index ab6fa489b..f412d03a6 100644 --- a/lib/file_transfer_agent/encrypt_util.js +++ b/lib/file_transfer_agent/encrypt_util.js @@ -2,7 +2,7 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. */ -var path = require('path'); +const path = require('path'); const AES_CBC = 'aes-128-cbc'; const AES_ECB = 'aes-128-ecb'; @@ -42,7 +42,7 @@ exports.EncryptionMetadata = EncryptionMetadata; * @returns {Object} * @constructor */ -function encrypt_util(encrypt, filestream, temp) { +function EncryptUtil(encrypt, filestream, temp) { const crypto = typeof encrypt !== 'undefined' ? encrypt : require('crypto'); const fs = typeof filestream !== 'undefined' ? filestream : require('fs'); const tmp = typeof temp !== 'undefined' ? 
temp : require('tmp'); @@ -68,7 +68,7 @@ function encrypt_util(encrypt, filestream, temp) { function matDescToUnicode(matDesc) { matDesc['smkId'] = matDesc['smkId'].toString(); matDesc['keySize'] = matDesc['keySize'].toString(); - var newMatDesc = JSON.stringify(matDesc); + const newMatDesc = JSON.stringify(matDesc); return newMatDesc; } @@ -82,38 +82,37 @@ function encrypt_util(encrypt, filestream, temp) { * * @returns {Object} */ - this.encryptFileStream = async function (encryptionMaterial, fileStream, - tmpDir = null, chunkSize = blockSize * 4 * 1024) { + this.encryptFileStream = async function (encryptionMaterial, fileStream) { // Get decoded key from base64 encoded value - var decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); - var keySize = decodedKey.length; + const decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); + const keySize = decodedKey.length; // Get secure random bytes with block size - var ivData = getSecureRandom(blockSize); - var fileKey = getSecureRandom(blockSize); + const ivData = getSecureRandom(blockSize); + const fileKey = getSecureRandom(blockSize); // Create cipher with file key, AES CBC, and iv data - var cipher = crypto.createCipheriv(AES_CBC, fileKey, ivData); - var encrypted = cipher.update(fileStream); - var final = cipher.final(); - var encryptedData = Buffer.concat([encrypted, final]); + let cipher = crypto.createCipheriv(AES_CBC, fileKey, ivData); + const encrypted = cipher.update(fileStream); + const final = cipher.final(); + const encryptedData = Buffer.concat([encrypted, final]); // Create key cipher with decoded key and AES ECB cipher = crypto.createCipheriv(AES_ECB, decodedKey, null); // Encrypt with file key - var encKek = Buffer.concat([ + const encKek = Buffer.concat([ cipher.update(fileKey), cipher.final() ]); - var matDesc = MaterialDescriptor( + const matDesc = MaterialDescriptor( encryptionMaterial.smkId, encryptionMaterial.queryId, keySize * 8 ); - var metadata = EncryptionMetadata( + const metadata = EncryptionMetadata( encKek.toString(BASE64), ivData.toString(BASE64), matDescToUnicode(matDesc) @@ -136,28 +135,28 @@ function encrypt_util(encrypt, filestream, temp) { this.encryptFile = async function (encryptionMaterial, inFileName, tmpDir = null, chunkSize = blockSize * 4 * 1024) { // Get decoded key from base64 encoded value - var decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); - var keySize = decodedKey.length; + const decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); + const keySize = decodedKey.length; // Get secure random bytes with block size - var ivData = getSecureRandom(blockSize); - var fileKey = getSecureRandom(blockSize); + const ivData = getSecureRandom(blockSize); + const fileKey = getSecureRandom(blockSize); // Create cipher with file key, AES CBC, and iv data - var cipher = crypto.createCipheriv(AES_CBC, fileKey, ivData); + let cipher = crypto.createCipheriv(AES_CBC, fileKey, ivData); // Create temp file - var tmpobj = tmp.fileSync({ dir: tmpDir, prefix: path.basename(inFileName) + '#' }); - var tempOutputFileName = tmpobj.name; - var tempFd = tmpobj.fd; + const tmpobj = tmp.fileSync({ dir: tmpDir, prefix: path.basename(inFileName) + '#' }); + const tempOutputFileName = tmpobj.name; + const tempFd = tmpobj.fd; - await new Promise(function (resolve, reject) { - var infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); - var outfile = fs.createWriteStream(tempOutputFileName); + await new 
Promise(function (resolve) { + const infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); + const outfile = fs.createWriteStream(tempOutputFileName); infile.on('data', function (chunk) { // Encrypt chunk using cipher - var encrypted = cipher.update(chunk); + const encrypted = cipher.update(chunk); // Write to temp file outfile.write(encrypted); }); @@ -171,18 +170,18 @@ function encrypt_util(encrypt, filestream, temp) { cipher = crypto.createCipheriv(AES_ECB, decodedKey, null); // Encrypt with file key - var encKek = Buffer.concat([ + const encKek = Buffer.concat([ cipher.update(fileKey), cipher.final() ]); - var matDesc = MaterialDescriptor( + const matDesc = MaterialDescriptor( encryptionMaterial.smkId, encryptionMaterial.queryId, keySize * 8 ); - var metadata = EncryptionMetadata( + const metadata = EncryptionMetadata( encKek.toString(BASE64), ivData.toString(BASE64), matDescToUnicode(matDesc) @@ -210,23 +209,23 @@ function encrypt_util(encrypt, filestream, temp) { this.decryptFile = async function (metadata, encryptionMaterial, inFileName, tmpDir = null, chunkSize = blockSize * 4 * 1024) { // Get key and iv from metadata - var keyBase64 = metadata.key; - var ivBase64 = metadata.iv; + const keyBase64 = metadata.key; + const ivBase64 = metadata.iv; // Get decoded key from base64 encoded value - var decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); + const decodedKey = Buffer.from(encryptionMaterial[QUERY_STAGE_MASTER_KEY], BASE64); // Get key bytes and iv bytes from base64 encoded value - var keyBytes = new Buffer.from(keyBase64, BASE64); - var ivBytes = new Buffer.from(ivBase64, BASE64); + const keyBytes = new Buffer.from(keyBase64, BASE64); + const ivBytes = new Buffer.from(ivBase64, BASE64); // Create temp file - var tempOutputFileName; - var tempFd; - await new Promise((resolve) => { + let tempOutputFileName; + let tempFd; + await new Promise((resolve, reject) => { tmp.file({ dir: tmpDir, prefix: path.basename(inFileName) + '#' }, (err, path, fd) => { if (err) { - reject(err); + reject(err); } tempOutputFileName = path; tempFd = fd; @@ -235,8 +234,8 @@ function encrypt_util(encrypt, filestream, temp) { }); // Create key decipher with decoded key and AES ECB - var decipher = crypto.createDecipheriv(AES_ECB, decodedKey, null); - var fileKey = Buffer.concat([ + let decipher = crypto.createDecipheriv(AES_ECB, decodedKey, null); + const fileKey = Buffer.concat([ decipher.update(keyBytes), decipher.final() ]); @@ -244,13 +243,13 @@ function encrypt_util(encrypt, filestream, temp) { // Create decipher with file key, iv bytes, and AES CBC decipher = crypto.createDecipheriv(AES_CBC, fileKey, ivBytes); - await new Promise(function (resolve, reject) { - var infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); - var outfile = fs.createWriteStream(tempOutputFileName); + await new Promise(function (resolve) { + const infile = fs.createReadStream(inFileName, { highWaterMark: chunkSize }); + const outfile = fs.createWriteStream(tempOutputFileName); infile.on('data', function (chunk) { // Dncrypt chunk using decipher - var decrypted = decipher.update(chunk); + const decrypted = decipher.update(chunk); // Write to temp file outfile.write(decrypted); }); @@ -261,7 +260,7 @@ function encrypt_util(encrypt, filestream, temp) { }); // Close temp file - await new Promise((resolve) => { + await new Promise((resolve, reject) => { fs.close(tempFd, (err) => { if (err) { reject(err); @@ -274,4 +273,4 @@ function encrypt_util(encrypt, filestream, 
temp) { }; } -exports.encrypt_util = encrypt_util; +exports.EncryptUtil = EncryptUtil; diff --git a/lib/file_transfer_agent/file_compression_type.js b/lib/file_transfer_agent/file_compression_type.js index 1cae10ec0..3da91e609 100644 --- a/lib/file_transfer_agent/file_compression_type.js +++ b/lib/file_transfer_agent/file_compression_type.js @@ -96,9 +96,9 @@ const Types = [ } ]; -var subtypeToMeta = []; -for (var type of Types) { - for (var ms of type['mime_subtypes']) { +const subtypeToMeta = []; +for (const type of Types) { + for (const ms of type['mime_subtypes']) { subtypeToMeta[ms] = type; } } diff --git a/lib/file_transfer_agent/file_transfer_agent.js b/lib/file_transfer_agent/file_transfer_agent.js index 6c56f3b07..811d0833a 100644 --- a/lib/file_transfer_agent/file_transfer_agent.js +++ b/lib/file_transfer_agent/file_transfer_agent.js @@ -2,24 +2,24 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. */ -var binascii = require('binascii'); -var crypto = require('crypto'); -var glob = require('glob'); -var fs = require('fs'); -var os = require('os'); -var mime = require('mime-types'); -var path = require('path'); - -var statement = require('../connection/statement'); -var fileCompressionType = require('./file_compression_type'); -var expandTilde = require('expand-tilde'); -var SnowflakeFileUtil = new (require('./file_util').file_util)(); -var SnowflakeRemoteStorageUtil = new (require('./remote_storage_util').remote_storage_util)(); -var SnowflakeFileEncryptionMaterial = require('./remote_storage_util').SnowflakeFileEncryptionMaterial; -var SnowflakeS3Util = new (require('./s3_util'))(); -var SnowflakeLocalUtil = new (require('./local_util').local_util)(); - -var resultStatus = require('./file_util').resultStatus; +const binascii = require('binascii'); +const crypto = require('crypto'); +const glob = require('glob'); +const fs = require('fs'); +const os = require('os'); +const mime = require('mime-types'); +const path = require('path'); + +const statement = require('../connection/statement'); +const fileCompressionType = require('./file_compression_type'); +const expandTilde = require('expand-tilde'); +const SnowflakeFileUtil = new (require('./file_util').FileUtil)(); +const SnowflakeRemoteStorageUtil = new (require('./remote_storage_util').RemoteStorageUtil)(); +const SnowflakeFileEncryptionMaterial = require('./remote_storage_util').SnowflakeFileEncryptionMaterial; +const SnowflakeS3Util = new (require('./s3_util'))(); +const SnowflakeLocalUtil = new (require('./local_util').LocalUtil)(); + +const resultStatus = require('./file_util').resultStatus; const S3_FS = 'S3'; const AZURE_FS = 'AZURE'; @@ -29,7 +29,7 @@ const CMD_TYPE_UPLOAD = 'UPLOAD'; const CMD_TYPE_DOWNLOAD = 'DOWNLOAD'; const FILE_PROTOCOL = 'file://'; -var INJECT_WAIT_IN_PUT = 0; +const INJECT_WAIT_IN_PUT = 0; const RESULT_TEXT_COLUMN_DESC = function (name) { @@ -60,39 +60,38 @@ const RESULT_FIXED_COLUMN_DESC = function (name) { * @returns {Object} * @constructor */ -function file_transfer_agent(context) { - var context = context; - var response = context.fileMetadata; - var command = context.sqlText; +function FileTransferAgent(context) { + const response = context.fileMetadata; + const command = context.sqlText; - var commandType; - var encryptionMaterial = []; - var fileName; - var fileStream = context.fileStream ? context.fileStream : null; + let commandType; + const encryptionMaterial = []; + let fileName; + const fileStream = context.fileStream ? 
context.fileStream : null; - var autoCompress; - var sourceCompression; - var parallel; - var stageInfo; - var stageLocationType; - var presignedUrls; - var overwrite; + let autoCompress; + let sourceCompression; + let parallel; + let stageInfo; + let stageLocationType; + let presignedUrls; + let overwrite; - var useAccelerateEndpoint = false; + let useAccelerateEndpoint = false; - var srcFiles; - var srcFilesToEncryptionMaterial = {}; - var localLocation; + let srcFiles; + const srcFilesToEncryptionMaterial = {}; + let localLocation; - var results = []; + const results = []; // Store info of files retrieved - var filesToPut = []; + const filesToPut = []; // Store metadata of files retrieved - var fileMetadata = []; - var smallFileMetas = []; - var largeFileMetas = []; + const fileMetadata = []; + const smallFileMetas = []; + const largeFileMetas = []; /** * Execute PUT or GET command. @@ -101,7 +100,7 @@ function file_transfer_agent(context) { */ this.execute = async function () { if (fileStream) { - var data = response['data']; + const data = response['data']; commandType = data['command']; autoCompress = data['autoCompress']; sourceCompression = data['sourceCompression']; @@ -115,7 +114,7 @@ function file_transfer_agent(context) { throw new Error('Incorrect UploadFileStream command'); } - var currFileObj = {}; + const currFileObj = {}; currFileObj['srcFileName'] = data.src_locations[0]; currFileObj['srcFilePath'] = ''; currFileObj['srcFileSize'] = fileStream.length; @@ -132,15 +131,15 @@ function file_transfer_agent(context) { } //upload - var storageClient = getStorageClient(stageLocationType); - var client = storageClient.createClient(stageInfo, false); - var meta = fileMetadata[0]; + let storageClient = getStorageClient(stageLocationType); + const client = storageClient.createClient(stageInfo, false); + const meta = fileMetadata[0]; meta['parallel'] = parallel; meta['client'] = client; meta['fileStream'] = fileStream; //for digest - var hash = crypto.createHash('sha256') + const hash = crypto.createHash('sha256') .update(fileStream) .digest('base64'); meta['SHA256_DIGEST'] = hash; @@ -149,7 +148,7 @@ function file_transfer_agent(context) { meta['requireCompress'] = false; meta['dstFileName'] = meta['srcFileName']; - var storageClient = getStorageClient(meta['stageLocationType']); + storageClient = getStorageClient(meta['stageLocationType']); try { await storageClient.uploadOneFileStream(meta); } finally { @@ -183,7 +182,7 @@ function file_transfer_agent(context) { await transferAccelerateConfig(); await updateFileMetasWithPresignedUrl(); - for (var meta of fileMetadata) { + for (const meta of fileMetadata) { if (meta['srcFileSize'] > SnowflakeS3Util.DATA_SIZE_THRESHOLD) { // Add to large file metas meta['parallel'] = parallel; @@ -211,16 +210,16 @@ function file_transfer_agent(context) { * @returns {Object} */ this.result = function () { - var rowset = []; + const rowset = []; if (commandType === CMD_TYPE_UPLOAD) { - var srcFileSize; - var dstFileSize; - var srcCompressionType; - var dstCompressionType; - var errorDetails; + let srcFileSize; + let dstFileSize; + let srcCompressionType; + let dstCompressionType; + let errorDetails; if (results) { - for (var meta of results) { + for (const meta of results) { if (meta['srcCompressionType']) { srcCompressionType = meta['srcCompressionType']['name']; } else { @@ -264,11 +263,11 @@ function file_transfer_agent(context) { ] }; } else if (commandType === CMD_TYPE_DOWNLOAD) { - var dstFileSize; - var errorDetails; + let dstFileSize; + let 
errorDetails; if (results) { - for (var meta of results) { + for (const meta of results) { errorDetails = meta['errorDetails']; dstFileSize = meta['dstFileSize']; @@ -299,13 +298,13 @@ function file_transfer_agent(context) { * @returns {null} */ async function upload(largeFileMetas, smallFileMetas) { - var storageClient = getStorageClient(stageLocationType); - var client = storageClient.createClient(stageInfo, false); + const storageClient = getStorageClient(stageLocationType); + const client = storageClient.createClient(stageInfo, false); - for (var meta of smallFileMetas) { + for (const meta of smallFileMetas) { meta['client'] = client; } - for (var meta of largeFileMetas) { + for (const meta of largeFileMetas) { meta['client'] = client; } @@ -330,14 +329,14 @@ function file_transfer_agent(context) { * @returns {null} */ async function uploadFilesinSequential(fileMeta) { - var index = 0; - var fileMetaLen = fileMeta.length; + let index = 0; + const fileMetaLen = fileMeta.length; while (index < fileMetaLen) { - var result = await uploadOneFile(fileMeta[index]); + const result = await uploadOneFile(fileMeta[index]); if (result['resultStatus'] === resultStatus.RENEW_TOKEN) { - var client = renewExpiredClient(); - for (var index2 = index; index2 < fileMetaLen; index2++) { + const client = renewExpiredClient(); + for (let index2 = index; index2 < fileMetaLen; index2++) { fileMeta[index2]['client'] = client; } continue; @@ -362,21 +361,21 @@ function file_transfer_agent(context) { */ async function uploadOneFile(meta) { meta['realSrcFilePath'] = meta['srcFilePath']; - var tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tmp')); + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tmp')); meta['tmpDir'] = tmpDir; try { if (meta['requireCompress']) { - var result = await SnowflakeFileUtil.compressFileWithGZIP(meta['srcFilePath'], meta['tmpDir']); + const result = await SnowflakeFileUtil.compressFileWithGZIP(meta['srcFilePath'], meta['tmpDir']); meta['realSrcFilePath'] = result.name; } - var result = await SnowflakeFileUtil.getDigestAndSizeForFile(meta['realSrcFilePath']); - var sha256_digest = result.digest; - var uploadSize = result.size; + const result = await SnowflakeFileUtil.getDigestAndSizeForFile(meta['realSrcFilePath']); + const sha256Digest = result.digest; + const uploadSize = result.size; - meta['SHA256_DIGEST'] = sha256_digest; + meta['SHA256_DIGEST'] = sha256Digest; meta['uploadSize'] = uploadSize; - var storageClient = getStorageClient(meta['stageLocationType']); + const storageClient = getStorageClient(meta['stageLocationType']); await storageClient.uploadOneFileWithRetry(meta); } catch (err) { meta['dstFileSize'] = 0; @@ -388,8 +387,8 @@ function file_transfer_agent(context) { meta['errorDetails'] += ` file=${meta['srcFileName']}, real file=${meta['realSrcFilePath']}`; } finally { // Remove all files inside tmp folder - var matchingFileNames = glob.sync(path.join(meta['tmpDir'], meta['srcFileName'] + '*')); - for (var matchingFileName of matchingFileNames) { + const matchingFileNames = glob.sync(path.join(meta['tmpDir'], meta['srcFileName'] + '*')); + for (const matchingFileName of matchingFileNames) { await new Promise((resolve, reject) => { fs.unlink(matchingFileName, err => { if (err) { @@ -417,13 +416,13 @@ function file_transfer_agent(context) { * @returns {null} */ async function download(largeFileMetas, smallFileMetas) { - var storageClient = getStorageClient(stageLocationType); - var client = storageClient.createClient(stageInfo, false); + const storageClient = 
getStorageClient(stageLocationType); + const client = storageClient.createClient(stageInfo, false); - for (var meta of smallFileMetas) { + for (const meta of smallFileMetas) { meta['client'] = client; } - for (var meta of largeFileMetas) { + for (const meta of largeFileMetas) { meta['client'] = client; } @@ -448,14 +447,14 @@ function file_transfer_agent(context) { * @returns {null} */ async function downloadFilesinSequential(fileMeta) { - var index = 0; - var fileMetaLen = fileMeta.length; + let index = 0; + const fileMetaLen = fileMeta.length; while (index < fileMetaLen) { - var result = await downloadOneFile(fileMeta[index]); + const result = await downloadOneFile(fileMeta[index]); if (result['resultStatus'] === resultStatus.RENEW_TOKEN) { - var client = renewExpiredClient(); - for (var index2 = index; index2 < fileMetaLen; index2++) { + const client = renewExpiredClient(); + for (let index2 = index; index2 < fileMetaLen; index2++) { fileMeta[index2]['client'] = client; } continue; @@ -479,7 +478,7 @@ function file_transfer_agent(context) { * @returns {Object} */ async function downloadOneFile(meta) { - var tmpDir = await new Promise((resolve, reject) => { + const tmpDir = await new Promise((resolve, reject) => { fs.mkdtemp(path.join(os.tmpdir(), 'tmp'), (err, dir) => { if (err) { reject(err); @@ -490,7 +489,7 @@ function file_transfer_agent(context) { meta['tmpDir'] = tmpDir; try { - var storageClient = getStorageClient(meta['stageLocationType']); + const storageClient = getStorageClient(meta['stageLocationType']); await storageClient.downloadOneFile(meta); } catch (err) { meta['dstFileSize'] = -1; @@ -512,8 +511,8 @@ function file_transfer_agent(context) { */ async function transferAccelerateConfig() { if (stageLocationType === S3_FS) { - var client = SnowflakeRemoteStorageUtil.createClient(stageInfo, false); - var s3location = SnowflakeS3Util.extractBucketNameAndPath(stageInfo['location']); + const client = SnowflakeRemoteStorageUtil.createClient(stageInfo, false); + const s3location = SnowflakeS3Util.extractBucketNameAndPath(stageInfo['location']); try { await client.getBucketAccelerateConfiguration({ Bucket: s3location.bucketName }) @@ -536,29 +535,29 @@ function file_transfer_agent(context) { * @returns {null} */ async function updateFileMetasWithPresignedUrl() { - var storageClient = getStorageClient(stageLocationType); + const storageClient = getStorageClient(stageLocationType); // presigned url only applies to remote storage if (storageClient === SnowflakeRemoteStorageUtil) { // presigned url only applies to GCS if (stageLocationType === GCS_FS) { if (commandType === CMD_TYPE_UPLOAD) { - var filePathToReplace = getFileNameFromPutCommand(command); + const filePathToReplace = getFileNameFromPutCommand(command); - for (var meta of fileMetadata) { - var fileNameToReplaceWith = meta['dstFileName']; - var commandWithSingleFile = command; - var commandWithSingleFile = commandWithSingleFile.replace(filePathToReplace, fileNameToReplaceWith); + for (const meta of fileMetadata) { + const fileNameToReplaceWith = meta['dstFileName']; + let commandWithSingleFile = command; + commandWithSingleFile = commandWithSingleFile.replace(filePathToReplace, fileNameToReplaceWith); - var options = { sqlText: commandWithSingleFile }; - var newContext = statement.createContext(options, context.services, context.connectionConfig); + const options = { sqlText: commandWithSingleFile }; + const newContext = statement.createContext(options, context.services, context.connectionConfig); - var ret = await 
statement.sendRequest(newContext); + const ret = await statement.sendRequest(newContext); meta['stageInfo'] = ret['data']['data']['stageInfo']; meta['presignedUrl'] = meta['stageInfo']['presignedUrl']; } } else if (commandType === CMD_TYPE_DOWNLOAD) { - for (var index = 0; index < fileMetadata.length; index++) { + for (let index = 0; index < fileMetadata.length; index++) { fileMetadata[index]['presignedUrl'] = presignedUrls[index]; } } @@ -576,14 +575,14 @@ function file_transfer_agent(context) { function getFileNameFromPutCommand(command) { // Extract file path from PUT command: // E.g. "PUT file://C: @DB.SCHEMA.%TABLE;" - var startIndex = command.indexOf(FILE_PROTOCOL) + FILE_PROTOCOL.length; - var spaceIndex = command.substring(startIndex).indexOf(' '); - var quoteIndex = command.substring(startIndex).indexOf('\''); - var endIndex = spaceIndex; + const startIndex = command.indexOf(FILE_PROTOCOL) + FILE_PROTOCOL.length; + const spaceIndex = command.substring(startIndex).indexOf(' '); + const quoteIndex = command.substring(startIndex).indexOf('\''); + let endIndex = spaceIndex; if (quoteIndex !== -1 && quoteIndex < spaceIndex) { endIndex = quoteIndex; } - var filePath = command.substring(startIndex, startIndex + endIndex); + const filePath = command.substring(startIndex, startIndex + endIndex); return filePath; } @@ -612,37 +611,38 @@ function file_transfer_agent(context) { * @returns {null} */ function parseCommand() { - var data = response['data']; + const data = response['data']; commandType = data['command']; if (commandType === CMD_TYPE_UPLOAD) { - var src = data['src_locations'][0]; + const src = data['src_locations'][0]; // Get root directory of file path - var root = path.dirname(src); + const root = path.dirname(src); + let dir; // Check root directory exists if (fs.existsSync(root)) { // Check the root path is a directory - var dir = fs.statSync(root); + dir = fs.statSync(root); if (dir.isDirectory()) { // Get file name to upload fileName = path.basename(src); // Full path name of the file - var fileNameFullPath = path.join(root, fileName); + const fileNameFullPath = path.join(root, fileName); // If file name has a wildcard if (fileName.includes('*')) { // Get all file names that matches the wildcard - var matchingFileNames = glob.sync(path.join(root, fileName)); + const matchingFileNames = glob.sync(path.join(root, fileName)); - for (var matchingFileName of matchingFileNames) { + for (const matchingFileName of matchingFileNames) { initEncryptionMaterial(); - var fileInfo = fs.statSync(matchingFileName); - var currFileObj = {}; + const fileInfo = fs.statSync(matchingFileName); + const currFileObj = {}; currFileObj['srcFileName'] = matchingFileName.substring(matchingFileName.lastIndexOf('/') + 1); currFileObj['srcFilePath'] = matchingFileName; currFileObj['srcFileSize'] = fileInfo.size; @@ -654,9 +654,9 @@ function file_transfer_agent(context) { if (fs.existsSync(root)) { initEncryptionMaterial(); - var fileInfo = fs.statSync(fileNameFullPath); + const fileInfo = fs.statSync(fileNameFullPath); - var currFileObj = {}; + const currFileObj = {}; currFileObj['srcFileName'] = fileName; currFileObj['srcFilePath'] = fileNameFullPath; currFileObj['srcFileSize'] = fileInfo.size; @@ -684,7 +684,7 @@ function file_transfer_agent(context) { throw new Error('The number of downloading files doesn\'t match'); } localLocation = expandTilde(data['localLocation']); - var dir = fs.statSync(localLocation); + const dir = fs.statSync(localLocation); if (!dir.isDirectory()) { throw new Error('The local 
path is not a directory: ' + localLocation); } @@ -704,7 +704,7 @@ function file_transfer_agent(context) { */ function initEncryptionMaterial() { if (response['data'] && response['data']['encryptionMaterial']) { - var rootNode = response['data']['encryptionMaterial']; + const rootNode = response['data']['encryptionMaterial']; if (commandType === CMD_TYPE_UPLOAD) { encryptionMaterial.push(new SnowflakeFileEncryptionMaterial( @@ -729,8 +729,8 @@ function file_transfer_agent(context) { */ function initFileMetadata() { if (commandType === CMD_TYPE_UPLOAD) { - for (var file of filesToPut) { - var currFileObj = {}; + for (const file of filesToPut) { + const currFileObj = {}; currFileObj['srcFilePath'] = file['srcFilePath']; currFileObj['srcFileName'] = file['srcFileName']; currFileObj['srcFileSize'] = file['srcFileSize']; @@ -741,8 +741,8 @@ function file_transfer_agent(context) { fileMetadata.push(currFileObj); } } else if (commandType === CMD_TYPE_DOWNLOAD) { - for (var fileName of srcFiles) { - var currFileObj = {}; + for (const fileName of srcFiles) { + const currFileObj = {}; currFileObj['srcFileName'] = fileName; currFileObj['dstFileName'] = fileName; currFileObj['stageLocationType'] = stageLocationType; @@ -756,8 +756,8 @@ function file_transfer_agent(context) { } if (encryptionMaterial.length > 0) { - var i = 0; - for (var file of fileMetadata) { + let i = 0; + for (const file of fileMetadata) { file['encryptionMaterial'] = encryptionMaterial[i]; i++; } @@ -770,8 +770,8 @@ function file_transfer_agent(context) { * @returns {null} */ function processFileCompressionType() { - var userSpecifiedSourceCompression; - var autoDetect; + let userSpecifiedSourceCompression; + let autoDetect; if (sourceCompression === 'auto_detect') { autoDetect = true; @@ -785,18 +785,19 @@ function file_transfer_agent(context) { autoDetect = false; } - for (var meta of fileMetadata) { - var fileName = meta['srcFileName']; - var filePath = meta['srcFilePath']; + for (const meta of fileMetadata) { + const fileName = meta['srcFileName']; + const filePath = meta['srcFilePath']; - var currentFileCompressionType; + let currentFileCompressionType; + let encoding; if (autoDetect) { - var encoding = mime.lookup(fileName); + encoding = mime.lookup(fileName); if (!encoding) { - var test = Buffer.alloc(4); - var fd = fs.openSync(filePath, 'r+'); + const test = Buffer.alloc(4); + const fd = fs.openSync(filePath, 'r+'); fs.readSync(fd, test, 0, 4, 0); fs.closeSync(fd); @@ -854,4 +855,7 @@ function file_transfer_agent(context) { } } -module.exports = file_transfer_agent; +//TODO SNOW-992387: Create a function to renew expired client +function renewExpiredClient() {} + +module.exports = FileTransferAgent; diff --git a/lib/file_transfer_agent/file_util.js b/lib/file_transfer_agent/file_util.js index 3fa76837b..f9e56601e 100644 --- a/lib/file_transfer_agent/file_util.js +++ b/lib/file_transfer_agent/file_util.js @@ -2,11 +2,11 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. */ -var crypto = require('crypto'); -var fs = require('fs'); -var path = require('path'); -var struct = require('python-struct'); -var zlib = require('zlib'); +const crypto = require('crypto'); +const fs = require('fs'); +const path = require('path'); +const struct = require('python-struct'); +const zlib = require('zlib'); const resultStatus = { ERROR: 'ERROR', @@ -40,7 +40,7 @@ exports.FileHeader = FileHeader; * @returns {Object} * @constructor */ -function file_util() { +function FileUtil() { /** * Compress file with GZIP. 
* @@ -51,17 +51,17 @@ function file_util() { */ this.compressFileWithGZIP = async function (fileName, tmpDir) { // Set file name and path for compressed file - var baseName = path.basename(fileName); - var gzipFileName = path.join(tmpDir, baseName + '_c.gz'); + const baseName = path.basename(fileName); + const gzipFileName = path.join(tmpDir, baseName + '_c.gz'); - await new Promise(function (resolve, reject) { + await new Promise(function (resolve) { // Create gzip object - var gzip = zlib.createGzip(); + const gzip = zlib.createGzip(); // Create stream object for reader and writer - var reader = fs.createReadStream(fileName); - var writer = fs.createWriteStream(gzipFileName); + const reader = fs.createReadStream(fileName); + const writer = fs.createWriteStream(gzipFileName); // Write and compress file - var result = reader.pipe(gzip).pipe(writer); + const result = reader.pipe(gzip).pipe(writer); result.on('finish', function () { resolve(); }); @@ -69,7 +69,7 @@ function file_util() { await this.normalizeGzipHeader(gzipFileName); - var fileInfo = fs.statSync(gzipFileName); + const fileInfo = fs.statSync(gzipFileName); return { name: gzipFileName, @@ -87,7 +87,7 @@ function file_util() { * @returns {null} */ this.normalizeGzipHeader = async function (gzipFileName) { - var fd = fs.openSync(gzipFileName, 'rs+'); + const fd = fs.openSync(gzipFileName, 'rs+'); // Reset the timestamp in gzip header // Write at position 4 @@ -104,15 +104,15 @@ function file_util() { * @returns {Object} */ this.getDigestAndSizeForFile = async function (fileName) { - var chunkSize = 16 * 4 * 1024; + const chunkSize = 16 * 4 * 1024; - var fileInfo = fs.statSync(fileName); - var bufferSize = fileInfo.size; + const fileInfo = fs.statSync(fileName); + const bufferSize = fileInfo.size; - var buffer = []; - await new Promise(function (resolve, reject) { + let buffer = []; + await new Promise(function (resolve) { // Create reader stream and set maximum chunk size - var infile = fs.createReadStream(fileName, { highWaterMark: chunkSize }); + const infile = fs.createReadStream(fileName, { highWaterMark: chunkSize }); infile.on('data', function (chunk) { buffer.push(chunk); }); @@ -122,7 +122,7 @@ function file_util() { }); }); - var hash = crypto.createHash('sha256') + const hash = crypto.createHash('sha256') .update(buffer) .digest('base64'); @@ -133,4 +133,4 @@ function file_util() { }; } -exports.file_util = file_util; +exports.FileUtil = FileUtil; diff --git a/lib/file_transfer_agent/gcs_util.js b/lib/file_transfer_agent/gcs_util.js index 3520cc8be..5b7eacceb 100644 --- a/lib/file_transfer_agent/gcs_util.js +++ b/lib/file_transfer_agent/gcs_util.js @@ -15,14 +15,16 @@ const GCS_METADATA_ENCRYPTIONDATAPROP = GCS_METADATA_PREFIX + ENCRYPTIONDATAPROP const GCS_FILE_HEADER_DIGEST = 'gcs-file-header-digest'; const GCS_FILE_HEADER_CONTENT_LENGTH = 'gcs-file-header-content-length'; const GCS_FILE_HEADER_ENCRYPTION_METADATA = 'gcs-file-header-encryption-metadata'; -const CONTENT_CHUNK_SIZE = 10 * 1024; const HTTP_HEADER_CONTENT_ENCODING = 'Content-Encoding'; -const HTTP_HEADER_ACCEPT_ENCODING = 'Accept-Encoding'; const resultStatus = require('./file_util').resultStatus; const { Storage } = require('@google-cloud/storage'); +const EXPIRED_TOKEN = 'ExpiredToken'; + +const ERRORNO_WSAECONNABORTED = 10053; // network connection was aborted + // GCS Location function GCSLocation(bucketName, path) { return { @@ -40,7 +42,7 @@ function GCSLocation(bucketName, path) { * @returns {Object} * @constructor */ -function gcs_util(httpclient, 
filestream) { +function GCSUtil(httpclient, filestream) { const axios = typeof httpclient !== 'undefined' ? httpclient : require('axios'); const fs = typeof filestream !== 'undefined' ? filestream : require('fs'); @@ -52,10 +54,10 @@ function gcs_util(httpclient, filestream) { * @returns {String} */ this.createClient = function (stageInfo) { - var stageCredentials = stageInfo['creds']; - var gcsToken = stageCredentials['GCS_ACCESS_TOKEN']; + const stageCredentials = stageInfo['creds']; + const gcsToken = stageCredentials['GCS_ACCESS_TOKEN']; - var client; + let client; if (gcsToken) { const interceptors = []; interceptors.push({ @@ -66,7 +68,7 @@ function gcs_util(httpclient, filestream) { } }); - var storage = new Storage({ interceptors_: interceptors }); + const storage = new Storage({ interceptors_: interceptors }); client = { gcsToken: gcsToken, gcsClient: storage }; } else { @@ -84,8 +86,8 @@ function gcs_util(httpclient, filestream) { * @returns {Object} */ this.extractBucketNameAndPath = function (stageLocation) { - var containerName = stageLocation; - var path = ''; + let containerName = stageLocation; + let path = ''; // split stage location as bucket name and path if (stageLocation.includes('/')) { @@ -125,20 +127,20 @@ function gcs_util(httpclient, filestream) { } }); } else { - var url = this.generateFileURL(meta['stageInfo']['location'], lstrip(filename, '/')); - var accessToken = meta['client'].gcsToken; - var gcsHeaders = { 'Authorization': `Bearer ${accessToken}` }; - var encryptionMetadata; - var digest; - var contentLength; - var encryptionDataProp; - var matDescKey; + const url = this.generateFileURL(meta['stageInfo']['location'], lstrip(filename, '/')); + const accessToken = meta['client'].gcsToken; + const gcsHeaders = { 'Authorization': `Bearer ${accessToken}` }; + let encryptionMetadata; + let digest; + let contentLength; + let encryptionDataProp; + let matDescKey; try { if (accessToken) { - var gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); + const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); - var metadata = await meta['client'].gcsClient + const metadata = await meta['client'].gcsClient .bucket(gcsLocation.bucketName) .file(gcsLocation.path + filename) .getMetadata(); @@ -148,7 +150,7 @@ function gcs_util(httpclient, filestream) { encryptionDataProp = metadata[0].metadata[ENCRYPTIONDATAPROP]; matDescKey = metadata[0].metadata[MATDESC_KEY]; } else { - var response = await axios.head(url, { headers: gcsHeaders }); + const response = await axios.head(url, { headers: gcsHeaders }); digest = response.headers[GCS_METADATA_SFC_DIGEST]; contentLength = response.headers['content-length']; @@ -157,7 +159,7 @@ function gcs_util(httpclient, filestream) { } if (encryptionDataProp) { - var encryptionData = JSON.parse(encryptionDataProp); + const encryptionData = JSON.parse(encryptionDataProp); if (encryptionData) { encryptionMetadata = EncryptionMetadata( encryptionData['WrappedContentKey']['EncryptedKey'], @@ -175,7 +177,7 @@ function gcs_util(httpclient, filestream) { encryptionMetadata ); } catch (err) { - var errCode = err['code'] ? err['code'] : err.response.status; + const errCode = err['code'] ? 
err['code'] : err.response.status; if ([403, 408, 429, 500, 503].includes(errCode)) { meta['lastError'] = err; @@ -209,7 +211,7 @@ function gcs_util(httpclient, filestream) { * @returns {null} */ this.uploadFile = async function (dataFile, meta, encryptionMetadata, maxConcurrency) { - var fileStream = fs.readFileSync(dataFile); + const fileStream = fs.readFileSync(dataFile); await this.uploadFileStream(fileStream, meta, encryptionMetadata, maxConcurrency); }; @@ -219,21 +221,20 @@ function gcs_util(httpclient, filestream) { * @param {String} fileStream * @param {Object} meta * @param {Object} encryptionMetadata - * @param {Number} maxConcurrency * * @returns {null} */ - this.uploadFileStream = async function (fileStream, meta, encryptionMetadata, maxConcurrency) { - var uploadUrl = meta['presignedUrl']; - var accessToken = null; + this.uploadFileStream = async function (fileStream, meta, encryptionMetadata) { + let uploadUrl = meta['presignedUrl']; + let accessToken = null; if (!uploadUrl) { - var tempFilename = meta['dstFileName'].substring(meta['dstFileName'].indexOf('/') + 1, meta['dstFileName'].length); + const tempFilename = meta['dstFileName'].substring(meta['dstFileName'].indexOf('/') + 1, meta['dstFileName'].length); uploadUrl = this.generateFileURL(meta['stageInfo']['location'], tempFilename); accessToken = meta['client'].gcsToken; } - var contentEncoding = ''; + let contentEncoding = ''; if (meta['dstCompressionType']) { contentEncoding = meta['dstCompressionType']['name']; @@ -245,7 +246,7 @@ function gcs_util(httpclient, filestream) { contentEncoding = ''; } - var gcsHeaders = { + const gcsHeaders = { [HTTP_HEADER_CONTENT_ENCODING]: contentEncoding, [GCS_METADATA_SFC_DIGEST]: meta['SHA256_DIGEST'], }; @@ -277,7 +278,7 @@ function gcs_util(httpclient, filestream) { try { if (accessToken) { - var gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); + const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); await meta['client'].gcsClient .bucket(gcsLocation.bucketName) @@ -325,17 +326,15 @@ function gcs_util(httpclient, filestream) { /** * Download the file. 
* - * @param {String} dataFile * @param {Object} meta - * @param {Object} encryptionMetadata - * @param {Number} maxConcurrency + * @param {Object} fullDstPath * * @returns {null} */ - this.nativeDownloadFile = async function (meta, fullDstPath, maxConcurrency) { - var downloadUrl = meta['presignedUrl']; - var accessToken = null; - var gcsHeaders = {}; + this.nativeDownloadFile = async function (meta, fullDstPath) { + let downloadUrl = meta['presignedUrl']; + let accessToken = null; + let gcsHeaders = {}; if (!downloadUrl) { downloadUrl = this.generateFileURL( @@ -345,14 +344,14 @@ function gcs_util(httpclient, filestream) { gcsHeaders = { 'Authorization': `Bearer ${accessToken}` }; } - var encryptionDataprop; - var matDescKey; - var sfcDigest; - var size; + let encryptionDataprop; + let matDescKey; + let sfcDigest; + let size; try { if (accessToken) { - var gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); + const gcsLocation = this.extractBucketNameAndPath(meta['stageInfo']['location']); await meta['client'].gcsClient .bucket(gcsLocation.bucketName) @@ -361,7 +360,7 @@ function gcs_util(httpclient, filestream) { destination: fullDstPath }); - var metadata = await meta['client'].gcsClient + const metadata = await meta['client'].gcsClient .bucket(gcsLocation.bucketName) .file(gcsLocation.path + meta['srcFileName']) .getMetadata(); @@ -371,7 +370,7 @@ function gcs_util(httpclient, filestream) { sfcDigest = metadata[0].metadata[SFC_DIGEST]; size = metadata[0].size; } else { - var response; + let response; await axios({ method: 'get', url: downloadUrl, @@ -411,12 +410,12 @@ function gcs_util(httpclient, filestream) { return; } - var encryptionData; + let encryptionData; if (encryptionDataprop) { encryptionData = JSON.parse(encryptionDataprop); } - var encryptionMetadata; + let encryptionMetadata; if (encryptionData) { encryptionMetadata = EncryptionMetadata( encryptionData['WrappedContentKey']['EncryptedKey'], @@ -425,7 +424,7 @@ function gcs_util(httpclient, filestream) { ); } - var fileInfo = fs.statSync(fullDstPath); + const fileInfo = fs.statSync(fullDstPath); meta['srcFileSize'] = fileInfo.size; meta['resultStatus'] = resultStatus.DOWNLOADED; @@ -444,9 +443,9 @@ function gcs_util(httpclient, filestream) { * @returns {String} */ this.generateFileURL = function (stageLocation, filename) { - var gcsLocation = this.extractBucketNameAndPath(stageLocation); - var fullFilePath = `${gcsLocation.path}${filename}`; - var link = 'https://storage.googleapis.com/' + gcsLocation.bucketName + '/' + fullFilePath; + const gcsLocation = this.extractBucketNameAndPath(stageLocation); + const fullFilePath = `${gcsLocation.path}${filename}`; + const link = 'https://storage.googleapis.com/' + gcsLocation.bucketName + '/' + fullFilePath; return link; }; @@ -466,4 +465,4 @@ function gcs_util(httpclient, filestream) { } } -module.exports = gcs_util; +module.exports = GCSUtil; diff --git a/lib/file_transfer_agent/local_util.js b/lib/file_transfer_agent/local_util.js index c2fc6b724..c003fa642 100644 --- a/lib/file_transfer_agent/local_util.js +++ b/lib/file_transfer_agent/local_util.js @@ -2,8 +2,8 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. 
*/ -var fs = require('fs'); -var path = require('path'); +const fs = require('fs'); +const path = require('path'); const expandTilde = require('expand-tilde'); const resultStatus = require('./file_util').resultStatus; @@ -13,12 +13,12 @@ const resultStatus = require('./file_util').resultStatus; * @returns {Object} * @constructor */ -function local_util() { - this.createClient = function (stageInfo, useAccelerateEndpoint) { +function LocalUtil() { + this.createClient = function () { return null; }; - this.destroyClient = function (stageInfo, client) { + this.destroyClient = function () { }; /** @@ -31,22 +31,22 @@ function local_util() { this.uploadOneFileWithRetry = async function (meta) { await new Promise(function (resolve) { // Create stream object for reader and writer - var reader = fs.createReadStream(meta['realSrcFilePath']); + const reader = fs.createReadStream(meta['realSrcFilePath']); // Create directory if doesn't exist if (!fs.existsSync(meta['stageInfo']['location'])) { fs.mkdirSync(meta['stageInfo']['location'], { recursive: true }); } - var output = path.join(meta['stageInfo']['location'], meta['dstFileName']); + let output = path.join(meta['stageInfo']['location'], meta['dstFileName']); // expand '~' and '~user' expressions if (process.platform !== 'win32') { output = expandTilde(output); } - var writer = fs.createWriteStream(output); + const writer = fs.createWriteStream(output); // Write file - var result = reader.pipe(writer); + const result = reader.pipe(writer); result.on('finish', function () { resolve(); }); @@ -64,32 +64,33 @@ function local_util() { * @returns {null} */ this.downloadOneFile = async function (meta) { + let output; await new Promise(function (resolve) { const srcFilePath = expandTilde(meta['stageInfo']['location']); // Create stream object for reader and writer - var realSrcFilePath = path.join(srcFilePath, meta['srcFileName']); - var reader = fs.createReadStream(realSrcFilePath); + const realSrcFilePath = path.join(srcFilePath, meta['srcFileName']); + const reader = fs.createReadStream(realSrcFilePath); // Create directory if doesn't exist if (!fs.existsSync(meta['localLocation'])) { fs.mkdirSync(meta['localLocation'], { recursive: true }); } - var output = path.join(meta['localLocation'], meta['dstFileName']); + output = path.join(meta['localLocation'], meta['dstFileName']); - var writer = fs.createWriteStream(output); + const writer = fs.createWriteStream(output); // Write file - var result = reader.pipe(writer); + const result = reader.pipe(writer); result.on('finish', function () { resolve(); }); }); - var fileStat = fs.statSync(output); + const fileStat = fs.statSync(output); meta['dstFileSize'] = fileStat.size; meta['resultStatus'] = resultStatus.DOWNLOADED; }; } -exports.local_util = local_util; +exports.LocalUtil = LocalUtil; diff --git a/lib/file_transfer_agent/remote_storage_util.js b/lib/file_transfer_agent/remote_storage_util.js index 09d01307a..8137371d3 100644 --- a/lib/file_transfer_agent/remote_storage_util.js +++ b/lib/file_transfer_agent/remote_storage_util.js @@ -2,21 +2,21 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. 
*/ -var fs = require('fs'); -var path = require('path'); -var SnowflakeS3Util = require('./s3_util'); -var SnowflakeAzureUtil = require('./azure_util'); -var SnowflakeGCSUtil = require('./gcs_util'); +const fs = require('fs'); +const path = require('path'); +const SnowflakeS3Util = require('./s3_util'); +const SnowflakeAzureUtil = require('./azure_util'); +const SnowflakeGCSUtil = require('./gcs_util'); -var SnowflakeEncryptionUtil = new (require('./encrypt_util').encrypt_util)(); -var resultStatus = require('./file_util').resultStatus; +const SnowflakeEncryptionUtil = new (require('./encrypt_util').EncryptUtil)(); +const resultStatus = require('./file_util').resultStatus; const DEFAULT_CONCURRENCY = 1; const DEFAULT_MAX_RETRY = 5; // File Encryption Material function SnowflakeFileEncryptionMaterial(key, qid, smkid) { - var smkidString = '' + smkid; + const smkidString = '' + smkid; return { 'queryStageMasterKey': key, // query stage master key 'queryId': qid, // query id @@ -26,17 +26,13 @@ function SnowflakeFileEncryptionMaterial(key, qid, smkid) { exports.SnowflakeFileEncryptionMaterial = SnowflakeFileEncryptionMaterial; -function NeedRenewTokenError(Exception) { - return; -} - /** * Creates a remote storage utility object. * * @returns {Object} * @constructor */ -function remote_storage_util() { +function RemoteStorageUtil() { /** * Get storage type based on location type. * @@ -65,7 +61,7 @@ function remote_storage_util() { * @returns {Object} */ this.createClient = function (stageInfo, useAccelerateEndpoint = false) { - var utilClass = this.getForStorageType(stageInfo['locationType']); + const utilClass = this.getForStorageType(stageInfo['locationType']); return utilClass.createClient(stageInfo, useAccelerateEndpoint); }; @@ -76,7 +72,7 @@ function remote_storage_util() { * @param {Object} client */ this.destroyClient = function (stageInfo, client) { - var utilClass = this.getForStorageType(stageInfo['locationType']); + const utilClass = this.getForStorageType(stageInfo['locationType']); if (utilClass.destroyClient) { utilClass.destroyClient(client); } @@ -90,26 +86,26 @@ function remote_storage_util() { * @returns {null} */ this.uploadOneFileStream = async function (meta) { - var encryptionMetadata; - var dataFileStream = meta['fileStream']; + let encryptionMetadata; + let dataFileStream = meta['fileStream']; if (meta['encryptionMaterial']) { - var result = await SnowflakeEncryptionUtil.encryptFileStream( + const result = await SnowflakeEncryptionUtil.encryptFileStream( meta['encryptionMaterial'], meta['fileStream']); encryptionMetadata = result.encryptionMetadata; dataFileStream = result.dataStream; } - var utilClass = this.getForStorageType(meta['stageInfo']['locationType']); + const utilClass = this.getForStorageType(meta['stageInfo']['locationType']); - var maxConcurrency = meta['parallel']; - var lastErr; - var maxRetry = DEFAULT_MAX_RETRY; + let maxConcurrency = meta['parallel']; + let lastErr; + const maxRetry = DEFAULT_MAX_RETRY; - for (var retry = 0; retry < maxRetry; retry++) { + for (let retry = 0; retry < maxRetry; retry++) { if (!meta['overwrite']) { - var fileHeader = await utilClass.getFileHeader(meta, meta['dstFileName']); + const fileHeader = await utilClass.getFileHeader(meta, meta['dstFileName']); if (fileHeader && meta['resultStatus'] === resultStatus.UPLOADED) { // File already exists @@ -136,7 +132,7 @@ function remote_storage_util() { lastErr = meta['lastError']; // Failed to upload file, retrying if (!meta['noSleepingTime']) { - var sleepingTime = 
Math.min(Math.pow(2, retry), 16); + const sleepingTime = Math.min(Math.pow(2, retry), 16); await new Promise(resolve => setTimeout(resolve, sleepingTime)); } } else if (meta['resultStatus'] === resultStatus.NEED_RETRY_WITH_LOWER_CONCURRENCY) { @@ -147,7 +143,7 @@ function remote_storage_util() { meta['lastMaxConcurrency'] = maxConcurrency; if (!meta['noSleepingTime']) { - var sleepingTime = Math.min(Math.pow(2, retry), 16); + const sleepingTime = Math.min(Math.pow(2, retry), 16); await new Promise(resolve => setTimeout(resolve, sleepingTime)); } } @@ -155,7 +151,7 @@ function remote_storage_util() { if (lastErr) { throw new Error(lastErr); } else { - var msg = 'Unknown Error in uploading a file: ' + dataFile; + const msg = 'Unknown Error in uploading a file: ' + meta['srcFileName']; throw new Error(msg); } }; @@ -168,11 +164,11 @@ function remote_storage_util() { * @returns {null} */ this.uploadOneFile = async function (meta) { - var encryptionMetadata; - var dataFile; + let encryptionMetadata; + let dataFile; if (meta['encryptionMaterial']) { - var result = await SnowflakeEncryptionUtil.encryptFile( + const result = await SnowflakeEncryptionUtil.encryptFile( meta['encryptionMaterial'], meta['realSrcFilePath'], meta['tmpDir']); @@ -182,15 +178,15 @@ function remote_storage_util() { dataFile = meta['realSrcFilePath']; } - var utilClass = this.getForStorageType(meta['stageInfo']['locationType']); + const utilClass = this.getForStorageType(meta['stageInfo']['locationType']); - var maxConcurrency = meta['parallel']; - var lastErr; - var maxRetry = DEFAULT_MAX_RETRY; + let maxConcurrency = meta['parallel']; + let lastErr; + const maxRetry = DEFAULT_MAX_RETRY; - for (var retry = 0; retry < maxRetry; retry++) { + for (let retry = 0; retry < maxRetry; retry++) { if (!meta['overwrite']) { - var fileHeader = await utilClass.getFileHeader(meta, meta['dstFileName']); + const fileHeader = await utilClass.getFileHeader(meta, meta['dstFileName']); if (fileHeader && meta['resultStatus'] === resultStatus.UPLOADED) { // File already exists @@ -217,7 +213,7 @@ function remote_storage_util() { lastErr = meta['lastError']; // Failed to upload file, retrying if (!meta['noSleepingTime']) { - var sleepingTime = Math.min(Math.pow(2, retry), 16); + const sleepingTime = Math.min(Math.pow(2, retry), 16); await new Promise(resolve => setTimeout(resolve, sleepingTime)); } } else if (meta['resultStatus'] === resultStatus.NEED_RETRY_WITH_LOWER_CONCURRENCY) { @@ -228,7 +224,7 @@ function remote_storage_util() { meta['lastMaxConcurrency'] = maxConcurrency; if (!meta['noSleepingTime']) { - var sleepingTime = Math.min(Math.pow(2, retry), 16); + const sleepingTime = Math.min(Math.pow(2, retry), 16); await new Promise(resolve => setTimeout(resolve, sleepingTime)); } } @@ -236,7 +232,7 @@ function remote_storage_util() { if (lastErr) { throw new Error(lastErr); } else { - var msg = 'Unknown Error in uploading a file: ' + dataFile; + const msg = 'Unknown Error in uploading a file: ' + dataFile; throw new Error(msg); } }; @@ -249,14 +245,14 @@ function remote_storage_util() { * @returns {null} */ this.uploadOneFileWithRetry = async function (meta) { - var utilClass = this.getForStorageType(meta['stageInfo']['locationType']); + const utilClass = this.getForStorageType(meta['stageInfo']['locationType']); - var breakFlag = false; - for (var x = 0; x < 10; x++) { + let breakFlag = false; + for (let x = 0; x < 10; x++) { await this.uploadOneFile(meta); if (meta['resultStatus'] === resultStatus.UPLOADED) { - for (var y = 0; y < 10; 
y++) { + for (let y = 0; y < 10; y++) { await utilClass.getFileHeader(meta, meta['dstFileName']); if (meta['resultStatus'] === resultStatus.NOT_FOUND_FILE) { // Wait 1 second @@ -284,7 +280,7 @@ function remote_storage_util() { */ this.downloadOneFile = async function (meta) { // Downloads a file from S3 - var fullDstPath = meta['localLocation']; + let fullDstPath = meta['localLocation']; await new Promise((resolve, reject) => { fs.realpath(fullDstPath, (err, basePath) => { if (err) { @@ -296,7 +292,7 @@ function remote_storage_util() { }); // TODO: validate fullDstPath is under the writable directory - var baseDir = path.dirname(fullDstPath); + const baseDir = path.dirname(fullDstPath); await new Promise((resolve) => { fs.exists(baseDir, (exists) => { if (!exists) { @@ -309,18 +305,18 @@ function remote_storage_util() { }); }); - var utilClass = this.getForStorageType(meta['stageInfo']['locationType']); - var fileHeader = await utilClass.getFileHeader(meta, meta['srcFileName']); + const utilClass = this.getForStorageType(meta['stageInfo']['locationType']); + let fileHeader = await utilClass.getFileHeader(meta, meta['srcFileName']); if (fileHeader) { meta['srcFileSize'] = fileHeader.contentLength; } - var maxConcurrency = meta['parallel']; - var lastErr; - var maxRetry = DEFAULT_MAX_RETRY; + let maxConcurrency = meta['parallel']; + let lastErr; + const maxRetry = DEFAULT_MAX_RETRY; - for (var retry = 0; retry < maxRetry; retry++) { + for (let retry = 0; retry < maxRetry; retry++) { // Download the file await utilClass.nativeDownloadFile(meta, fullDstPath, maxConcurrency); @@ -340,7 +336,7 @@ function remote_storage_util() { fileHeader = await utilClass.getFileHeader(meta, meta['srcFilePath']); } - var tmpDstName = await SnowflakeEncryptionUtil.decryptFile( + const tmpDstName = await SnowflakeEncryptionUtil.decryptFile( fileHeader.encryptionMetadata, meta['encryptionMaterial'], fullDstPath, @@ -396,14 +392,14 @@ function remote_storage_util() { meta['lastMaxConcurrency'] = maxConcurrency; if (!meta['noSleepingTime']) { - var sleepingTime = Math.min(Math.pow(2, retry), 16); + const sleepingTime = Math.min(Math.pow(2, retry), 16); await new Promise(resolve => setTimeout(resolve, sleepingTime)); } } else if (meta['resultStatus'] === resultStatus.NEED_RETRY) { lastErr = meta['lastError']; // Failed to download file, retrying if (!meta['noSleepingTime']) { - var sleepingTime = Math.min(Math.pow(2, retry), 16); + const sleepingTime = Math.min(Math.pow(2, retry), 16); await new Promise(resolve => setTimeout(resolve, sleepingTime)); } } @@ -411,10 +407,10 @@ function remote_storage_util() { if (lastErr) { throw new Error(lastErr); } else { - var msg = 'Unknown Error in uploading a file: ' + dataFile; + const msg = 'Unknown Error in uploading a file: ' + meta['srcFileName']; throw new Error(msg); } }; } -exports.remote_storage_util = remote_storage_util; +exports.RemoteStorageUtil = RemoteStorageUtil; diff --git a/lib/file_transfer_agent/s3_util.js b/lib/file_transfer_agent/s3_util.js index f9f9c09a9..522821125 100644 --- a/lib/file_transfer_agent/s3_util.js +++ b/lib/file_transfer_agent/s3_util.js @@ -37,7 +37,7 @@ function S3Location(bucketName, s3path) { * @returns {Object} * @constructor */ -function s3_util(s3, filestream) { +function S3Util(s3, filestream) { const AWS = typeof s3 !== 'undefined' ? s3 : require('@aws-sdk/client-s3'); const fs = typeof filestream !== 'undefined' ? 
filestream : require('fs'); @@ -283,4 +283,4 @@ function s3_util(s3, filestream) { }; } -module.exports = s3_util; +module.exports = S3Util; diff --git a/lib/global_config.js b/lib/global_config.js index ebe5024f7..e9041f0a1 100644 --- a/lib/global_config.js +++ b/lib/global_config.js @@ -102,7 +102,7 @@ exports.getOcspResponseCacheMaxAge = function () { // change max age here because customer would have local cache file exist // already and we need to keep that valid with new version of the driver. // use small value for test only - var maxage = Number(process.env.SF_OCSP_TEST_CACHE_MAXAGE) || 86400; + let maxage = Number(process.env.SF_OCSP_TEST_CACHE_MAXAGE) || 86400; if ((maxage > 86400) || (maxage <= 0)) { maxage = 86400; } diff --git a/lib/http/base.js b/lib/http/base.js index 26a2027e0..b65d7c7f1 100644 --- a/lib/http/base.js +++ b/lib/http/base.js @@ -4,7 +4,6 @@ const zlib = require('zlib'); const Util = require('../util'); -const Errors = require('../errors'); const Logger = require('../logger'); const axios = require('axios'); const URL = require('node:url').URL; @@ -180,7 +179,7 @@ HttpClient.prototype.getRequestModule = function () { * * @returns {*} */ -HttpClient.prototype.getAgent = function (url, proxy, mock) { +HttpClient.prototype.getAgent = function () { return null; }; @@ -196,7 +195,7 @@ function prepareRequestOptions(options) { let data = options.data || options.json; if (data) { - var bufferUncompressed = Buffer.from(JSON.stringify(data), 'utf8'); + const bufferUncompressed = Buffer.from(JSON.stringify(data), 'utf8'); zlib.gzip(bufferUncompressed, null, function (err, bufferCompressed) { // if the compression was successful if (!err) { @@ -252,7 +251,7 @@ function prepareRequestOptions(options) { * @returns {Object} */ function normalizeHeaders(headers) { - var ret = headers; + let ret = headers; if (Util.isObject(headers)) { ret = { @@ -268,8 +267,8 @@ function normalizeHeaders(headers) { // browser-request will inject its own 'accept': 'application/json' header // and the browser XMLHttpRequest object will concatenate the two values and // send 'Accept': 'application/json, application/json' with the request - var headerNameLowerCase; - for (var headerName in headers) { + let headerNameLowerCase; + for (const headerName in headers) { if (Object.prototype.hasOwnProperty.call(headers, headerName)) { headerNameLowerCase = headerName.toLowerCase(); if ((headerNameLowerCase === 'accept') || diff --git a/lib/http/browser.js b/lib/http/browser.js index 5992a8950..9b2a27d2a 100644 --- a/lib/http/browser.js +++ b/lib/http/browser.js @@ -2,9 +2,9 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../util'); -var request = require('browser-request'); -var Base = require('./base'); +const Util = require('../util'); +const request = require('browser-request'); +const Base = require('./base'); /** * Creates a client that can be used to make requests in the browser. 
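A quick note on the Base.apply(this, [connectionConfig]) pattern used by the HTTP client constructors in the hunks that follow: Function.prototype.apply expects an array-like argument list as its second argument, so a lone config object has to be wrapped in an array (or Function.prototype.call used instead). Passing the object bare would, for a typical object with no length property, invoke the parent constructor with no arguments at all. A minimal sketch of the idea, using hypothetical Base/Child constructors rather than the driver's real classes:

function Base(config) {
  this.config = config;
}

function Child(config) {
  // apply takes an array-like argument list; call takes the arguments directly.
  Base.apply(this, [config]);
  // Base.call(this, config); // equivalent
}

const child = new Child({ host: 'account.example' });
console.log(child.config.host); // prints 'account.example'

This is why NodeHttpClient below wraps connectionConfig in an array, and why BrowserHttpClient should do the same.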
@@ -13,7 +13,7 @@ var Base = require('./base'); * @constructor */ function BrowserHttpClient(connectionConfig) { - Base.apply(this, arguments); + Base.apply(this, [connectionConfig]); } Util.inherits(BrowserHttpClient, Base); diff --git a/lib/http/node.js b/lib/http/node.js index 64c237be0..57a327f1f 100644 --- a/lib/http/node.js +++ b/lib/http/node.js @@ -9,7 +9,6 @@ const HttpsProxyAgent = require('../agent/https_proxy_ocsp_agent'); const HttpAgent = require('http').Agent; const GlobalConfig = require('../../lib/global_config'); const Logger = require('../logger'); -const Url = require('url'); /** * Returns the delay time calculated by exponential backoff with @@ -36,7 +35,7 @@ NodeHttpClient.prototype.constructExponentialBackoffStrategy = function () { * @constructor */ function NodeHttpClient(connectionConfig) { - Base.apply(this, arguments); + Base.apply(this, [connectionConfig]); } Util.inherits(NodeHttpClient, Base); @@ -85,11 +84,11 @@ function prepareProxyAgentOptions(agentOptions, proxy) { } } -function isBypassProxy(proxy, url, bypassProxy) { +function isBypassProxy(proxy, url) { if (proxy && proxy.noProxy) { const bypassList = proxy.noProxy.split('|'); for (let i = 0; i < bypassList.length; i++) { - host = bypassList[i].trim(); + let host = bypassList[i].trim(); host = host.replace('*', '.*?'); const matches = url.match(host); if (matches) { diff --git a/lib/logger.js b/lib/logger.js index 080d187b8..3f4dd5b65 100644 --- a/lib/logger.js +++ b/lib/logger.js @@ -2,9 +2,9 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var BrowserLogger = require('./logger/browser'); +const BrowserLogger = require('./logger/browser'); -var instance; +let instance; /** * Sets the logger instance. For internal use only. diff --git a/lib/logger/browser.js b/lib/logger/browser.js index c38079935..335d9f62f 100644 --- a/lib/logger/browser.js +++ b/lib/logger/browser.js @@ -2,8 +2,8 @@ * Copyright (c) 2015-2023 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../util'); -var Core = require('./core'); +const Util = require('../util'); +const Core = require('./core'); /** * Creates a new Logger instance for when we're running in the browser. @@ -18,7 +18,7 @@ function Logger(options) { * * @type {String[]} */ - var buffer = []; + const buffer = []; /** * Logs a message at a given level. @@ -29,7 +29,7 @@ function Logger(options) { * @param {Number} bufferMaxLength the maximum size to which the message * buffer can grow. */ - var logMessage = function (levelTag, message, bufferMaxLength) { + const logMessage = function (levelTag, message, bufferMaxLength) { // add the log level tag (e.g. info, warn, etc.) to the front of the message message = Util.format('%s: %s', levelTag, message); @@ -43,7 +43,7 @@ function Logger(options) { }; // create an inner implementation to which all our methods will be forwarded - var common = Core.createLogger(options, logMessage); + const common = Core.createLogger(options, logMessage); /** * Configures this logger. @@ -67,45 +67,50 @@ function Logger(options) { * Logs a given message at the error level. * * @param {String} message + * @param params */ - this.error = function (message) { - common.error.apply(common, arguments); + this.error = function (message, ...params) { + common.error.apply(common, [message, ...params]); }; /** * Logs a given message at the warning level. 
* * @param {String} message + * @param params */ - this.warn = function (message) { - common.warn.apply(common, arguments); + this.warn = function (message, ...params) { + common.warn.apply(common, [message, ...params]); }; /** * Logs a given message at the info level. * * @param {String} message + * @param params */ - this.info = function (message) { - common.info.apply(common, arguments); + this.info = function (message, ...params) { + common.info.apply(common, [message, ...params]); }; /** * Logs a given message at the debug level. * * @param {String} message + * @param params */ - this.debug = function (message) { - common.debug.apply(common, arguments); + this.debug = function (message, ...params) { + common.debug.apply(common, [message, ...params]); }; /** * Logs a given message at the trace level. * * @param {String} message + * @param params */ - this.trace = function (message) { - common.trace.apply(common, arguments); + this.trace = function (message, ...params) { + common.trace.apply(common, [message, ...params]); }; /** diff --git a/lib/logger/node.js b/lib/logger/node.js index fa1557ac2..1e1fc1cb5 100644 --- a/lib/logger/node.js +++ b/lib/logger/node.js @@ -2,8 +2,8 @@ * Copyright (c) 2015-2023 Snowflake Computing Inc. All rights reserved. */ -var winston = require('winston'); -var Core = require('./core'); +const winston = require('winston'); +const Core = require('./core'); const Util = require('../util'); const Errors = require('../errors'); @@ -15,8 +15,7 @@ const Errors = require('../errors'); * @constructor */ function Logger(options) { - var common; - var winstonLogger; + let winstonLogger; const defaultFilePath = 'snowflake.log'; let filePath = getFilePath(options); @@ -46,7 +45,7 @@ function Logger(options) { } } - function closeTransport(transport, timeoutMillis) { + function closeTransport(transport) { if (!transport.close) { return; } @@ -68,10 +67,8 @@ function Logger(options) { * @param {String} levelTag the tag associated with the level at which to log * the message. * @param {String} message the message to log. - * @param {Number} bufferMaxLength the maximum size to which the message - * buffer can grow. */ - var logMessage = function (levelTag, message, bufferMaxLength) { + const logMessage = function (levelTag, message) { // initialize the winston logger if needed if (!winstonLogger) { const transports = 'STDOUT' === filePath.toUpperCase() @@ -94,7 +91,7 @@ function Logger(options) { }; // create an inner implementation to which all our methods will be forwarded - common = Core.createLogger(options, logMessage, reconfigureWinstonLogger); + const common = Core.createLogger(options, logMessage, reconfigureWinstonLogger); function getFilePath(options) { if (Util.exists(options)) { @@ -134,45 +131,50 @@ function Logger(options) { * Logs a given message at the error level. * * @param {String} message + * @param params */ - this.error = function (message) { - common.error.apply(common, arguments); + this.error = function (message, ...params) { + common.error.apply(common, [message, ...params]); }; /** * Logs a given message at the warning level. * * @param {String} message + * @param params */ - this.warn = function (message) { - common.warn.apply(common, arguments); + this.warn = function (message, ...params) { + common.warn.apply(common, [message, ...params]); }; /** * Logs a given message at the info level. 
* * @param {String} message + * @param params */ - this.info = function (message) { - common.info.apply(common, arguments); + this.info = function (message, ...params) { + common.info.apply(common, [message, ...params]); }; /** * Logs a given message at the debug level. * * @param {String} message + * @param params */ - this.debug = function (message) { - common.debug.apply(common, arguments); + this.debug = function (message, ...params) { + common.debug.apply(common, [message, ...params]); }; /** * Logs a given message at the trace level. * * @param {String} message + * @param params */ - this.trace = function (message) { - common.trace.apply(common, arguments); + this.trace = function (message, ...params) { + common.trace.apply(common, [message, ...params]); }; /** diff --git a/lib/parameters.js b/lib/parameters.js index 9e372ac69..90db951bb 100644 --- a/lib/parameters.js +++ b/lib/parameters.js @@ -2,8 +2,8 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('./util'); -var Errors = require('./errors'); +const Util = require('./util'); +const Errors = require('./errors'); /** * Creates a new Parameter. @@ -17,8 +17,8 @@ function Parameter(options) { Errors.assertInternal(Util.isString(options.name)); Errors.assertInternal(Util.exists(options.value)); - var name = options.name; - var value = options.value; + const name = options.name; + let value = options.value; /** * Returns the name of the parameter. @@ -48,7 +48,7 @@ function Parameter(options) { }; } -var names = exports.names = {}; +const names = exports.names = {}; names.JS_DRIVER_DISABLE_OCSP_FOR_NON_SF_ENDPOINTS = 'JS_DRIVER_DISABLE_OCSP_FOR_NON_SF_ENDPOINTS'; names.SERVICE_NAME = 'SERVICE_NAME'; names.CLIENT_SESSION_KEEP_ALIVE = 'CLIENT_SESSION_KEEP_ALIVE'; @@ -58,7 +58,7 @@ names.CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = 'CLIENT_STAGE_ARRAY_BINDING_THRESHO names.MULTI_STATEMENT_COUNT = 'MULTI_STATEMENT_COUNT'; names.QUERY_CONTEXT_CACHE_SIZE = 'QUERY_CONTEXT_CACHE_SIZE'; -var parameters = +const parameters = [ new Parameter( { @@ -112,9 +112,9 @@ var parameters = ]; // put all the parameters in a map so they're easy to retrieve and update -var mapParamNameToParam = {}; -for (var index = 0, length = parameters.length; index < length; index++) { - var parameter = parameters[index]; +const mapParamNameToParam = {}; +for (let index = 0, length = parameters.length; index < length; index++) { + const parameter = parameters[index]; mapParamNameToParam[parameter.getName()] = parameter; } @@ -127,7 +127,7 @@ for (var index = 0, length = parameters.length; index < length; index++) { */ exports.getValue = function (parameterName) { // resolve the parameter name - var parameter = mapParamNameToParam[parameterName]; + const parameter = mapParamNameToParam[parameterName]; // verify that a valid parameter name was specified Errors.assertInternal( @@ -148,11 +148,11 @@ exports.update = function (parametersConfig) { // if any of the items in the configs array matches a known // parameter, update the corresponding parameter's value - for (var index = 0, length = parametersConfig.length; + for (let index = 0, length = parametersConfig.length; index < length; index++) { - var parameterConfig = parametersConfig[index]; + const parameterConfig = parametersConfig[index]; if (Object.prototype.hasOwnProperty.call(mapParamNameToParam, parameterConfig.name)) { - var parameter = mapParamNameToParam[parameterConfig.name]; + const parameter = mapParamNameToParam[parameterConfig.name]; 
parameter.setValue(parameterConfig.value); } } diff --git a/lib/secret_detector.js b/lib/secret_detector.js index 018f5f7fb..b2f071417 100644 --- a/lib/secret_detector.js +++ b/lib/secret_detector.js @@ -14,10 +14,10 @@ * @returns {Object} * @constructor */ -function secret_detector(customPatterns, mock) { - var CUSTOM_PATTERNS_REGEX = []; - var CUSTOM_PATTERNS_MASK = []; - var CUSTOM_PATTERNS_LENGTH; +function SecretDetector(customPatterns, mock) { + const CUSTOM_PATTERNS_REGEX = []; + const CUSTOM_PATTERNS_MASK = []; + let CUSTOM_PATTERNS_LENGTH; if (customPatterns) { // Check that the customPatterns object contains the keys 'regex' and 'mask @@ -35,15 +35,15 @@ function secret_detector(customPatterns, mock) { CUSTOM_PATTERNS_LENGTH = customPatterns.regex.length; // Push the regex and mask elements onto their respective arrays - for (var index = 0; index < CUSTOM_PATTERNS_LENGTH; index++) { + for (let index = 0; index < CUSTOM_PATTERNS_LENGTH; index++) { CUSTOM_PATTERNS_REGEX.push(new RegExp(`${customPatterns.regex[index]}`, 'gi')); CUSTOM_PATTERNS_MASK.push(String.raw`${customPatterns.mask[index]}`); } } function maskCustomPattern(text) { - var result; - for (var index = 0; index < CUSTOM_PATTERNS_LENGTH; index++) { + let result; + for (let index = 0; index < CUSTOM_PATTERNS_LENGTH; index++) { result = text.replace(CUSTOM_PATTERNS_REGEX[index], CUSTOM_PATTERNS_MASK[index]); // If the text is replaced, return the result if (text !== result) { @@ -108,7 +108,7 @@ function secret_detector(customPatterns, mock) { * @returns {Object} the masked string. */ this.maskSecrets = function (text) { - var result; + let result; if (!text) { result = { @@ -119,9 +119,9 @@ function secret_detector(customPatterns, mock) { return result; } - var masked = false; - var maskedtxt = ''; - var errstr = null; + let masked = false; + let maskedtxt = ''; + let errstr = null; try { if (mock) { mock.execute(); @@ -163,4 +163,4 @@ function secret_detector(customPatterns, mock) { }; } -module.exports = secret_detector; +module.exports = SecretDetector; diff --git a/lib/services/sf.js b/lib/services/sf.js index d92d7d2c2..e059ac3e1 100644 --- a/lib/services/sf.js +++ b/lib/services/sf.js @@ -44,7 +44,6 @@ - destroy() - Disconnected */ -const axios = require('axios'); const { v4: uuidv4 } = require('uuid'); const EventEmitter = require('events').EventEmitter; const Util = require('../util'); @@ -92,7 +91,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { // if a config object was specified, verify // that it has all the information we need - var tokenInfoConfig; + let tokenInfoConfig; if (Util.exists(config)) { Errors.assertInternal(Util.isObject(config)); Errors.assertInternal(Util.isObject(config.tokenInfo)); @@ -108,24 +107,23 @@ function SnowflakeService(connectionConfig, httpClient, config) { } // create a new TokenInfo instance - var tokenInfo = new TokenInfo(tokenInfoConfig); + const tokenInfo = new TokenInfo(tokenInfoConfig); // create state objects for all the different states we can be in - var stateOptions = + const stateOptions = { snowflakeService: this, httpClient: httpClient, connectionConfig: connectionConfig, tokenInfo: tokenInfo }; - var statePristine = new StatePristine(stateOptions); - var stateConnecting = new StateConnecting(stateOptions); - var stateConnected = new StateConnected(stateOptions); - var stateRenewing = new StateRenewing(stateOptions); - var stateDisconnected = new StateDisconnected(stateOptions); + const statePristine = new StatePristine(stateOptions); + const 
stateConnecting = new StateConnecting(stateOptions); + const stateConnected = new StateConnected(stateOptions); + const stateRenewing = new StateRenewing(stateOptions); + const stateDisconnected = new StateDisconnected(stateOptions); - var currentState; - var isStageCreated = false; + let currentState; /** * Transitions to a given state. @@ -133,7 +131,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { * @param {Object} state * @param {Object} [transitionContext] */ - var transitionTo = function (state, transitionContext) { + const transitionTo = function (state, transitionContext) { // this check is necessary to make sure we don't re-enter a transient state // like Renewing when we're already in it if (currentState !== state) { @@ -291,7 +289,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { * @constructor */ function OperationConnect(options) { - OperationAbstract.apply(this, arguments); + OperationAbstract.apply(this, [options]); } Util.inherits(OperationConnect, OperationAbstract); @@ -301,7 +299,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { */ OperationConnect.prototype.validate = function () { // verify that the options object contains a callback function - var options = this.options; + const options = this.options; Errors.assertInternal( (Util.isObject(options) && Util.isFunction(options.callback))); @@ -322,7 +320,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { * @constructor */ function OperationContinue(options) { - OperationAbstract.apply(this, arguments); + OperationAbstract.apply(this, [options]); } Util.inherits(OperationContinue, OperationAbstract); @@ -332,7 +330,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { */ OperationContinue.prototype.validate = function () { // verify that the options contain a json object - var options = this.options; + const options = this.options; Errors.assertInternal( Util.isObject(options) && Util.isObject(options.json)); @@ -353,7 +351,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { * @constructor */ function OperationRequest(options) { - OperationAbstract.apply(this, arguments); + OperationAbstract.apply(this, [options]); } Util.inherits(OperationRequest, OperationAbstract); @@ -363,7 +361,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { */ OperationRequest.prototype.validate = function () { // verify that the options object contains all the necessary information - var options = this.options; + const options = this.options; Errors.assertInternal(Util.isObject(options)); Errors.assertInternal(Util.isString(options.method)); Errors.assertInternal( @@ -396,7 +394,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { * @constructor */ function OperationDestroy(options) { - OperationAbstract.apply(this, arguments); + OperationAbstract.apply(this, [options]); } Util.inherits(OperationDestroy, OperationAbstract); @@ -406,7 +404,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { */ OperationDestroy.prototype.validate = function () { // verify that the options object contains a callback function - var options = this.options; + const options = this.options; Errors.assertInternal(Util.isObject(options) && Util.isFunction(options.callback)); @@ -422,7 +420,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { }; /* All queued operations will be added to this array */ - var operationQueue = []; + const operationQueue = []; /** * Appends a request 
operation to the queue. @@ -447,7 +445,7 @@ function SnowflakeService(connectionConfig, httpClient, config) { */ this.drainOperationQueue = function () { // execute all the operations in the queue - for (var index = 0, length = operationQueue.length; index < length; index++) { + for (let index = 0, length = operationQueue.length; index < length; index++) { operationQueue[index].execute(); } @@ -579,7 +577,7 @@ function StateAbstract(options) { response.statusCode !== 200) { // if we didn't get a 200, the request failed if (response.statusCode === 401 && response.body) { - var innerCode; + let innerCode; try { innerCode = JSON.parse(response.body).code; } catch (e) { @@ -622,7 +620,7 @@ function StateAbstract(options) { // if we were able to successfully json-parse the body and the // success flag is false, the operation we tried to perform failed if (body && !body.success) { - var data = body.data; + const data = body.data; err = Errors.createOperationFailedError( body.code, data, body.message, data && data.sqlState ? data.sqlState : undefined); @@ -656,8 +654,8 @@ function StateAbstract(options) { this.connectionConfig = options.connectionConfig; this.tokenInfo = options.tokenInfo; - var httpClient = options.httpClient; - var connectionConfig = options.connectionConfig; + const httpClient = options.httpClient; + const connectionConfig = options.connectionConfig; /////////////////////////////////////////////////////////////////////////// //// Request //// @@ -862,7 +860,7 @@ function StateAbstract(options) { * @param {Object} [context] * @abstract */ -StateAbstract.prototype.enter = function (context) { +StateAbstract.prototype.enter = function () { }; /** @@ -878,7 +876,7 @@ StateAbstract.prototype.exit = function () { * @param {Object} options * @abstract */ -StateAbstract.prototype.connect = function (options) { +StateAbstract.prototype.connect = function () { }; /** @@ -887,7 +885,7 @@ StateAbstract.prototype.connect = function (options) { * @param {Object} [options] * @abstract */ -StateAbstract.prototype.continue = function (options) { +StateAbstract.prototype.continue = function () { }; /** @@ -896,7 +894,7 @@ StateAbstract.prototype.continue = function (options) { * @param {Object} options * @abstract */ -StateAbstract.prototype.request = function (options) { +StateAbstract.prototype.request = function () { }; /** @@ -905,7 +903,7 @@ StateAbstract.prototype.request = function (options) { * @param {Object} options * @abstract */ -StateAbstract.prototype.destroy = function (options) { +StateAbstract.prototype.destroy = function () { }; /////////////////////////////////////////////////////////////////////////// @@ -934,7 +932,7 @@ StatePristine.prototype.connect = function (options) { * @inheritDoc */ StatePristine.prototype.request = function (options) { - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_REQUEST_STATUS_PRISTINE)); @@ -947,7 +945,7 @@ StatePristine.prototype.request = function (options) { StatePristine.prototype.destroy = function (options) { // we're still in the preconnected state so any // attempts to destroy should result in an error - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_DESTROY_STATUS_PRISTINE)); @@ -989,7 +987,7 @@ StateConnecting.prototype.exit = function () { StateConnecting.prototype.connect = function (options) { // we're already 
connecting so any attempts // to connect should result in an error - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_CONNECT_STATUS_CONNECTING)); @@ -1000,9 +998,9 @@ StateConnecting.prototype.connect = function (options) { * @inheritDoc */ StateConnecting.prototype.continue = function () { - var context = this.context; - var err = context.options.err; - var json = context.options.json; + const context = this.context; + const err = context.options.err; + let json = context.options.json; // if no json was specified, treat this as the first connect // and get the necessary information from connectionConfig @@ -1027,24 +1025,24 @@ StateConnecting.prototype.continue = function () { json.data = json.data || {}; // add the client-app-id, client-app-version, and client-app-name - var clientInfo = + const clientInfo = { CLIENT_APP_ID: this.connectionConfig.getClientType(), CLIENT_APP_VERSION: this.connectionConfig.getClientVersion(), }; // if we have some information about the client environment, add it as well - var clientEnvironment = this.connectionConfig.getClientEnvironment(); + const clientEnvironment = this.connectionConfig.getClientEnvironment(); if (Util.isObject(clientEnvironment)) { clientInfo.CLIENT_ENVIRONMENT = clientEnvironment; } - var clientApplication = this.connectionConfig.getClientApplication(); + const clientApplication = this.connectionConfig.getClientApplication(); if (Util.isString(clientApplication)) { clientEnvironment['APPLICATION'] = clientApplication; } - var sessionParameters = + const sessionParameters = { SESSION_PARAMETERS: {} }; @@ -1167,7 +1165,7 @@ StateConnecting.prototype.continue = function () { * @returns {*} */ function buildLoginUrl(connectionConfig) { - var queryParams = + const queryParams = [ { name: 'warehouse', value: connectionConfig.getWarehouse() }, { name: 'databaseName', value: connectionConfig.getDatabase() }, @@ -1175,13 +1173,13 @@ function buildLoginUrl(connectionConfig) { { name: 'roleName', value: connectionConfig.getRole() } ]; - var queryStringObject = {}; + const queryStringObject = {}; if (!connectionConfig.isQaMode()) { // no requestId is attached to login-request in test mode. 
queryStringObject.requestId = uuidv4(); } - for (var index = 0, length = queryParams.length; index < length; index++) { - var queryParam = queryParams[index]; + for (let index = 0, length = queryParams.length; index < length; index++) { + const queryParam = queryParams[index]; if (Util.string.isNotNullOrEmpty(queryParam.value)) { queryStringObject[queryParam.name] = queryParam.value; } @@ -1226,7 +1224,7 @@ Util.inherits(StateConnected, StateAbstract); StateConnected.prototype.connect = function (options) { // we're already connected so any attempts // to connect should result in an error - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_CONNECT_STATUS_CONNECTED)); @@ -1242,8 +1240,8 @@ StateConnected.prototype.requestAsync = async function (options) { * @inheritDoc */ StateConnected.prototype.request = function (options) { - var scopeOrig = options.scope; - var callbackOrig = options.callback; + const scopeOrig = options.scope; + const callbackOrig = options.callback; // define our own scope and callback options.scope = this; @@ -1251,7 +1249,7 @@ StateConnected.prototype.request = function (options) { // if there was no error, invoke the callback if one was specified if (!err) { if (Util.isFunction(callbackOrig)) { - await callbackOrig.apply(scopeOrig, arguments); + await callbackOrig.apply(scopeOrig, [err, body]); } } else { // restore the original scope and callback to the options object because @@ -1299,7 +1297,7 @@ StateConnected.prototype.request = function (options) { * @inheritDoc */ StateConnected.prototype.destroy = function (options) { - var requestID = uuidv4(); + const requestID = uuidv4(); // send out a session token request to terminate the current connection this.createSessionTokenRequest( @@ -1307,7 +1305,7 @@ StateConnected.prototype.destroy = function (options) { method: 'POST', url: `/session?delete=true&requestId=${requestID}`, scope: this, - callback: function (err, body) { + callback: function (err) { // if the destroy request succeeded or the session already expired, we're disconnected if (!err || err.code === GSErrors.code.GONE_SESSION || err.code === GSErrors.code.SESSION_TOKEN_EXPIRED) { err = undefined; @@ -1333,7 +1331,7 @@ Util.inherits(StateRenewing, StateAbstract); /** * @inheritDoc */ -StateRenewing.prototype.enter = function (context) { +StateRenewing.prototype.enter = function () { // send out a master token request to renew the current session token this.createMasterTokenRequest( { @@ -1384,7 +1382,7 @@ StateRenewing.prototype.enter = function (context) { StateRenewing.prototype.connect = function (options) { // we're renewing the session token, which means we're connected, // so any attempts to connect should result in an error - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_CONNECT_STATUS_CONNECTED)); @@ -1423,7 +1421,7 @@ Util.inherits(StateDisconnected, StateAbstract); StateDisconnected.prototype.connect = function (options) { // we're disconnected -- and fatally so -- so any // attempts to connect should result in an error - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_CONNECT_STATUS_DISCONNECTED)); @@ -1436,7 +1434,7 @@ StateDisconnected.prototype.connect = function (options) { StateDisconnected.prototype.request = function 
(options) { // we're disconnected, so any attempts to // send a request should result in an error - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_REQUEST_STATUS_DISCONNECTED, true)); @@ -1449,7 +1447,7 @@ StateDisconnected.prototype.request = function (options) { StateDisconnected.prototype.destroy = function (options) { // we're already disconnected so any attempts // to destroy should result in an error - var callback = options.callback; + const callback = options.callback; process.nextTick(function () { callback(Errors.createClientError( ErrorCodes.ERR_CONN_DESTROY_STATUS_DISCONNECTED)); @@ -1465,10 +1463,10 @@ StateDisconnected.prototype.destroy = function (options) { * @constructor */ function TokenInfo(config) { - var masterToken; - var sessionToken; - var masterTokenExpirationTime; - var sessionTokenExpirationTime; + let masterToken; + let sessionToken; + let masterTokenExpirationTime; + let sessionTokenExpirationTime; if (Util.isObject(config)) { masterToken = config.masterToken; @@ -1508,7 +1506,7 @@ function TokenInfo(config) { masterToken = data.masterToken; sessionToken = data.token || data.sessionToken; - var currentTime = new Date().getTime(); + const currentTime = new Date().getTime(); masterTokenExpirationTime = currentTime + 1000 * (data.masterValidityInSeconds || diff --git a/lib/snowflake.js b/lib/snowflake.js index 7c19844a7..27301b7fb 100644 --- a/lib/snowflake.js +++ b/lib/snowflake.js @@ -2,15 +2,15 @@ * Copyright (c) 2015-2021 Snowflake Computing Inc. All rights reserved. */ -var core = require('./core'); -var Util = require('./util'); -var Errors = require('./errors'); -var ErrorCodes = Errors.codes; +const core = require('./core'); +const Util = require('./util'); +const Errors = require('./errors'); +const ErrorCodes = Errors.codes; -var clientEnvironment = process.versions; +const clientEnvironment = process.versions; // if we're not using the minimum supported version of node.js, raise an error -var minimumNodeVersion = '6.0.0'; +const minimumNodeVersion = '6.0.0'; if (Util.string.compareVersions(clientEnvironment.node, minimumNodeVersion) < 0) { throw Errors.createClientError( ErrorCodes.ERR_UNSUPPORTED_NODE_JS_VERSION, true, minimumNodeVersion); diff --git a/lib/url_util.js b/lib/url_util.js index 3075f78b4..3e584270d 100644 --- a/lib/url_util.js +++ b/lib/url_util.js @@ -1,7 +1,7 @@ /* * Copyright (c) 2015-2023 Snowflake Computing Inc. All rights reserved. */ -var Logger = require('./logger'); +const Logger = require('./logger'); /** * Determines if a given URL is valid. 
diff --git a/lib/util.js b/lib/util.js index 55846afc3..113316b17 100644 --- a/lib/util.js +++ b/lib/util.js @@ -3,8 +3,8 @@ * */ -var util = require('util'); -var Url = require('url'); +const util = require('util'); +const Url = require('url'); /** * Note: A simple wrapper around util.inherits() for now, but this might change @@ -20,7 +20,7 @@ var Url = require('url'); * @returns {Object} */ exports.inherits = function (constructor, superConstructor) { - return util.inherits.apply(util, arguments); + return util.inherits.apply(util, [constructor, superConstructor]); }; /** @@ -51,8 +51,8 @@ exports.inherits = function (constructor, superConstructor) { * * @returns {String} */ -exports.format = function (format) { - return util.format.apply(util, arguments); +exports.format = function (format, ...params) { + return util.format.apply(util, [format, ...params]); }; /** @@ -66,7 +66,7 @@ exports.isFunction = function (value) { return !!value && typeof value === 'function'; }; -var toString = Object.prototype.toString; +const toString = Object.prototype.toString; /** * Determines if a given value is an object. @@ -246,8 +246,8 @@ exports.string = } // split on dot - var version1Parts = version1.split('.'); - var version2Parts = version2.split('.'); + const version1Parts = version1.split('.'); + const version2Parts = version2.split('.'); // add trailing zeros to make the parts arrays the same length while (version1Parts.length < version2Parts.length) { @@ -258,9 +258,9 @@ exports.string = } // compare elements in the two arrays one by one - var result = 0; - var version1Part, version2Part; - for (var index = 0, length = version1Parts.length; index < length; index++) { + let result = 0; + let version1Part, version2Part; + for (let index = 0, length = version1Parts.length; index < length; index++) { // convert to number before doing any arithmetic version1Part = Number(version1Parts[index]); version2Part = Number(version2Parts[index]); @@ -311,7 +311,7 @@ exports.url = */ appendParam: function (url, paramName, paramValue) { // if the specified url is valid - var urlAsObject = Url.parse(url); + const urlAsObject = Url.parse(url); if (urlAsObject) { // if the url already has query parameters, use '&' as the separator // when appending the additional query parameter, otherwise use '?' 
@@ -342,7 +342,7 @@ exports.url = exports.apply = function (dst, src) { // if both dst and src are objects, copy everything from src to dst if (this.isObject(dst) && this.isObject(src)) { - for (var key in src) { + for (const key in src) { if (Object.prototype.hasOwnProperty.call(src, key)) { dst[key] = src[key]; } @@ -470,9 +470,9 @@ exports.isRetryableHttpError = function (response, retry403) { }; exports.validateClientSessionKeepAliveHeartbeatFrequency = function (input, masterValidity) { - var heartbeatFrequency = input; - var realMax = Math.floor(masterValidity / 4); - var realMin = Math.floor(realMax / 4); + let heartbeatFrequency = input; + const realMax = Math.floor(masterValidity / 4); + const realMin = Math.floor(realMax / 4); if (input > realMax) { heartbeatFrequency = realMax; } else if (input < realMin) { @@ -509,8 +509,8 @@ exports.userAgent = userAgent; * @param account which account to connect to * @returns {string} host name */ -exports.construct_hostname = function (region, account) { - var host; +exports.constructHostname = function (region, account) { + let host; if (region === 'us-west-2') { region = ''; } else if (region != null) { diff --git a/samples/jsonParserComparison.js b/samples/jsonParserComparison.js index 154b8316e..0a76bc4b9 100644 --- a/samples/jsonParserComparison.js +++ b/samples/jsonParserComparison.js @@ -72,9 +72,6 @@ async function run() { from ${testVariantTempName}`; const selectCountVariant = (tableName) => `select count(colA) from ${(tableName)}`; - const avgBlock = 0, minBlock = 999999999999999, maxBlock = 0; - const blockCount = 0; - const testCases = []; if (!choosenParser || choosenParser.toString().includes('Function')) { testCases.push({ parser: 'Function', jsonColumnVariantParser: (rawColumnValue) => new Function(`return (${rawColumnValue})`) }); diff --git a/system_test/testLongQuery.js b/system_test/testLongQuery.js index 3143e43f1..38e2536eb 100644 --- a/system_test/testLongQuery.js +++ b/system_test/testLongQuery.js @@ -1,20 +1,20 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var snowflake = require('./../lib/snowflake'); -var connOption = require('../test/integration/connectionOptions'); -var testUtil = require('../test/integration/testUtil'); -var async = require('async'); +const snowflake = require('./../lib/snowflake'); +const connOption = require('../test/integration/connectionOptions'); +const testUtil = require('../test/integration/testUtil'); +const async = require('async'); // This test can run only if Snowflake account is available. 
-var canRunTest = connOption.snowflakeAccount !== undefined; +const canRunTest = connOption.snowflakeAccount !== undefined; describe('testPingPong', function () { before(function (done) { if (!canRunTest) { done(); } - var connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); + const connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); async.series( [ function (callback) { @@ -40,7 +40,7 @@ describe('testPingPong', function () { done(); return; } - var connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); + const connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); async.series( [ function (callback) { @@ -67,7 +67,7 @@ describe('testPingPong', function () { done(); return; } - var connection = testUtil.createConnection(); + const connection = testUtil.createConnection(); async.series( [ function (callback) { diff --git a/system_test/testProxy.js b/system_test/testProxy.js index 5004605db..b867d798b 100644 --- a/system_test/testProxy.js +++ b/system_test/testProxy.js @@ -1,15 +1,15 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var snowflake = require('../lib/snowflake'); -var async = require('async'); -var testUtil = require('../test/integration/testUtil'); -var connOptions = require('./connectionOptions'); +const snowflake = require('../lib/snowflake'); +const async = require('async'); +const testUtil = require('../test/integration/testUtil'); +const connOptions = require('./connectionOptions'); describe('testProxy', function () { it('testConnectionWithProxy', function (done) { - var connection = snowflake.createConnection(connOptions.connectionWithProxy); + const connection = snowflake.createConnection(connOptions.connectionWithProxy); async.series( [ function (callback) { @@ -24,7 +24,7 @@ describe('testProxy', function () { }); it('testSimpleSelectWithProxy', function (done) { - var connection = snowflake.createConnection(connOptions.connectionWithProxy); + const connection = snowflake.createConnection(connOptions.connectionWithProxy); async.series( [ function (callback) { diff --git a/system_test/testSnowflakeSupportWhs.js b/system_test/testSnowflakeSupportWhs.js deleted file mode 100644 index 90324ecd2..000000000 --- a/system_test/testSnowflakeSupportWhs.js +++ /dev/null @@ -1,476 +0,0 @@ -/* - * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. - */ - -/** - * These tests are currently run as part of RT-Language5, but should be - * moved into a different suite at some point because they really test GS - * functionality more than core driver behavior. 
- */ -var assert = require('assert'); -var async = require('async'); -var util = require('util'); -var snowflake = require('../lib/snowflake'); -var connOptions = require('../test/integration/connectionOptions'); -var connOptionsInternal = require('./connectionOptions'); -var testUtil = require('../test/integration/testUtil'); - -describe('exclude support warehouses', function () { - // get the current time in seconds - var nowInEpochSecs = Math.floor(Date.now() / 1000); - - // use it to create a unique-ish warehouse name - var supportWhName = 'WH_' + nowInEpochSecs; - - // define the window for which we'll be requesting warehouse metrics - var startTime = nowInEpochSecs - 24 * 3600; - var endTime = nowInEpochSecs + 24 * 3600; - - // number of credits we charge per hour for a standard xsmall warehouse - var standardXsmallCredits = 1; - - var createSupportWh = util.format('create or replace warehouse %s ' + - 'warehouse_size = \'xsmall\'', supportWhName); - var dropSupportWh = util.format('drop warehouse %s', supportWhName); - - var createTestDb = 'create or replace database node_testdb'; - var dropTestDb = 'drop database node_testdb'; - - var setServerTypeStandard = 'alter account externalaccount set ' + - 'server_type = \'STANDARD\''; - - var enableJobScanFns = 'alter account externalaccount set ' + - 'enable_jobscan_functions = true'; - var unsetJobScanFns = 'alter account externalaccount set ' + - 'enable_jobscan_functions = default'; - - var now = new Date(); - - // get the current year as yy - var currentYear = Number(now.getFullYear().toString().substr(2)); - - // subtract a year from the current date and get the result as a string in the - // following format: MM/dd/yy - var todayLastYearAsString = - (now.getMonth() + 1) + '/' + now.getDate() + '/' + (currentYear - 1); - - // add a year to the current date and get the result as a string in the - // following format: MM/dd/yy - var todayNextYearAsString = - (now.getMonth() + 1) + '/' + now.getDate() + '/' + (currentYear + 1); - - // warehouse exclusion can be enabled by setting the exclude start date to a - // year ago - var enableWhExclusion = util.format('alter system set ' + - 'EXCLUDE_SUPPORT_WHS_START_DATE = \'%s\'', todayLastYearAsString); - - // warehouse exclusion can be disabled by setting the exclude start date to a - // year from now - var disableWhExclusion = util.format('alter system set ' + - 'EXCLUDE_SUPPORT_WHS_START_DATE = \'%s\'', todayNextYearAsString); - - var unsetWhExclusion = - 'alter system set EXCLUDE_SUPPORT_WHS_START_DATE = default'; - - var enableSupportWhFlag = util.format( - 'alter warehouse externalaccount.%s set snowflake_support = true', - supportWhName); - var disableSupportWhFlag = util.format( - 'alter warehouse externalaccount.%s set snowflake_support = false', - supportWhName); - - // create two connections, one to externalaccount and another to the snowflake - // account - var connExternal = snowflake.createConnection(connOptionsInternal.externalAccount); - var connSnowflake = snowflake.createConnection(connOptions.snowflakeAccount); - - // the original server_type for externalaccount - var externalAccServerTypeOrig; - - before(function (done) { - async.series([ - function (callback) { - // set up the connection to the snowflake account - testUtil.connect(connSnowflake, callback); - }, - function (callback) { - // enable support to get warehouse metrics from externalaccount - testUtil.executeCmd(connSnowflake, enableJobScanFns, callback); - }, - function (callback) { - // get the original 
server_type for externalaccount - connSnowflake.execute( - { - sqlText: 'show accounts like \'externalaccount\'', - complete: function (err, statement, rows) { - assert.ok(!err); - assert.ok(util.isArray(rows) && (rows.length === 1)); - - // extract the server type and save it for later use - externalAccServerTypeOrig = rows[0]['server type']; - - // we're done; invoke the callback - callback(); - } - }); - }, - function (callback) { - // change the server type in externalaccount to standard - testUtil.executeCmd(connSnowflake, setServerTypeStandard, callback); - }, - function (callback) { - // set up the connection to externalaccount - testUtil.connect(connExternal, callback); - }, - function (callback) { - // create a database in externalaccount so we can use information - // schema - testUtil.executeCmd(connExternal, createTestDb, callback); - }, - function (callback) { - // create a support warehouse in externalaccount to test warehouse - // metrics - testUtil.executeCmd(connExternal, createSupportWh, callback); - }], - done - ); - }); - - // clean up - after(function (done) { - async.series([ - function (callback) { - // unset feature flag to get warehouse metrics from externalaccount - testUtil.executeCmd(connSnowflake, unsetJobScanFns, callback); - }, - function (callback) { - // change the server_type in externalaccount back to its original value - var sqlText = util.format( - 'alter account externalaccount set server_type = \'%s\'', - externalAccServerTypeOrig); - testUtil.executeCmd(connSnowflake, sqlText, callback); - }, - function (callback) { - // destroy the connection to the snowflake account - testUtil.destroyConnection(connSnowflake, callback); - }, - function (callback) { - // drop the support warehouse we created in externalaccount - testUtil.executeCmd(connExternal, dropSupportWh, callback); - }, - function (callback) { - // drop the database we created in externalaccount to get information - // schema - testUtil.executeCmd(connExternal, dropTestDb, callback); - }, - function (callback) { - // destroy the connection to externalaccount - testUtil.destroyConnection(connExternal, callback); - }], - done - ); - }); - - ///** - // * Tests the customer's billing view. Credits for the test warehouse should - // * only be excluded from the bill if the 'exclude_support_whs_from_bill' - // * system parameter is set and the test warehouse is marked with the - // * 'snowflake_support' flag. 
- // */ - //it('customer account view', function(done) - //{ - // async.series([ - // function(callback) - // { - // // disable the support-wh-exclusion feature flag - // testUtil.executeCmd(connSnowflake, disableWhExclusion, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill by default - // assertCreditsFromExternalAcc( - // connExternal, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // enable the support-wh-exclusion feature flag - // testUtil.executeCmd(connSnowflake, enableWhExclusion, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // assertCreditsFromExternalAcc( - // connExternal, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // enable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, enableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are excluded from the bill - // assertCreditsFromExternalAcc(connExternal, 0, callback); - // }, - // function(callback) - // { - // // disable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, disableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // assertCreditsFromExternalAcc( - // connExternal, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // unset the support-wh-exclusion feature flag - // testUtil.executeCmd(connSnowflake, unsetWhExclusion, callback); - // }], - // done - // ); - //}); - - /** - * Tests the snowflake account's billing view. Credits for the test warehouse - * should only be excluded from the bill if the 'exclude_support_whs_from_bill' - * system parameter is set, we explicitly request a view of the billing - * metrics that excludes support warehouses, and the test warehouse is marked - * with the 'snowflake_support' flag. 
- */ - //it('snowflake account view', function(done) - //{ - // // a = the support-wh-exclusion feature flag - // // b = whether we're requesting a view of the billing metrics that excludes - // // support warehouses - // // c = the snowflake_support flag for test warehouse - // - // async.series([ - // function(callback) - // { - // // disable the support-wh-exclusion feature flag - // testUtil.executeCmd(connSnowflake, disableWhExclusion, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // // a = false, b = false, c = false - // assertCreditsFromSnowflakeAcc( - // connSnowflake, false, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // enable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, enableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // // a = false, b = false, c = true - // assertCreditsFromSnowflakeAcc( - // connSnowflake, false, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // disable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, disableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // // a = false, b = true, c = false - // assertCreditsFromSnowflakeAcc( - // connSnowflake, true, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // enable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, enableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // // a = false, b = true, c = true - // assertCreditsFromSnowflakeAcc( - // connSnowflake, true, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // disable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, disableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // enable the support-wh-exclusion feature flag - // testUtil.executeCmd(connSnowflake, enableWhExclusion, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // // a = true, b = false, c = false - // assertCreditsFromSnowflakeAcc( - // connSnowflake, false, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // enable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, enableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // // a = true, b = false, c = true - // assertCreditsFromSnowflakeAcc( - // connSnowflake, false, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // disable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, disableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make sure warehouse credits are included in the bill - // // a = true, b = true, c = false - // assertCreditsFromSnowflakeAcc( - // connSnowflake, true, standardXsmallCredits, callback); - // }, - // function(callback) - // { - // // enable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, enableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // make 
sure warehouse credits are excluded from the bill - // // a = true, b = true, c = true - // assertCreditsFromSnowflakeAcc(connSnowflake, true, 0, callback); - // }, - // function(callback) - // { - // // disable the snowflake_support flag for the test warehouse - // testUtil.executeCmd(connSnowflake, disableSupportWhFlag, callback); - // }, - // function(callback) - // { - // // unset the support-wh-exclusion feature flag - // testUtil.executeCmd(connSnowflake, unsetWhExclusion, callback); - // }], - // done - // ); - //}); - - /** - * Asynchronous function that can be used to assert whether the test warehouse - * credits seen by externalaccount equal a certain value. - * - * @param conn the connection to use to make the request. - * @param expected the expected number of credits. - * @param cb the callback to invoke if the assert succeeds. - */ - function assertCreditsFromExternalAcc(conn, expected, cb) { - var columnName = 'CREDITS'; - var sqlText = - util.format('select warehouse_name, sum(credits_used) as %s ' + - 'from table(information_schema.warehouse_metering_history(' + - '%s::timestamp, %s::timestamp, \'%s\')) ' + - 'group by warehouse_name', - columnName, startTime, endTime, supportWhName); - - conn.execute( - { - sqlText: sqlText, - complete: function (err, statement, rows) { - assert.ok(!err); - assert.ok(util.isArray(rows)); - - // the actual number of credits must equal the expected value - var credits = (rows.length === 0) ? 0 : rows[0][columnName]; - assert.strictEqual(credits, expected); - - // we're done; invoke the callback - cb(); - } - }); - } - - /** - * Asynchronous function that can be used to assert whether the test warehouse - * credits seen by the snowflake account equal a certain value. - * - * @param conn the connection to use to make the request. - * @param exclude whether to request a version of the billing metrics that - * excludes support warehouses. - * @param expected the expected number of credits. - * @param cb the callback to invoke if the assert succeeds. 
- */ - function assertCreditsFromSnowflakeAcc(conn, exclude, expected, cb) { - var columnName = 'CREDITS'; - var sqlText = util.format('select system$get_metrics(' + - '\'%s\', \'%s\', \'%s\', \'%s\'::timestamp, \'%s\'::timestamp, ' + - 'null, null, \'%s\', %s) as %s;', - 'ACCOUNT', 'EXTERNALACCOUNT', 'METERING', - startTime, endTime, 'UTC', exclude, columnName); - - conn.execute( - { - sqlText: sqlText, - complete: function (err, statement, rows) { - assert.ok(!err); - assert.ok(rows && (rows.length === 1)); - - // convert the one-row-one-column result to JSON - var response = JSON.parse(rows[0][columnName]); - assert(util.isObject(response)); - - // extract the instance types - var instanceTypes = response.instanceTypes; - assert(util.isArray(instanceTypes)); - - // create a map in which the keys are instance types and the values are - // the prices for the corresponding instance types - var mapInstanceTypeToPrice = {}; - for (var index = 0, length = instanceTypes.length; index < length; index++) { - var instanceType = instanceTypes[index]; - mapInstanceTypeToPrice[instanceType.id] = instanceType.price; - } - - // extract the aggregations - var aggregations = response.aggregations; - assert(util.isArray(aggregations)); - - // find the aggregation for the support warehouse - var supportWhAggregation; - for (index = 0, length = aggregations.length; index < length; index++) { - if (aggregations[index].name === supportWhName) { - supportWhAggregation = aggregations[index]; - } - } - - var credits = 0; - - // if we have an aggregation for the support warehouse - if (util.isObject(supportWhAggregation)) { - // extract the configs array; this contains information about the - // total number of credits - assert(util.isObject(supportWhAggregation.aggregate)); - var supportWhConfigs = supportWhAggregation.aggregate.config; - assert(util.isArray(supportWhConfigs)); - - // convert the counts to credits - for (index = 0, length = supportWhConfigs.length; index < length; index++) { - var config = supportWhConfigs[index]; - credits += mapInstanceTypeToPrice[config.type] * [config.count]; - } - } - - // the actual number of credits must equal the expected value - assert.strictEqual(credits, expected); - - // we're done; invoke the callback - cb(); - } - }); - } -}); \ No newline at end of file diff --git a/system_test/testSystemGetObjects.js b/system_test/testSystemGetObjects.js index d5f2d2d21..446908feb 100644 --- a/system_test/testSystemGetObjects.js +++ b/system_test/testSystemGetObjects.js @@ -8,40 +8,40 @@ * functionality more than core driver behavior. 
*/ -var assert = require('assert'); -var async = require('async'); -var util = require('util'); -var snowflake = require('./../lib/snowflake'); -var connOptions = require('../test/integration/connectionOptions'); -var testUtil = require('../test/integration/testUtil'); +const assert = require('assert'); +const async = require('async'); +const util = require('util'); +const snowflake = require('./../lib/snowflake'); +const connOptions = require('../test/integration/connectionOptions'); +const testUtil = require('../test/integration/testUtil'); describe('system$get_objects()', function () { - var createDatabase = 'create or replace database node_testdb;'; - var createSchema = 'create or replace schema node_testschema;'; - var createTableT1 = 'create or replace table t1 (c1 number);'; - var createTableT2 = 'create or replace table t2 (c1 number);'; - var createViewV1 = 'create or replace view v1 as select * from t1;'; - var createViewV2 = 'create or replace view v2 as select * from t2;'; - var createViewV3 = 'create or replace view v3 as select v1.c1 from v1, v2;'; - var createViewV4 = 'create or replace view v4 as select * from v3;'; - var createStage = 'create or replace stage test_stage ' + + const createDatabase = 'create or replace database node_testdb;'; + const createSchema = 'create or replace schema node_testschema;'; + const createTableT1 = 'create or replace table t1 (c1 number);'; + const createTableT2 = 'create or replace table t2 (c1 number);'; + const createViewV1 = 'create or replace view v1 as select * from t1;'; + const createViewV2 = 'create or replace view v2 as select * from t2;'; + const createViewV3 = 'create or replace view v3 as select v1.c1 from v1, v2;'; + const createViewV4 = 'create or replace view v4 as select * from v3;'; + const createStage = 'create or replace stage test_stage ' + 'url = \'s3://some_url\';'; - var createFileFormat = 'create or replace file format ' + + const createFileFormat = 'create or replace file format ' + 'test_file_format type = \'csv\';'; - var createSequence = 'create or replace sequence test_sequence;'; - var createSqlUdfAdd1Number = 'create or replace function add1 (n number) ' + + const createSequence = 'create or replace sequence test_sequence;'; + const createSqlUdfAdd1Number = 'create or replace function add1 (n number) ' + 'returns number as \'n + 1\';'; - var createSqlUdfAdd1String = 'create or replace function add1 (s string) ' + + const createSqlUdfAdd1String = 'create or replace function add1 (s string) ' + 'returns string as \'s || \'\'1\'\'\';'; - var createJsUdfAdd1Double = 'create or replace function add1 (n double) ' + + const createJsUdfAdd1Double = 'create or replace function add1 (n double) ' + 'returns double language javascript as ' + '\'return n + 1;\';'; - var dropDatabase = 'drop database node_testdb;'; + const dropDatabase = 'drop database node_testdb;'; // create two connections, one to testaccount and another to the snowflake // account - var connTestaccount = snowflake.createConnection(connOptions.valid); - var connSnowflake = snowflake.createConnection(connOptions.snowflakeAccount); + const connTestaccount = snowflake.createConnection(connOptions.valid); + const connSnowflake = snowflake.createConnection(connOptions.snowflakeAccount); before(function (done) { // set up the two connections and create a bunch of objects in testaccount; @@ -387,12 +387,12 @@ describe('system$get_objects()', function () { * @param {Object} options */ function testGetObjectsOnStmt(options) { - var connTestaccount = 
options.connTestaccount; - var connSnowflake = options.connSnowflake; - var sql = options.sql; - var output = options.output; + const connTestaccount = options.connTestaccount; + const connSnowflake = options.connSnowflake; + const sql = options.sql; + const output = options.output; - var queryId; + let queryId; /** * Builds the SQL text for a system$get_objects('execute [query_id];') @@ -413,7 +413,7 @@ function testGetObjectsOnStmt(options) { connTestaccount.execute( { sqlText: sql, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); queryId = statement.getQueryId(); callback(); @@ -423,8 +423,8 @@ function testGetObjectsOnStmt(options) { function (callback) { // run system$get_objects('execute [query_id];') from the snowflake // account and verify that we get the desired output - var columnName = 'map'; - var sqlText = util.format('%s as "%s";', + const columnName = 'map'; + const sqlText = util.format('%s as "%s";', buildSqlSystem$GetObjects(queryId), columnName); connSnowflake.execute( { diff --git a/system_test/testSystemSetWhSnowflakeSupportFlag.js b/system_test/testSystemSetWhSnowflakeSupportFlag.js index ba073478e..137967548 100644 --- a/system_test/testSystemSetWhSnowflakeSupportFlag.js +++ b/system_test/testSystemSetWhSnowflakeSupportFlag.js @@ -7,29 +7,29 @@ * moved into a different suite at some point because they really test GS * functionality more than core driver behavior. */ -var assert = require('assert'); -var async = require('async'); -var util = require('util'); -var snowflake = require('../lib/snowflake'); -var connOptions = require('../test/integration/connectionOptions'); -var connOptionsInternal = require('./connectionOptions'); -var testUtil = require('../test/integration/testUtil'); +const assert = require('assert'); +const async = require('async'); +const util = require('util'); +const snowflake = require('../lib/snowflake'); +const connOptions = require('../test/integration/connectionOptions'); +const connOptionsInternal = require('./connectionOptions'); +const testUtil = require('../test/integration/testUtil'); describe('exclude support warehouses', function () { - var createSysWh = + const createSysWh = 'create or replace warehouse syswh warehouse_size = \'xsmall\''; - var dropSysWh = 'drop warehouse syswh'; + const dropSysWh = 'drop warehouse syswh'; - var testWhName = 'SF_TEST_WH'; + const testWhName = 'SF_TEST_WH'; - var createTestWh = util.format('create or replace warehouse %s ' + + const createTestWh = util.format('create or replace warehouse %s ' + 'warehouse_size = \'xsmall\'', testWhName); - var dropTestWh = util.format('drop warehouse %s', testWhName); + const dropTestWh = util.format('drop warehouse %s', testWhName); // create two connections, one to externalaccount and another to the snowflake // account - var connExternal = snowflake.createConnection(connOptionsInternal.externalAccount); - var connSnowflake = snowflake.createConnection(connOptions.snowflakeAccount); + const connExternal = snowflake.createConnection(connOptionsInternal.externalAccount); + const connSnowflake = snowflake.createConnection(connOptions.snowflakeAccount); before(function (done) { async.series([ @@ -73,7 +73,7 @@ describe('exclude support warehouses', function () { * snowflake_support flag on both active and dropped warehouses. 
*/ it('set the snowflake_support flag on both active and dropped warehouse', function (done) { - var testWhId; + let testWhId; async.series([ function (cb) { @@ -137,7 +137,7 @@ describe('exclude support warehouses', function () { * @param callback the callback to invoke once the operation is complete. */ function getWarehouseId(conn, accountName, warehouseName, callback) { - var sqlText = util.format( + const sqlText = util.format( 'show warehouses like \'%s\' in %s', warehouseName, accountName); conn.execute( @@ -165,14 +165,14 @@ describe('exclude support warehouses', function () { */ function setWhSnowflakeSupportFlag( conn, warehouseId, snowflakeSupportFlag, callback) { - var sqlText = util.format( + const sqlText = util.format( 'select system$set_wh_snowflake_support_flag(%s, %s);', warehouseId, snowflakeSupportFlag); conn.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err) { assert.ok(!err); callback(); } @@ -189,8 +189,8 @@ describe('exclude support warehouses', function () { * @param callback the callback to invoke if the assert succeeds. */ function assertSnowflakeSupportFlag(conn, warehouseId, expected, callback) { - var columnName = 'FLAG'; - var sqlText = util.format( + const columnName = 'FLAG'; + const sqlText = util.format( 'select $1:"WarehouseDPO:primary":snowflakeSupportFlag::string as %s ' + 'from table(dposcan($${"slices": [{"name": "WarehouseDPO:primary"}], ' + '"ranges": [{"name": "id", "value": %s}]}$$))', columnName, warehouseId); @@ -204,7 +204,7 @@ describe('exclude support warehouses', function () { assert.strictEqual(rows.length, 1); // the value is a string so compare with 'true' to convert to boolean - var actualSnowflakeSupportFlag = (rows[0][columnName] === 'true'); + const actualSnowflakeSupportFlag = (rows[0][columnName] === 'true'); assert.strictEqual(actualSnowflakeSupportFlag, expected); // we're done; invoke the callback diff --git a/system_test/testToken.js b/system_test/testToken.js index 49347a4aa..53ea53b0c 100644 --- a/system_test/testToken.js +++ b/system_test/testToken.js @@ -1,15 +1,15 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var snowflake = require('./../lib/snowflake'); -var assert = require('assert'); -var connOption = require('../test/integration/connectionOptions'); -var testUtil = require('../test/integration/testUtil'); -var async = require('async'); +const snowflake = require('./../lib/snowflake'); +const assert = require('assert'); +const connOption = require('../test/integration/connectionOptions'); +const testUtil = require('../test/integration/testUtil'); +const async = require('async'); describe('testLoginTokenExpire', function () { before(function (done) { - var connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); + const connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); async.series( [ function (callback) { @@ -44,7 +44,7 @@ describe('testLoginTokenExpire', function () { }); after(function (done) { - var connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); + const connectionToSnowflake = snowflake.createConnection(connOption.snowflakeAccount); async.series( [ function (callback) { @@ -79,7 +79,7 @@ describe('testLoginTokenExpire', function () { }); it('testSessionToken', function (done) { - var connection = snowflake.createConnection(connOption.valid); + const connection = snowflake.createConnection(connOption.valid); async.series( [ function (callback) { @@ -105,7 +105,7 @@ describe('testLoginTokenExpire', function () { }); it('testMasterTokenExpire', function (done) { - var connection = snowflake.createConnection(connOption.valid); + const connection = snowflake.createConnection(connOption.valid); async.series( [ function (callback) { diff --git a/test/integration/ocsp_mock/https_ocsp_mock_agent.js b/test/integration/ocsp_mock/https_ocsp_mock_agent.js index 7a92f763a..b2d8c9ea7 100644 --- a/test/integration/ocsp_mock/https_ocsp_mock_agent.js +++ b/test/integration/ocsp_mock/https_ocsp_mock_agent.js @@ -13,7 +13,7 @@ const ErrorCodes = Errors.codes; * @constructor */ function HttpsMockAgentOcspRevoked(options) { - var agent = HttpsAgent.apply(this, arguments); + const agent = HttpsAgent.apply(this, [options]); agent.createConnection = function (options) { const socket = HttpsAgent.prototype.createConnection.apply(this, arguments); return SocketUtil.secureSocket(socket, options.host, null, { @@ -31,7 +31,7 @@ function HttpsMockAgentOcspRevoked(options) { * @constructor */ function HttpsMockAgentOcspUnkwown(options) { - var agent = HttpsAgent.apply(this, arguments); + const agent = HttpsAgent.apply(this, [options]); agent.createConnection = function (options) { const socket = HttpsAgent.prototype.createConnection.apply(this, arguments); return SocketUtil.secureSocket(socket, options.host, null, { @@ -49,7 +49,7 @@ function HttpsMockAgentOcspUnkwown(options) { * @constructor */ function HttpsMockAgentOcspInvalid(options) { - var agent = HttpsAgent.apply(this, arguments); + const agent = HttpsAgent.apply(this, [options]); agent.createConnection = function (options) { const socket = HttpsAgent.prototype.createConnection.apply(this, arguments); return SocketUtil.secureSocket(socket, options.host, null, { diff --git a/test/integration/testArrayBind.js b/test/integration/testArrayBind.js index 9becb836c..5cd58a5d0 100644 --- a/test/integration/testArrayBind.js +++ b/test/integration/testArrayBind.js @@ -2,24 +2,24 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved.
*/ const snowflake = require('./../../lib/snowflake'); -var async = require('async'); -var assert = require('assert'); -var testUtil = require('./testUtil'); -var connOption = require('./connectionOptions'); +const async = require('async'); +const assert = require('assert'); +const testUtil = require('./testUtil'); +const connOption = require('./connectionOptions'); const Logger = require('../../lib/logger'); describe('Test Array Bind', function () { this.timeout(300000); - var connection; - var createABTable = 'create or replace table testAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; - var insertAB = 'insert into testAB values(?, ?, ?, ?, ?, ?)'; - var selectAB = 'select * from testAB where colB = 1'; - var createNABTable = 'create or replace table testNAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; - var insertNAB = 'insert into testNAB values(?, ?, ?, ?, ?, ?)'; - var selectNAB = 'select * from testNAB where colB = 1'; - var createNullTable = 'create or replace table testNullTB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; - var insertNull = 'insert into testNullTB values(?, ?, ?, ?, ?, ?)'; - var selectNull = 'select * from testNullTB where colB = 1'; + let connection; + const createABTable = 'create or replace table testAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; + const insertAB = 'insert into testAB values(?, ?, ?, ?, ?, ?)'; + const selectAB = 'select * from testAB where colB = 1'; + const createNABTable = 'create or replace table testNAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; + const insertNAB = 'insert into testNAB values(?, ?, ?, ?, ?, ?)'; + const selectNAB = 'select * from testNAB where colB = 1'; + const createNullTable = 'create or replace table testNullTB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; + const insertNull = 'insert into testNullTB values(?, ?, ?, ?, ?, ?)'; + const selectNull = 'select * from testNullTB where colB = 1'; const usedTableNames = [ 'testAB', 'testNAB', 'testNullTB', @@ -45,26 +45,26 @@ describe('Test Array Bind', function () { }); it('testArrayBind', function (done) { - var NABData; + let NABData; async.series( [ function (callback) { - var createNAB = connection.execute({ + connection.execute({ sqlText: createABTable, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); callback(); } }); }, function (callback) { - var arrBind = []; - var count = 100; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 100; + for (let i = 0; i < count; i++) { arrBind.push(['string' + i, i, '2020-05-11', '12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } - var insertABStmt = connection.execute({ + const insertABStmt = connection.execute({ sqlText: insertAB, binds: arrBind, complete: function (err, stmt) { @@ -77,21 +77,21 @@ describe('Test Array Bind', function () { }); }, function (callback) { - var createNAB = connection.execute({ + connection.execute({ sqlText: createNABTable, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); callback(); } }); }, function (callback) { - var arrBind = []; - var count = 2; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 2; + for (let i = 0; i < count; i++) { arrBind.push(['string' + i, i, '2020-05-11', 
'12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } - var insertNABStmt = connection.execute({ + connection.execute({ sqlText: insertNAB, binds: arrBind, complete: function (err, stmt) { @@ -102,7 +102,7 @@ describe('Test Array Bind', function () { }); }, function (callback) { - var selectNABTable = connection.execute({ + connection.execute({ sqlText: selectNAB, complete: function (err, stmt, rows) { testUtil.checkError(err); @@ -112,20 +112,20 @@ describe('Test Array Bind', function () { }); }, function (callback) { - var selectABTable = connection.execute({ + connection.execute({ sqlText: selectAB, complete: function (err, stmt, rows) { testUtil.checkError(err); - var ABData = rows[0]; + const ABData = rows[0]; - var ABDate = new Date(ABData['COLC']); - var ABDataD = new Date(ABData['COLD']).getTime(); - var ABDataE = new Date(ABData['COLE']).getTime(); - var ABDataF = new Date(ABData['COLF']).getTime(); - var NABDate = new Date(NABData['COLC']); - var NABDataD = new Date(NABData['COLD']).getTime(); - var NABDataE = new Date(NABData['COLE']).getTime(); - var NABDataF = new Date(NABData['COLF']).getTime(); + const ABDate = new Date(ABData['COLC']); + const ABDataD = new Date(ABData['COLD']).getTime(); + const ABDataE = new Date(ABData['COLE']).getTime(); + const ABDataF = new Date(ABData['COLF']).getTime(); + const NABDate = new Date(NABData['COLC']); + const NABDataD = new Date(NABData['COLD']).getTime(); + const NABDataE = new Date(NABData['COLE']).getTime(); + const NABDataF = new Date(NABData['COLF']).getTime(); assert.equal(ABData['COLA'], NABData['COLA']); assert.equal(ABData['COLB'], NABData['COLB']); @@ -143,7 +143,7 @@ describe('Test Array Bind', function () { }); it('testArrayBindWillNull', function (done) { - var NABData; + let NABData; async.series( [ function (callback) { @@ -155,9 +155,9 @@ describe('Test Array Bind', function () { }); }, function (callback) { - var arrBind = []; - var count = 100; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 100; + for (let i = 0; i < count; i++) { arrBind.push([null, i, '2020-05-11', '12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } @@ -184,8 +184,8 @@ describe('Test Array Bind', function () { }); }, function (callback) { - var arrBind = []; - var count = 2; + const arrBind = []; + const count = 2; for (let i = 0; i < count; i++) { arrBind.push(['string' + i, i, '2020-05-11', '12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } @@ -224,16 +224,16 @@ describe('Test Array Bind', function () { callback(err); } else { try { - var ABData = rows[0]; + const ABData = rows[0]; - var ABDate = new Date(ABData['COLC']); - var ABDataD = new Date(ABData['COLD']).getTime(); - var ABDataE = new Date(ABData['COLE']).getTime(); - var ABDataF = new Date(ABData['COLF']).getTime(); - var NABDate = new Date(NABData['COLC']); - var NABDataD = new Date(NABData['COLD']).getTime(); - var NABDataE = new Date(NABData['COLE']).getTime(); - var NABDataF = new Date(NABData['COLF']).getTime(); + const ABDate = new Date(ABData['COLC']); + const ABDataD = new Date(ABData['COLD']).getTime(); + const ABDataE = new Date(ABData['COLE']).getTime(); + const ABDataF = new Date(ABData['COLF']).getTime(); + const NABDate = new Date(NABData['COLC']); + const NABDataD = new Date(NABData['COLD']).getTime(); + const NABDataE = new Date(NABData['COLE']).getTime(); + const NABDataF = new Date(NABData['COLF']).getTime(); assert.equal(ABData['COLA'], ''); 
assert.equal(ABData['COLB'], NABData['COLB']); @@ -258,17 +258,17 @@ describe('Test Array Bind', function () { async.series( [ function (callback) { - var createSql = 'create or replace table testBindJson(colA varchar(30), colB varchar(30))'; + const createSql = 'create or replace table testBindJson(colA varchar(30), colB varchar(30))'; testUtil.executeCmd(connection, createSql, callback); }, function (callback) { - var arrBind = []; - var count = 100; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 100; + for (let i = 0; i < count; i++) { arrBind.push(['some-data-for-stuff1', 'some-data-for-stuff2']); } - var insertSql = 'insert into testBindJson(cola,colb) select value:stuff1, value:stuff2 from table(flatten(parse_json(?)))'; - var insertStatement = connection.execute({ + const insertSql = 'insert into testBindJson(cola,colb) select value:stuff1, value:stuff2 from table(flatten(parse_json(?)))'; + connection.execute({ sqlText: insertSql, binds: [JSON.stringify(arrBind)], complete: function (err, stmt) { @@ -289,17 +289,17 @@ describe('Test Array Bind', function () { async.series( [ function (callback) { - var createSql = 'create or replace table testBindLargeArray(colA varchar(30))'; + const createSql = 'create or replace table testBindLargeArray(colA varchar(30))'; testUtil.executeCmd(connection, createSql, callback); }, function (callback) { - var arrBind = []; - var count = 100; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 100; + for (let i = 0; i < count; i++) { arrBind.push(['some-data-for-stuff1']); } - var insertSql = 'insert into testBindLargeArray(colA) values (?)'; - var insertStatement = connection.execute({ + const insertSql = 'insert into testBindLargeArray(colA) values (?)'; + connection.execute({ sqlText: insertSql, binds: arrBind, complete: function (err, stmt) { @@ -320,7 +320,7 @@ describe('Test Array Bind', function () { async.series( [ function (callback) { - var createSql = 'create or replace table test101 (id INT, type VARCHAR(40), data VARIANT, createdDateTime TIMESTAMP_TZ(0), action VARCHAR(256))'; + const createSql = 'create or replace table test101 (id INT, type VARCHAR(40), data VARIANT, createdDateTime TIMESTAMP_TZ(0), action VARCHAR(256))'; testUtil.executeCmd(connection, createSql, callback); }, function (callback) { @@ -369,18 +369,18 @@ describe('Test Array Bind', function () { ] ]; - var flatValue = []; + const flatValue = []; dataset.forEach(element => { element.forEach(value => { flatValue.push(value); }); }); - var insertTable101 = 'insert into test101 (id,type,data,createdDateTime,action) select COLUMN1,COLUMN2,TRY_PARSE_JSON(COLUMN3),COLUMN4,COLUMN5 from values (?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?)'; - var insertStatement = connection.execute({ + const insertTable101 = 'insert into test101 (id,type,data,createdDateTime,action) select COLUMN1,COLUMN2,TRY_PARSE_JSON(COLUMN3),COLUMN4,COLUMN5 from values (?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?),(?,?,?,?,?)'; + connection.execute({ sqlText: insertTable101, binds: flatValue, fetchAsString: ['Number', 'Date', 'JSON'], - complete: function (err, stmt) { + complete: function (err) { if (err) { callback(err); } else { @@ -390,12 +390,12 @@ describe('Test Array Bind', function () { }); }, function (callback) { - var selectSql = 'select * from test101 where ID = 5489'; - var selectABTable = connection.execute({ + const selectSql = 'select * from test101 where ID = 5489'; + connection.execute({ sqlText: 
selectSql, complete: function (err, stmt, rows) { testUtil.checkError(err); - var result = rows[0]; + const result = rows[0]; assert.equal(result['TYPE'], 'SAMPLE'); callback(); } @@ -414,10 +414,10 @@ describe('Test Array Bind - full path', function () { this.timeout(600000); this.retries(3); // this test suit are considered as flaky - var connection; + let connection; const fullTableName = `${DATABASE_NAME}.${SCHEMA_NAME}.testAB`; - var createABTable = `create or replace table ${fullTableName}(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)`; - var insertAB = `insert into ${fullTableName} values(?, ?, ?, ?, ?, ?)`; + const createABTable = `create or replace table ${fullTableName}(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)`; + const insertAB = `insert into ${fullTableName} values(?, ?, ?, ?, ?, ?)`; before(function (done) { connection = snowflake.createConnection({ @@ -442,9 +442,9 @@ describe('Test Array Bind - full path', function () { }); it('Full path array bind', function (done) { - var arrBind = []; - var count = 100; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 100; + for (let i = 0; i < count; i++) { arrBind.push([null, i, '2020-05-11', '12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } @@ -467,13 +467,13 @@ describe('Test Array Bind - full path', function () { describe('Test Array Bind Force Error on Upload file', function () { this.timeout(300000); - var connection; - var createABTable = 'create or replace table testAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; - var insertAB = 'insert into testAB values(?, ?, ?, ?, ?, ?)'; - var selectAB = 'select * from testAB where colB = 1'; - var createNABTable = 'create or replace table testNAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; - var insertNAB = 'insert into testNAB values(?, ?, ?, ?, ?, ?)'; - var selectNAB = 'select * from testNAB where colB = 1'; + let connection; + const createABTable = 'create or replace table testAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; + const insertAB = 'insert into testAB values(?, ?, ?, ?, ?, ?)'; + const selectAB = 'select * from testAB where colB = 1'; + const createNABTable = 'create or replace table testNAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)'; + const insertNAB = 'insert into testNAB values(?, ?, ?, ?, ?, ?)'; + const selectNAB = 'select * from testNAB where colB = 1'; const usedTableNames = ['testAB', 'testNAB']; @@ -497,26 +497,26 @@ describe('Test Array Bind Force Error on Upload file', function () { }); it('testArrayBind force upload file error', function (done) { - var NABData; + let NABData; async.series( [ function (callback) { - var createNAB = connection.execute({ + connection.execute({ sqlText: createABTable, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); callback(); } }); }, function (callback) { - var arrBind = []; - var count = 100; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 100; + for (let i = 0; i < count; i++) { arrBind.push(['string' + i, i, '2020-05-11', '12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } - var insertABStmt = connection.execute({ + const insertABStmt = connection.execute({ sqlText: insertAB, binds: arrBind, complete: function (err, stmt) { 
@@ -529,21 +529,21 @@ describe('Test Array Bind Force Error on Upload file', function () { }); }, function (callback) { - var createNAB = connection.execute({ + connection.execute({ sqlText: createNABTable, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); callback(); } }); }, function (callback) { - var arrBind = []; - var count = 2; - for (var i = 0; i < count; i++) { + const arrBind = []; + const count = 2; + for (let i = 0; i < count; i++) { arrBind.push(['string' + i, i, '2020-05-11', '12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } - var insertNABStmt = connection.execute({ + connection.execute({ sqlText: insertNAB, binds: arrBind, complete: function (err, stmt) { @@ -554,7 +554,7 @@ describe('Test Array Bind Force Error on Upload file', function () { }); }, function (callback) { - var selectNABTable = connection.execute({ + connection.execute({ sqlText: selectNAB, complete: function (err, stmt, rows) { testUtil.checkError(err); @@ -564,20 +564,20 @@ describe('Test Array Bind Force Error on Upload file', function () { }); }, function (callback) { - var selectABTable = connection.execute({ + connection.execute({ sqlText: selectAB, complete: function (err, stmt, rows) { testUtil.checkError(err); - var ABData = rows[0]; - - var ABDate = new Date(ABData['COLC']); - var ABDataD = new Date(ABData['COLD']).getTime(); - var ABDataE = new Date(ABData['COLE']).getTime(); - var ABDataF = new Date(ABData['COLF']).getTime(); - var NABDate = new Date(NABData['COLC']); - var NABDataD = new Date(NABData['COLD']).getTime(); - var NABDataE = new Date(NABData['COLE']).getTime(); - var NABDataF = new Date(NABData['COLF']).getTime(); + const ABData = rows[0]; + + const ABDate = new Date(ABData['COLC']); + const ABDataD = new Date(ABData['COLD']).getTime(); + const ABDataE = new Date(ABData['COLE']).getTime(); + const ABDataF = new Date(ABData['COLF']).getTime(); + const NABDate = new Date(NABData['COLC']); + const NABDataD = new Date(NABData['COLD']).getTime(); + const NABDataE = new Date(NABData['COLE']).getTime(); + const NABDataF = new Date(NABData['COLF']).getTime(); assert.equal(ABData['COLA'], NABData['COLA']); assert.equal(ABData['COLB'], NABData['COLB']); diff --git a/test/integration/testBind.js b/test/integration/testBind.js index 445db60fd..8bec62742 100644 --- a/test/integration/testBind.js +++ b/test/integration/testBind.js @@ -1,24 +1,23 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var async = require('async'); -var assert = require('assert'); -var testUtil = require('./testUtil'); -var util = require('util'); +const async = require('async'); +const assert = require('assert'); +const testUtil = require('./testUtil'); +const util = require('util'); const sharedStatement = require('./sharedStatements'); -const Logger = require('../../lib/logger'); describe('Test Bind Varible', function () { - var connection; - var createTestTbl = 'create or replace table testTbl(colA string, colB number)'; - var dropTestTbl = 'drop table if exists testTbl'; - var insertWithQmark = 'insert into testTbl values(?, ?)'; - var insertWithSemiColon = 'insert into testTbl values(:1, :2)'; - var insertValue = 'insert into testTbl values(\'string\', 3)'; - var insertSingleBind = 'insert into testTbl values(?)'; - var selectAllFromTbl = 'select * from testTbl order by 1'; - var selectAllFromTblLimit1 = 'select * from testTbl order by 1 limit ?'; - var selectWithBind = 'select * from testTbl where COLA = :2 and COLB = :1'; + let connection; + const createTestTbl = 'create or replace table testTbl(colA string, colB number)'; + const dropTestTbl = 'drop table if exists testTbl'; + const insertWithQmark = 'insert into testTbl values(?, ?)'; + const insertWithSemiColon = 'insert into testTbl values(:1, :2)'; + const insertValue = 'insert into testTbl values(\'string\', 3)'; + const insertSingleBind = 'insert into testTbl values(?)'; + const selectAllFromTbl = 'select * from testTbl order by 1'; + const selectAllFromTblLimit1 = 'select * from testTbl order by 1 limit ?'; + const selectWithBind = 'select * from testTbl where COLA = :2 and COLB = :1'; before(function (done) { connection = testUtil.createConnection(); @@ -182,7 +181,7 @@ describe('Test Bind Varible', function () { { sqlText: insertWithSemiColon, binds: [null, 3], - complete: function (err, statement, rows) { + complete: function (err) { assert.ok(!err); callback(); } @@ -209,7 +208,7 @@ describe('Test Bind Varible', function () { }); it('testWrongBinds', function (done) { - var wrongBindsOptions = + const wrongBindsOptions = [ { // empty binds array @@ -276,9 +275,9 @@ describe('Test Bind Varible', function () { testUtil.executeCmd(connection, createTestTbl, callback); }, function (callback) { - var executeWrongBindsOption = function (index) { + const executeWrongBindsOption = function (index) { if (index < wrongBindsOptions.length) { - var option = wrongBindsOptions[index]; + const option = wrongBindsOptions[index]; option.complete = function (err, stmt) { option.verifyResults(err, stmt); executeWrongBindsOption(index + 1); @@ -306,13 +305,13 @@ describe('Test Bind Varible', function () { testUtil.executeCmd(connection, createTestTbl, callback); }, function (callback) { - var bindSets = [ + const bindSets = [ ['string2', 4], ['string3', 5], ['string4', 6] ]; - var insertWithDifferentBinds = function (i) { + const insertWithDifferentBinds = function (i) { if (i < bindSets.length) { testUtil.executeCmd( connection, @@ -330,7 +329,7 @@ describe('Test Bind Varible', function () { insertWithDifferentBinds(0); }, function (callback) { - var expected = [ + const expected = [ { 'COLA': 'string2', 'COLB': 4 }, { 'COLA': 'string3', 'COLB': 5 }, { 'COLA': 'string4', 'COLB': 6 } @@ -385,7 +384,7 @@ describe('Test Bind Varible', function () { }); describe('testBindingWithDifferentDataType', function () { - var testingFunc = function (dataType, binds, expected, callback) { + const testingFunc = function (dataType, binds, expected, callback) { 
async.series( [ function (callback) { @@ -576,7 +575,7 @@ describe('Test Bind Varible', function () { testUtil.executeCmd(connection, insertValue, callback); }, function (callback) { - var maliciousOptions = [ + const maliciousOptions = [ { sqlText: 'select * from testTbl where colA = ?', binds: ['a; drop table if exists testTbl'] @@ -586,7 +585,7 @@ describe('Test Bind Varible', function () { binds: ['$*~?\':1234567890!@#$%^&*()_='] } ]; - var selectWithOption = function (i) { + const selectWithOption = function (i) { if (i < maliciousOptions.length) { testUtil.executeQueryAndVerify( connection, diff --git a/test/integration/testCancel.js b/test/integration/testCancel.js index fdba1c3b7..b7fc09884 100644 --- a/test/integration/testCancel.js +++ b/test/integration/testCancel.js @@ -1,12 +1,12 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var async = require('async'); -var testUtil = require('./testUtil'); +const async = require('async'); +const testUtil = require('./testUtil'); describe('Test Cancel Query', function () { - var connection; - var longQuery = 'select count(*) from table(generator(timeLimit => 3600))'; + let connection; + const longQuery = 'select count(*) from table(generator(timeLimit => 3600))'; before(function (done) { connection = testUtil.createConnection(); @@ -33,7 +33,7 @@ describe('Test Cancel Query', function () { it('testCancelQuerySimple', function (done) { - var statement = connection.execute({ + const statement = connection.execute({ sqlText: longQuery }); diff --git a/test/integration/testConcurrent.js b/test/integration/testConcurrent.js index 724172edc..a78602e3f 100644 --- a/test/integration/testConcurrent.js +++ b/test/integration/testConcurrent.js @@ -1,16 +1,16 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var async = require('async'); -var assert = require('assert'); -var testUtil = require('./testUtil'); +const async = require('async'); +const assert = require('assert'); +const testUtil = require('./testUtil'); const sourceRowCount = 30000; describe('Test Concurrent Execution', function () { - var connection; - var selectOrders = 'select true from table(generator(rowcount=>' + sourceRowCount + '))'; - var disableCacheResult = 'alter session set use_cached_result = false'; + let connection; + const selectOrders = 'select true from table(generator(rowcount=>' + sourceRowCount + '))'; + const disableCacheResult = 'alter session set use_cached_result = false'; before(function (done) { connection = testUtil.createConnection(); @@ -30,15 +30,15 @@ describe('Test Concurrent Execution', function () { }); it('testConcurrentSelectBySameUser', function (done) { - var completedQueries = 0; - var numberOfQueries = 10; - for (var i = 0; i < numberOfQueries; i++) { + let completedQueries = 0; + const numberOfQueries = 10; + for (let i = 0; i < numberOfQueries; i++) { connection.execute({ sqlText: selectOrders, complete: function (err, stmt) { testUtil.checkError(err); - var stream = stmt.streamRows(); - var rowCount = 0; + const stream = stmt.streamRows(); + let rowCount = 0; stream.on('readable', function () { while (stream.read() !== null) { rowCount++; @@ -63,9 +63,9 @@ describe('Test Concurrent Execution', function () { async.series( [ function (callback) { - var numberOfThread = 10; - var completedThread = 0; - for (var i = 0; i < numberOfThread; i++) { + const numberOfThread = 10; + let completedThread = 0; + for (let i = 0; i < numberOfThread; i++) { testUtil.executeCmd( connection, 'create or replace table test' + i + '(colA varchar)', @@ -79,9 +79,9 @@ describe('Test Concurrent Execution', function () { } }, function (callback) { - var numberOfThread = 10; - var completedThread = 0; - for (var i = 0; i < numberOfThread; i++) { + const numberOfThread = 10; + let completedThread = 0; + for (let i = 0; i < numberOfThread; i++) { testUtil.executeCmd( connection, 'drop table if exists test' + i, @@ -100,16 +100,16 @@ describe('Test Concurrent Execution', function () { }); it('testConcurrentSelectFromDifferentSession', function (done) { - var numberOfQueries = 10; - var completedQueries = 0; - for (var i = 0; i < numberOfQueries; i++) { + const numberOfQueries = 10; + let completedQueries = 0; + for (let i = 0; i < numberOfQueries; i++) { testUtil.createConnection() .connect(function (err, conn) { conn.execute({ sqlText: selectOrders, complete: function (err, stmt) { - var stream = stmt.streamRows(); - var rowCount = 0; + const stream = stmt.streamRows(); + let rowCount = 0; stream.on('readable', function () { while (stream.read() !== null) { rowCount++; diff --git a/test/integration/testConnection.js b/test/integration/testConnection.js index 155451115..547641c56 100644 --- a/test/integration/testConnection.js +++ b/test/integration/testConnection.js @@ -45,7 +45,7 @@ describe('Connection test', function () { }); it('Wrong Username', function (done) { - var connection = snowflake.createConnection(connOption.wrongUserName); + const connection = snowflake.createConnection(connOption.wrongUserName); connection.connect(function (err) { assert.ok(err, 'Username is an empty string'); assert.equal( @@ -57,7 +57,7 @@ describe('Connection test', function () { }); it('Wrong Password', function (done) { - var connection = snowflake.createConnection(connOption.wrongPwd); + const connection = 
snowflake.createConnection(connOption.wrongPwd); connection.connect(function (err) { assert.ok(err, 'Password is an empty string'); assert.equal( @@ -207,7 +207,7 @@ describe('Connection test - connection pool', function () { this.timeout(30000); it('1 min connection', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 10, min: 1, }); @@ -220,7 +220,7 @@ describe('Connection test - connection pool', function () { }); it('5 min connection', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 10, min: 5, }); @@ -233,7 +233,7 @@ describe('Connection test - connection pool', function () { }); it('10 min connection', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 10, min: 10, }); @@ -246,7 +246,7 @@ describe('Connection test - connection pool', function () { }); it('min greater than max connection', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 5, min: 10, }); @@ -259,7 +259,7 @@ describe('Connection test - connection pool', function () { }); it('1 max connection', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 1, min: 0, }); @@ -268,14 +268,14 @@ describe('Connection test - connection pool', function () { assert.equal(connectionPool.min, 0); // Acquire a connection - const resourcePromise1 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 1); done(); }); it('1 max connection and acquire() more than 1', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 1, min: 0, }); @@ -284,16 +284,16 @@ describe('Connection test - connection pool', function () { assert.equal(connectionPool.min, 0); // Acquire 2 connections - const resourcePromise1 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 1); - const resourcePromise2 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 1); done(); }); it('5 max connection', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 5, min: 0, }); @@ -302,22 +302,22 @@ describe('Connection test - connection pool', function () { assert.equal(connectionPool.min, 0); // Acquire 5 connections - const resourcePromise1 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 1); - const resourcePromise2 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 2); - const resourcePromise3 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 3); - const resourcePromise4 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 4); - const resourcePromise5 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 5); done(); }); it('5 max connections and acquire() more than 5', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = 
snowflake.createPool(connOption.valid, { max: 5, min: 0, }); @@ -326,24 +326,24 @@ describe('Connection test - connection pool', function () { assert.equal(connectionPool.min, 0); // Acquire 6 connections - const resourcePromise1 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 1); - const resourcePromise2 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 2); - const resourcePromise3 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 3); - const resourcePromise4 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 4); - const resourcePromise5 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 5); - const resourcePromise6 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 5); done(); }); it('10 max connection', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 10, min: 0, }); @@ -352,32 +352,32 @@ describe('Connection test - connection pool', function () { assert.equal(connectionPool.min, 0); // Acquire 10 connections - const resourcePromise1 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 1); - const resourcePromise2 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 2); - const resourcePromise3 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 3); - const resourcePromise4 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 4); - const resourcePromise5 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 5); - const resourcePromise6 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 6); - const resourcePromise7 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 7); - const resourcePromise8 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 8); - const resourcePromise9 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 9); - const resourcePromise10 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 10); done(); }); it('10 max connections and acquire() more than 10', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 10, min: 0, }); @@ -386,34 +386,34 @@ describe('Connection test - connection pool', function () { assert.equal(connectionPool.min, 0); // Acquire 11 connections - const resourcePromise1 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 1); - const resourcePromise2 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 2); - const resourcePromise3 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 3); - const resourcePromise4 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 4); - const resourcePromise5 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 5); - const resourcePromise6 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 6); - 
const resourcePromise7 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 7); - const resourcePromise8 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 8); - const resourcePromise9 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 9); - const resourcePromise10 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 10); - const resourcePromise11 = connectionPool.acquire(); + connectionPool.acquire(); assert.equal(connectionPool.size, 10); done(); }); it('acquire() 1 connection and release()', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 5, min: 0, }); @@ -447,7 +447,7 @@ describe('Connection test - connection pool', function () { it('acquire() 5 connections and release()', function (done) { // Create the connection pool - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 5, min: 0, }); @@ -531,7 +531,7 @@ describe('Connection test - connection pool', function () { }); it('acquire() 1 connection and destroy()', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 5, min: 0, }); @@ -557,7 +557,7 @@ describe('Connection test - connection pool', function () { }); it('acquire() 5 connections and destroy()', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 5, min: 0, }); @@ -641,7 +641,7 @@ describe('Connection test - connection pool', function () { }); it('use()', function (done) { - var connectionPool = snowflake.createPool(connOption.valid, { + const connectionPool = snowflake.createPool(connOption.valid, { max: 5, min: 0, }); @@ -679,7 +679,7 @@ describe('Connection test - connection pool', function () { }); it('wrong password - use', async function () { - var connectionPool = snowflake.createPool(connOption.wrongPwd, { + const connectionPool = snowflake.createPool(connOption.wrongPwd, { max: 10, min: 1, }); @@ -703,7 +703,7 @@ describe('Connection test - connection pool', function () { }); it('wrong password - acquire', async function () { - var connectionPool = snowflake.createPool(connOption.wrongPwd, { + const connectionPool = snowflake.createPool(connOption.wrongPwd, { max: 10, min: 1, }); diff --git a/test/integration/testConnectionNegative.js b/test/integration/testConnectionNegative.js index c179019a7..c9e8d3c95 100644 --- a/test/integration/testConnectionNegative.js +++ b/test/integration/testConnectionNegative.js @@ -1,13 +1,13 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var snowflake = require('./../../lib/snowflake'); -var assert = require('assert'); +const snowflake = require('./../../lib/snowflake'); +const assert = require('assert'); describe('snowflake.createConnection() synchronous errors', function () { // empty error code for now - var ErrorCodes = {}; - var testCases = + const ErrorCodes = {}; + const testCases = [ { name: 'missing options', @@ -133,9 +133,9 @@ describe('snowflake.createConnection() synchronous errors', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var error = null; + let error = null; try { snowflake.createConnection(testCase.options); @@ -148,7 +148,7 @@ describe('snowflake.createConnection() synchronous errors', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); diff --git a/test/integration/testDataType.js b/test/integration/testDataType.js index 9fc1de172..272191328 100644 --- a/test/integration/testDataType.js +++ b/test/integration/testDataType.js @@ -1,58 +1,58 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var async = require('async'); +const async = require('async'); const GlobalConfig = require('./../../lib/global_config'); const snowflake = require('./../../lib/snowflake'); -var testUtil = require('./testUtil'); +const testUtil = require('./testUtil'); const sharedStatement = require('./sharedStatements'); -var bigInt = require('big-integer'); +const bigInt = require('big-integer'); describe('Test DataType', function () { - var connection; - var createTableWithString = 'create or replace table testString(colA string)'; - var createTableWithVariant = 'create or replace table testVariant(colA variant)'; - var createTableWithArray = 'create or replace table testArray(colA array)'; - var createTableWithNumber = 'create or replace table testNumber(colA number)'; - var createTableWithDouble = 'create or replace table testDouble(colA double)'; - var createTableWithDate = 'create or replace table testDate(colA date)'; - var createTableWithTime = 'create or replace table testTime(colA time)'; - var createTableWithTimestamp = 'create or replace table testTimestamp(colA timestamp_ltz, ' + + let connection; + const createTableWithString = 'create or replace table testString(colA string)'; + const createTableWithVariant = 'create or replace table testVariant(colA variant)'; + const createTableWithArray = 'create or replace table testArray(colA array)'; + const createTableWithNumber = 'create or replace table testNumber(colA number)'; + const createTableWithDouble = 'create or replace table testDouble(colA double)'; + const createTableWithDate = 'create or replace table testDate(colA date)'; + const createTableWithTime = 'create or replace table testTime(colA time)'; + const createTableWithTimestamp = 'create or replace table testTimestamp(colA timestamp_ltz, ' + 'colB timestamp_tz, colC timestamp_ntz)'; - var createTableWithBoolean = 'create or replace table testBoolean(colA boolean, colB boolean, colC boolean)'; - var dropTableWithString = 'drop table if exists testString'; - var dropTableWithVariant = 'drop table if exists testVariant'; - var dropTableWithArray = 'drop table if exists testArray'; - var dropTableWithNumber = 'drop table if exists testNumber'; - var dropTableWithDouble = 'drop table if exists testDouble'; - var 
dropTableWithDate = 'drop table if exists testDate'; - var dropTableWithTime = 'drop table if exists testTime'; - var dropTableWithTimestamp = 'drop table if exists testTimestamp'; - var dropTableWithBoolean = 'drop table if exists testBoolean'; + const createTableWithBoolean = 'create or replace table testBoolean(colA boolean, colB boolean, colC boolean)'; + const dropTableWithString = 'drop table if exists testString'; + const dropTableWithVariant = 'drop table if exists testVariant'; + const dropTableWithArray = 'drop table if exists testArray'; + const dropTableWithNumber = 'drop table if exists testNumber'; + const dropTableWithDouble = 'drop table if exists testDouble'; + const dropTableWithDate = 'drop table if exists testDate'; + const dropTableWithTime = 'drop table if exists testTime'; + const dropTableWithTimestamp = 'drop table if exists testTimestamp'; + const dropTableWithBoolean = 'drop table if exists testBoolean'; const truncateTableWithVariant = 'truncate table if exists testVariant;'; - var insertDouble = 'insert into testDouble values(123.456)'; - var insertLargeNumber = 'insert into testNumber values (12345678901234567890123456789012345678)'; - var insertRegularSizedNumber = 'insert into testNumber values (100000001)'; + const insertDouble = 'insert into testDouble values(123.456)'; + const insertLargeNumber = 'insert into testNumber values (12345678901234567890123456789012345678)'; + const insertRegularSizedNumber = 'insert into testNumber values (100000001)'; const insertVariantJSON = 'insert into testVariant select parse_json(\'{a : 1 , b :[1 , 2 , 3, -Infinity, undefined], c : {a : 1}}\')'; const insertVariantJSONForCustomParser = 'insert into testVariant select parse_json(\'{a : 1 , b :[1 , 2 , 3], c : {a : 1}}\')'; const insertVariantXML = 'insert into testVariant select parse_xml(\'111\')'; - var insertArray = 'insert into testArray select parse_json(\'["a", 1]\')'; - var insertDate = 'insert into testDate values(to_date(\'2012-11-11\'))'; - var insertTime = 'insert into testTime values(to_time(\'12:34:56.789789789\'))'; - var insertTimestamp = 'insert into testTimestamp values(to_timestamp_ltz(' + const insertArray = 'insert into testArray select parse_json(\'["a", 1]\')'; + const insertDate = 'insert into testDate values(to_date(\'2012-11-11\'))'; + const insertTime = 'insert into testTime values(to_time(\'12:34:56.789789789\'))'; + const insertTimestamp = 'insert into testTimestamp values(to_timestamp_ltz(' + '\'Thu, 21 Jan 2016 06:32:44 -0800\'), to_timestamp_tz(\'Thu, 21 Jan 2016 06:32:44 -0800\'), ' + 'to_timestamp_ntz(\'Thu, 21 Jan 2016 06:32:44 -0800\'))'; - var insertBoolean = 'insert into testBoolean values(true, false, null)'; - var insertString = 'insert into testString values(\'string with space\')'; - var selectDouble = 'select * from testDouble'; - var selectNumber = 'select * from testNumber'; - var selectVariant = 'select * from testVariant'; - var selectArray = 'select * from testArray'; - var selectDate = 'select * from testDate'; - var selectTime = 'select * from testTime'; - var selectTimestamp = 'select * from testTimestamp'; - var selectBoolean = 'select * from testBoolean'; - var selectString = 'select * from testString'; + const insertBoolean = 'insert into testBoolean values(true, false, null)'; + const insertString = 'insert into testString values(\'string with space\')'; + const selectDouble = 'select * from testDouble'; + const selectNumber = 'select * from testNumber'; + const selectVariant = 'select * from testVariant'; + const 
selectArray = 'select * from testArray'; + const selectDate = 'select * from testDate'; + const selectTime = 'select * from testTime'; + const selectTimestamp = 'select * from testTimestamp'; + const selectBoolean = 'select * from testBoolean'; + const selectString = 'select * from testString'; before(function (done) { connection = testUtil.createConnection(); diff --git a/test/integration/testLargeResultSet.js b/test/integration/testLargeResultSet.js index a440f3414..460fb5c04 100644 --- a/test/integration/testLargeResultSet.js +++ b/test/integration/testLargeResultSet.js @@ -4,7 +4,6 @@ const assert = require('assert'); const async = require('async'); const testUtil = require('./testUtil'); -const { configureLogger } = require('../configureLogger'); const { randomizeName } = require('./testUtil'); describe('Large result Set Tests', function () { @@ -27,8 +26,8 @@ describe('Large result Set Tests', function () { sqlText: selectAllFromOrders, complete: function (err, stmt) { testUtil.checkError(err); - var stream = stmt.streamRows(); - var rowCount = 0; + const stream = stmt.streamRows(); + let rowCount = 0; stream.on('data', function () { rowCount++; }); @@ -49,8 +48,8 @@ describe('Large result Set Tests', function () { sqlText: selectAllFromOrders, complete: function (err, stmt) { testUtil.checkError(err); - var rowCount = 0; - var stream = stmt.streamRows({ + let rowCount = 0; + const stream = stmt.streamRows({ start: offset }); stream.on('data', function () { @@ -73,8 +72,8 @@ describe('Large result Set Tests', function () { sqlText: selectAllFromOrders, complete: function (err, stmt) { testUtil.checkError(err); - var rowCount = 0; - var stream = stmt.streamRows({ + let rowCount = 0; + const stream = stmt.streamRows({ start: offset }); stream.on('data', function () { @@ -186,8 +185,8 @@ describe('Large result Set Tests', function () { if (err) { callback(err); } else { - var stream = stmt.streamRows(); - var rowCount = 0; + const stream = stmt.streamRows(); + let rowCount = 0; stream.on('data', function () { rowCount++; }); diff --git a/test/integration/testManualConnection.js b/test/integration/testManualConnection.js index 7562dd725..5d33e5543 100644 --- a/test/integration/testManualConnection.js +++ b/test/integration/testManualConnection.js @@ -16,7 +16,7 @@ if (process.env.RUN_MANUAL_TESTS_ONLY === 'true') { connection.connectAsync(function (err, connection) { try { assert.ok(connection.isUp(), 'not active'); - testUtil.destroyConnection(connection, function (err, r) { + testUtil.destroyConnection(connection, function () { try { assert.ok(!connection.isUp(), 'not active'); done(); @@ -282,7 +282,7 @@ if (process.env.RUN_MANUAL_TESTS_ONLY === 'true') { }); function executeSingleQuery() { - return new Promise((resolve, reject) => { + return new Promise((resolve) => { const start = Date.now(); connection.execute({ sqlText: `SELECT VALUE @@ -296,10 +296,10 @@ if (process.env.RUN_MANUAL_TESTS_ONLY === 'true') { .on('error', function (err) { throw err; }) - .on('data', function (row) { + .on('data', function () { return; }) - .on('end', function (row) { + .on('end', function () { const end = Date.now(); const time = end - start; resolve(time); diff --git a/test/integration/testMultiStatement.js b/test/integration/testMultiStatement.js index c7260da33..aa9316507 100644 --- a/test/integration/testMultiStatement.js +++ b/test/integration/testMultiStatement.js @@ -1,23 +1,23 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var async = require('async'); -var assert = require('assert'); -var testUtil = require('./testUtil'); -var Util = require('./../../lib/util'); +const async = require('async'); +const assert = require('assert'); +const testUtil = require('./testUtil'); +const Util = require('./../../lib/util'); const Logger = require('../../lib/logger'); describe('Test multi statement', function () { - var connection; - var alterSessionMultiStatement0 = 'alter session set MULTI_STATEMENT_COUNT=0'; - var selectTable = 'select ?; select ?,3; select ?,5,6'; + let connection; + const alterSessionMultiStatement0 = 'alter session set MULTI_STATEMENT_COUNT=0'; + const selectTable = 'select ?; select ?,3; select ?,5,6'; before(function (done) { connection = testUtil.createConnection(); testUtil.connect(connection, function () { connection.execute({ sqlText: alterSessionMultiStatement0, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); done(); } @@ -43,15 +43,15 @@ describe('Test multi statement', function () { } }); }, - function (callback) { - var bindArr = [1, 2, 4]; - var count = 0; + function () { + const bindArr = [1, 2, 4]; + let count = 0; connection.execute({ sqlText: selectTable, binds: bindArr, complete: function (err, stmt) { testUtil.checkError(err); - var stream = stmt.streamRows(); + const stream = stmt.streamRows(); stream.on('error', function (err) { testUtil.checkError(err); }); diff --git a/test/integration/testOcsp.js b/test/integration/testOcsp.js index 221b20913..174d257b4 100644 --- a/test/integration/testOcsp.js +++ b/test/integration/testOcsp.js @@ -35,13 +35,13 @@ describe('OCSP validation', function () { }); }, function (callback) { - var numErrors = 0; - var numStmtsExecuted = 0; - var numStmtsTotal = 20; + let numErrors = 0; + let numStmtsExecuted = 0; + const numStmtsTotal = 20; // execute a simple statement several times // and make sure there are no errors - for (var index = 0; index < numStmtsTotal; index++) { + for (let index = 0; index < numStmtsTotal; index++) { connection.execute( { sqlText: 'select 1;', @@ -80,13 +80,13 @@ describe('OCSP validation', function () { }); }, function (callback) { - var numErrors = 0; - var numStmtsExecuted = 0; - var numStmtsTotal = 5; + let numErrors = 0; + let numStmtsExecuted = 0; + const numStmtsTotal = 5; // execute a simple statement several times // and make sure there are no errors - for (var index = 0; index < numStmtsTotal; index++) { + for (let index = 0; index < numStmtsTotal; index++) { setTimeout(function () { connection.execute( { @@ -299,12 +299,12 @@ describe('OCSP privatelink', function () { } }; - const host = Util.construct_hostname(connOption.privatelink.region, connOption.privatelink.account); + const host = Util.constructHostname(connOption.privatelink.region, connOption.privatelink.account); const ocspResponseCacheServerUrl = `http://ocsp.${host}/ocsp_response_cache.json`; const ocspResponderUrl = `http://ocsp.${host}/retry/${mockParsedUrl.hostname}/${mockDataBuf.toString('base64')}`; it('Account with privatelink', function (done) { - var connection = snowflake.createConnection(connOption.privatelink); + const connection = snowflake.createConnection(connOption.privatelink); connection.connect(function (err) { assert.ok(!err, JSON.stringify(err)); @@ -322,7 +322,7 @@ describe('OCSP privatelink', function () { }); it('Account without privatelink', function (done) { - var connection = snowflake.createConnection(connOption.valid); + const connection = 
snowflake.createConnection(connOption.valid); connection.connect(function (err) { assert.ok(!err, JSON.stringify(err)); @@ -339,7 +339,7 @@ describe('OCSP privatelink', function () { // Skipped - requires manual interaction to set the network interface in system command and enter sudo user password describe.skip('Test Ocsp with network delay', function () { this.timeout(500000); - var connection; + let connection; before(function (done) { exec('sudo tc qdisc add dev eth0 root netem delay 5000ms'); @@ -361,7 +361,7 @@ describe.skip('Test Ocsp with network delay', function () { async.series([ function (callback) { - connection.connect(function (err, conn) { + connection.connect(function (err) { assert.ok(!err, JSON.stringify(err)); callback(); }); diff --git a/test/integration/testPutGet.js b/test/integration/testPutGet.js index fabc88c92..88351c82f 100644 --- a/test/integration/testPutGet.js +++ b/test/integration/testPutGet.js @@ -1100,14 +1100,13 @@ describe('PUT GET test with error', function () { let connection; const TEMP_TABLE_NAME = randomizeName('TEMP_TABLE'); const stage = `@${DATABASE_NAME}.${SCHEMA_NAME}.%${TEMP_TABLE_NAME}`; - const stage_not_exist = `@${DATABASE_NAME}.${SCHEMA_NAME}.%NONEXISTTABLE`; + const stageNotExist = `@${DATABASE_NAME}.${SCHEMA_NAME}.%NONEXISTTABLE`; const createTable = `create or replace table ${TEMP_TABLE_NAME} (${COL1} STRING, ${COL2} STRING, ${COL3} STRING)`; const removeFile = `REMOVE ${stage}`; const dropTable = `DROP TABLE IF EXISTS ${TEMP_TABLE_NAME}`; let tmpFile = null; let tmpfilePath = null; - const testCases = null; before(async () => { // Create a temp file without specified file extension @@ -1153,7 +1152,7 @@ describe('PUT GET test with error', function () { async.series( [ function (callback) { - verifyCompilationError(`PUT ${tmpfilePath} ${stage_not_exist}`, callback); + verifyCompilationError(`PUT ${tmpfilePath} ${stageNotExist}`, callback); } ], done diff --git a/test/integration/testPutSmallFiles.js b/test/integration/testPutSmallFiles.js index 9196740d9..ea8d938f8 100644 --- a/test/integration/testPutSmallFiles.js +++ b/test/integration/testPutSmallFiles.js @@ -1,12 +1,12 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var async = require('async'); -var assert = require('assert'); -var fs = require('fs'); -var os = require('os'); -var path = require('path'); -var testUtil = require('./testUtil'); +const async = require('async'); +const assert = require('assert'); +const fs = require('fs'); +const os = require('os'); +const path = require('path'); +const testUtil = require('./testUtil'); const connOption = require('./connectionOptions'); const { randomizeName } = require('./testUtil'); @@ -15,15 +15,15 @@ const SCHEMA_NAME = connOption.valid.schema; const WAREHOUSE_NAME = connOption.valid.warehouse; const TABLE = randomizeName('TESTTBL'); -var connection; -var files = new Array(); +let connection; +const files = new Array(); function uploadFiles(callback, index = 0) { if (index < files.length) { - var putQuery = `PUT file://${files[index]} @${DATABASE_NAME}.${SCHEMA_NAME}.%${TABLE}`; - var insertStmt = connection.execute({ + const putQuery = `PUT file://${files[index]} @${DATABASE_NAME}.${SCHEMA_NAME}.%${TABLE}`; + connection.execute({ sqlText: putQuery, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); if (!err) { index++; @@ -40,12 +40,12 @@ function uploadFiles(callback, index = 0) { describe('Test Put Small Files', function () { this.timeout(100000); - var useWH = `use warehouse ${WAREHOUSE_NAME}`; - var createTable = `create or replace table ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE}(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)`; - var copytInto = `copy into ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE}`; - var select1row = `select * from ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE} where colB = 3`; - var selectAll = `select count(*) AS NUM from ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE}`; - var count = 5000; + const useWH = `use warehouse ${WAREHOUSE_NAME}`; + const createTable = `create or replace table ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE}(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)`; + const copytInto = `copy into ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE}`; + const select1row = `select * from ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE} where colB = 3`; + const selectAll = `select count(*) AS NUM from ${DATABASE_NAME}.${SCHEMA_NAME}.${TABLE}`; + const count = 5000; before(function (done) { connection = testUtil.createConnection(); @@ -69,32 +69,32 @@ describe('Test Put Small Files', function () { async.series( [ function (callback) { - var createTableStmt = connection.execute({ + connection.execute({ sqlText: createTable, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); callback(); } }); }, function (callback) { - var arrBind = []; - var filesize = 1024 * 100; + const arrBind = []; + const filesize = 1024 * 100; - for (var i = 0; i < count; i++) { + for (let i = 0; i < count; i++) { arrBind.push(['string' + i, i, '2020-05-11', '12:35:41.3333333', '2022-04-01 23:59:59', '2022-07-08 12:05:30.9999999']); } - var fileCount = 0; - var strbuffer = ''; + let fileCount = 0; + let strbuffer = ''; - var tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tmp')); + let tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tmp')); if (tmpDir.indexOf('~') !== -1 && process.platform === 'win32') { - var tmpFolderName = tmpDir.substring(tmpDir.lastIndexOf('\\')); + const tmpFolderName = tmpDir.substring(tmpDir.lastIndexOf('\\')); tmpDir = process.env.USERPROFILE + '\\AppData\\Local\\Temp\\' + tmpFolderName; } - for (var i = 0; i < arrBind.length; i++) { - for (var j = 
0; j < arrBind[i].length; j++) { + for (let i = 0; i < arrBind.length; i++) { + for (let j = 0; j < arrBind[i].length; j++) { if (j > 0) { strbuffer += ','; } @@ -103,14 +103,14 @@ describe('Test Put Small Files', function () { strbuffer += '\n'; if ((strbuffer.length >= filesize) || (i === arrBind.length - 1)) { - var fileName = path.join(tmpDir, (++fileCount).toString()); + const fileName = path.join(tmpDir, (++fileCount).toString()); fs.writeFileSync(fileName, strbuffer); files.push(fileName); strbuffer = ''; } } - var callbackfunc = function () { - for (var fileName in files) { + const callbackfunc = function () { + for (const fileName in files) { if (fs.existsSync(fileName)) { fs.unlinkSync(fileName); } @@ -120,22 +120,22 @@ describe('Test Put Small Files', function () { uploadFiles(callbackfunc, 0); }, function copy(callback) { - var copyintostmt = connection.execute({ + connection.execute({ sqlText: copytInto, - complete: function (err, stmt) { + complete: function (err) { testUtil.checkError(err); callback(); } }); }, function select(callback) { - var selectstmt = connection.execute({ + connection.execute({ sqlText: select1row, complete: function (err, stmt, rows) { testUtil.checkError(err); assert.strictEqual(rows[0]['COLA'], 'string3'); - var dateValue = new Date(rows[0]['COLC']).getTime(); - var timeValue = new Date(rows[0]['COLE']).getTime(); + const dateValue = new Date(rows[0]['COLC']).getTime(); + const timeValue = new Date(rows[0]['COLE']).getTime(); assert.strictEqual(dateValue.toString(), '1589155200000'); assert.strictEqual(timeValue.toString(), '1648857599000'); callback(); @@ -143,7 +143,7 @@ describe('Test Put Small Files', function () { }); }, function selectall(callback) { - var selectstmt = connection.execute({ + connection.execute({ sqlText: selectAll, complete: function (err, stmt, rows) { testUtil.checkError(err); diff --git a/test/integration/testStatement.js b/test/integration/testStatement.js index e7ce0dd1a..1096a70dc 100644 --- a/test/integration/testStatement.js +++ b/test/integration/testStatement.js @@ -1,15 +1,15 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var snowflake = require('./../../lib/snowflake'); -var Core = require('./../../lib/core'); -var assert = require('assert'); -var async = require('async'); -var connectionOptions = require('./connectionOptions'); +const snowflake = require('./../../lib/snowflake'); +const Core = require('./../../lib/core'); +const assert = require('assert'); +const async = require('async'); +const connectionOptions = require('./connectionOptions'); const Errors = require('./../../lib/errors'); const ErrorCodes = Errors.codes; -var Util = require('./../../lib/util'); -var testUtil = require('./testUtil'); +const Util = require('./../../lib/util'); +const testUtil = require('./testUtil'); describe('Statement Tests', function () { let connection; @@ -34,10 +34,11 @@ describe('Statement Tests', function () { const tokenConn = coreInst.createConnection(connectionOptions.valid); let goodConnection; + let statement; async.series( [ function (callback) { - tokenConn.connect(function (err, conn) { + tokenConn.connect(function (err) { assert.ok(!err, 'there should be no error'); const sessionToken = tokenConn.getTokens().sessionToken; assert.ok(sessionToken); @@ -47,8 +48,7 @@ describe('Statement Tests', function () { callback(); }); - } - , + }, function (callback) { statement = goodConnection.execute( { @@ -59,7 +59,7 @@ describe('Statement Tests', function () { 'the execute() callback should be invoked with the statement'); // we should only have one column c1 - var columns = statement.getColumns(); + const columns = statement.getColumns(); assert.ok(columns); assert.strictEqual(columns.length, 1); assert.ok(columns[0]); @@ -91,7 +91,7 @@ describe('Statement Tests', function () { callback(); }, function (callback) { - var rows = []; + const rows = []; statement.fetchRows( { each: function (row) { @@ -121,13 +121,14 @@ describe('Statement Tests', function () { const badConnection = snowflake.createConnection(Object.assign({}, connectionOptions.valid, { username: undefined, password: undefined, sessionToken: 'invalid token' })); + let statement; async.series( [ function (callback) { statement = badConnection.execute( { sqlText: sqlText, - complete: function (err, stmt) { + complete: function (err) { assert.ok(err !== undefined, 'expect an error'); assert.ok(err.code === ErrorCodes.ERR_SF_RESPONSE_INVALID_TOKEN, 'Should throw invalid token error'); callback(); @@ -156,7 +157,7 @@ describe('Statement Tests', function () { }); it('statement api', function (done) { - var statement; + let statement; async.series( [ @@ -179,7 +180,7 @@ describe('Statement Tests', function () { 'the execute() callback should be invoked with the statement'); // we should only have one column c1 - var columns = statement.getColumns(); + const columns = statement.getColumns(); assert.ok(columns); assert.strictEqual(columns.length, 1); assert.ok(columns[0]); @@ -210,7 +211,7 @@ describe('Statement Tests', function () { callback(); }, function (callback) { - var rows = []; + const rows = []; statement.fetchRows( { each: function (row) { @@ -248,10 +249,10 @@ describe('Call Statement', function () { async.series( [ function (callback) { - var statement = connection.execute({ + const statement = connection.execute({ sqlText: 'ALTER SESSION SET USE_STATEMENT_TYPE_CALL_FOR_STORED_PROC_CALLS=true;', - complete: function (err, stmt, rows) { - var stream = statement.streamRows(); + complete: function () { + const stream = statement.streamRows(); stream.on('error', function (err) { // Expected error - SqlState: 22023, VendorCode: 1006 
assert.strictEqual('22023', err.sqlState); @@ -265,7 +266,7 @@ describe('Call Statement', function () { }); }, function (callback) { - var statement = connection.execute({ + const statement = connection.execute({ sqlText: 'create or replace procedure\n' + 'TEST_SP_CALL_STMT_ENABLED(in1 float, in2 variant)\n' + 'returns string language javascript as $$\n' @@ -273,51 +274,51 @@ describe('Call Statement', function () { + 'res.next();\n' + 'return res.getColumnValueAsString(1) + \' \' + res.getColumnValueAsString(2) + \' \' + IN2;\n' + '$$;', - complete: function (err, stmt, rows) { - var stream = statement.streamRows(); + complete: function () { + const stream = statement.streamRows(); stream.on('error', function (err) { done(err); }); stream.on('data', function (row) { assert.strictEqual(true, row.status.includes('success')); }); - stream.on('end', function (row) { + stream.on('end', function () { callback(); }); } }); }, function (callback) { - var statement = connection.execute({ + const statement = connection.execute({ sqlText: 'call TEST_SP_CALL_STMT_ENABLED(?, to_variant(?))', binds: [1, '[2,3]'], - complete: function (err, stmt, rows) { - var stream = statement.streamRows(); + complete: function () { + const stream = statement.streamRows(); stream.on('error', function (err) { done(err); }); stream.on('data', function (row) { - var result = '1 "[2,3]" [2,3]'; + const result = '1 "[2,3]" [2,3]'; assert.strictEqual(result, row.TEST_SP_CALL_STMT_ENABLED); }); - stream.on('end', function (row) { + stream.on('end', function () { callback(); }); } }); }, function (callback) { - var statement = connection.execute({ + const statement = connection.execute({ sqlText: 'drop procedure if exists TEST_SP_CALL_STMT_ENABLED(float, variant)', - complete: function (err, stmt, rows) { - var stream = statement.streamRows(); + complete: function () { + const stream = statement.streamRows(); stream.on('error', function (err) { done(err); }); stream.on('data', function (row) { assert.strictEqual(true, row.status.includes('success')); }); - stream.on('end', function (row) { + stream.on('end', function () { callback(); }); } diff --git a/test/integration/testStreamRows.js b/test/integration/testStreamRows.js index 465c92011..ac8531446 100644 --- a/test/integration/testStreamRows.js +++ b/test/integration/testStreamRows.js @@ -1,13 +1,13 @@ /* * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var assert = require('assert'); -var async = require('async'); -var testUtil = require('./testUtil'); +const assert = require('assert'); +const async = require('async'); +const testUtil = require('./testUtil'); require('events').EventEmitter.prototype._maxListeners = 100; describe('Test Stream Rows API', function () { - var connection; + let connection; this.timeout(300000); @@ -24,7 +24,7 @@ describe('Test Stream Rows API', function () { connection.execute({ sqlText: 'select aaa from b aaa', complete: function (err, stmt) { - var stream = stmt.streamRows(); + const stream = stmt.streamRows(); stream.on('data', function () { assert.ok(false); }); @@ -41,8 +41,8 @@ describe('Test Stream Rows API', function () { sqlText: 'select randstr(10, random()) from table(generator(rowcount=>30000))', complete: function (err, stmt) { testUtil.checkError(err); - var rowCount = 0; - var flowingStream = stmt.streamRows({ + let rowCount = 0; + const flowingStream = stmt.streamRows({ start: 200, end: 300 }); @@ -63,8 +63,8 @@ describe('Test Stream Rows API', function () { sqlText: 'select randstr(10, random()) from table(generator(rowcount=>30000))', complete: function (err, stmt) { testUtil.checkError(err); - var rowCount = 0; - var nonFlowingStream = stmt.streamRows({ + let rowCount = 0; + const nonFlowingStream = stmt.streamRows({ start: 200, end: 300 }); @@ -87,8 +87,8 @@ describe('Test Stream Rows API', function () { sqlText: 'select randstr(10, random()) c1 from table(generator(rowcount=>10)) where c1=\'abc\'', complete: function (err, stmt) { testUtil.checkError(err); - var completedStream = 0; - var flowingStream = stmt.streamRows(); + let completedStream = 0; + const flowingStream = stmt.streamRows(); flowingStream.on('data', function () { assert.ok(false); }).on('error', function (err) { @@ -99,7 +99,7 @@ describe('Test Stream Rows API', function () { } }); - var nonFlowingStream = stmt.streamRows(); + const nonFlowingStream = stmt.streamRows(); nonFlowingStream.on('readable', function () { assert.strictEqual(nonFlowingStream.read(), null); }).on('end', function () { @@ -115,7 +115,7 @@ describe('Test Stream Rows API', function () { }); it('testSmallResultSet', function (done) { - var expected = + const expected = [ { COLUMN1: '1', @@ -152,23 +152,23 @@ describe('Test Stream Rows API', function () { } ]; - var values = []; + const values = []; expected.forEach(function (entry) { - var value = []; - for (var e in entry) { - var v = entry[e]; + const value = []; + for (const e in entry) { + const v = entry[e]; value.push('\'' + v + '\''); } values.push('(' + value.join(',') + ')'); }); - var sql = 'select * from values' + values.join(','); + const sql = 'select * from values' + values.join(','); connection.execute({ sqlText: sql, complete: function (err, stmt) { testUtil.checkError(err); - var completedStream = 0; - var flowingStream = stmt.streamRows(); - var flowingModeResult = []; + let completedStream = 0; + const flowingStream = stmt.streamRows(); + const flowingModeResult = []; flowingStream.on('data', function (row) { flowingModeResult.push(row); }).on('error', function (err) { @@ -180,10 +180,10 @@ describe('Test Stream Rows API', function () { } }); - var nonFlowingModeResult = []; - var nonFlowingStream = stmt.streamRows(); + const nonFlowingModeResult = []; + const nonFlowingStream = stmt.streamRows(); nonFlowingStream.on('readable', function () { - var row; + let row; while ((row = nonFlowingStream.read()) !== null) { nonFlowingModeResult.push(row); } @@ -205,14 +205,14 @@ 
describe('Test Stream Rows API', function () { sqlText: 'select true from table(generator(rowcount=>' + sourceRowCount + '))', complete: function (err, stmt) { testUtil.checkError(err); - var streamQueue = []; - var completedStream = 0; - for (var i = 0; i < 20; i++) { + const streamQueue = []; + let completedStream = 0; + for (let i = 0; i < 20; i++) { streamQueue.push(stmt.streamRows()); } - var flowingStreamRegister = function (stream) { - var rowCount = 0; + const flowingStreamRegister = function (stream) { + let rowCount = 0; stream.on('data', function () { rowCount++; }).on('error', function (err) { @@ -225,7 +225,7 @@ describe('Test Stream Rows API', function () { }); }; - for (i = 0; i < 20; i++) { + for (let i = 0; i < 20; i++) { flowingStreamRegister(streamQueue[i]); } } @@ -238,8 +238,8 @@ describe('Test Stream Rows API', function () { sqlText: 'select true from table(generator(rowcount=>' + sourceRowCount + '))', complete: function (err, stmt) { testUtil.checkError(err); - var rowCount = 0; - var stream = stmt.streamRows(); + let rowCount = 0; + const stream = stmt.streamRows(); stream.on('data', function () { rowCount++; }).on('error', function (err) { @@ -263,14 +263,14 @@ describe('Test Stream Rows API', function () { it('testLargeResultSet', function (done) { // The test should finish in around 3 min this.timeout(180000); - var expectedRowCount = 5000000; + const expectedRowCount = 5000000; connection.execute({ sqlText: 'select randstr(10, random()) from table(generator(rowcount=>' + expectedRowCount + '))', streamResult: true, complete: function (err, stmt) { testUtil.checkError(err); - var rowCount = 0; - var stream = stmt.streamRows(); + let rowCount = 0; + const stream = stmt.streamRows(); stream.on('data', function () { rowCount++; }).on('end', function () { @@ -322,6 +322,7 @@ describe('Test Stream Rows API', function () { describe('Test Stream Rows HighWaterMark', function () { this.timeout(300000); + let connection; before(function (done) { connection = testUtil.createConnection(); @@ -332,19 +333,19 @@ describe('Test Stream Rows HighWaterMark', function () { testUtil.destroyConnection(connection, done); }); - var testingFunc = function (highWaterMark, expectedRowCount, callback) { + const testingFunc = function (highWaterMark, expectedRowCount, callback) { async.series( [ function (callback) { // select table with row count equal to expectedRowCount - var statement = connection.execute({ + const statement = connection.execute({ sqlText: `SELECT seq8() FROM table(generator(rowCount => ${expectedRowCount}));`, streamResult: true, complete: function () { - var actualRowCount = 0; - var rowIndex; + let actualRowCount = 0; + let rowIndex; - var stream = statement.streamRows(); + const stream = statement.streamRows(); stream.on('error', function (err) { callback(err); }); diff --git a/test/integration/testUpdatedRows.js b/test/integration/testUpdatedRows.js index 70a46777f..d676d9f84 100644 --- a/test/integration/testUpdatedRows.js +++ b/test/integration/testUpdatedRows.js @@ -2,12 +2,12 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var assert = require('assert'); -var async = require('async'); -var testUtil = require('./testUtil'); +const assert = require('assert'); +const async = require('async'); +const testUtil = require('./testUtil'); describe('Test updated rows', function () { - var connection; + let connection; const dropTableFoo = 'drop table if exists foo'; const dropTableBar = 'drop table if exists bar'; const dropTableFoo2 = 'drop table if exists foo2'; @@ -54,11 +54,11 @@ describe('Test updated rows', function () { it('insert, update, delete', function (done) { async.series([ function (callback) { - var sqlText = 'create or replace table foo (c1 number, c2 number);'; + const sqlText = 'create or replace table foo (c1 number, c2 number);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -66,7 +66,7 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'insert into foo values' + + const sqlText = 'insert into foo values' + ' (1,10)' + ',(2,10)' + ',(3,10)' + @@ -78,7 +78,7 @@ describe('Test updated rows', function () { connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows inserted': 8 assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), 8); @@ -87,11 +87,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'create or replace table foo2 (c3 number, c4 number);'; + const sqlText = 'create or replace table foo2 (c3 number, c4 number);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -99,7 +99,7 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'insert into foo2 values' + + const sqlText = 'insert into foo2 values' + ' (1,5)' + ',(2,5)' + ',(3,5)' + @@ -107,7 +107,7 @@ describe('Test updated rows', function () { connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows inserted': 4 assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), 4); @@ -116,12 +116,12 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = + const sqlText = 'update foo set c2 = c4 from foo2 where foo.c1 = foo2.c3;'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows updated': 3 // 'number of multi-joined rows updated': 1 assert.ok(!err); @@ -131,11 +131,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'delete from foo;'; + const sqlText = 'delete from foo;'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows deleted': 8 assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), 8); @@ -149,11 +149,11 @@ describe('Test updated rows', function () { it('merge', function (done) { async.series([ function (callback) { - var sqlText = 'create or replace table fooMaster (k number, v number);'; + const sqlText = 'create or replace table fooMaster (k number, v number);'; connection.execute( { sqlText: sqlText, - complete: 
function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -161,11 +161,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'insert into fooMaster values (0, 10), (1, 11), (5, 15), (6, 16);'; + const sqlText = 'insert into fooMaster values (0, 10), (1, 11), (5, 15), (6, 16);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows inserted': 4 assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), 4); @@ -174,11 +174,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'create or replace table foo clone fooMaster;'; + const sqlText = 'create or replace table foo clone fooMaster;'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -186,11 +186,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'create or replace table bar (k number, v number);'; + const sqlText = 'create or replace table bar (k number, v number);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -198,11 +198,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'insert into bar values (0, 20), (1, 21), (2, 22), (3, 23), (4, 24);'; + const sqlText = 'insert into bar values (0, 20), (1, 21), (2, 22), (3, 23), (4, 24);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows inserted': 5 assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), 5); @@ -211,14 +211,14 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'merge into foo using bar on foo.k = bar.k ' + + const sqlText = 'merge into foo using bar on foo.k = bar.k ' + 'when matched and foo.k = 0 then update set v = bar.v*100 ' + 'when matched and foo.k = 1 then delete ' + 'when not matched then insert values (k,v);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows inserted': 3 // 'number of rows updated': 1 // 'number of rows deleted': 1 @@ -234,11 +234,11 @@ describe('Test updated rows', function () { it('multi-table insert', function (done) { async.series([ function (callback) { - var sqlText = 'create or replace table source(k number, v number);'; + const sqlText = 'create or replace table source(k number, v number);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -246,11 +246,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'insert into source values (0, 100), (1, 101), (2, 102);'; + const sqlText = 'insert into source values (0, 100), (1, 101), (2, 102);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows inserted': 3 assert.ok(!err); 
assert.strictEqual(statement.getNumUpdatedRows(), 3); @@ -259,11 +259,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'create or replace table foo (k number, v number);'; + const sqlText = 'create or replace table foo (k number, v number);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -271,11 +271,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'create or replace table bar (k number, v number);'; + const sqlText = 'create or replace table bar (k number, v number);'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { assert.ok(!err); assert.strictEqual(statement.getNumUpdatedRows(), -1); callback(); @@ -283,11 +283,11 @@ describe('Test updated rows', function () { }); }, function (callback) { - var sqlText = 'insert ALL into foo into bar select * from source;'; + const sqlText = 'insert ALL into foo into bar select * from source;'; connection.execute( { sqlText: sqlText, - complete: function (err, statement, rows) { + complete: function (err, statement) { // 'number of rows inserted into FOO': 3 // 'number of rows inserted into BAR': 3 assert.ok(!err); diff --git a/test/unit/authentication/authentication_test.js b/test/unit/authentication/authentication_test.js index 96cb941e0..a9dcbe743 100644 --- a/test/unit/authentication/authentication_test.js +++ b/test/unit/authentication/authentication_test.js @@ -2,34 +2,34 @@ * Copyright (c) 2021 Snowflake Computing Inc. All rights reserved. */ -var assert = require('assert'); -var mock = require('mock-require'); -var net = require('net'); +const assert = require('assert'); +const mock = require('mock-require'); +const net = require('net'); -var authenticator = require('./../../../lib/authentication/authentication'); -var auth_default = require('./../../../lib/authentication/auth_default'); -var auth_web = require('./../../../lib/authentication/auth_web'); -var auth_keypair = require('./../../../lib/authentication/auth_keypair'); -var auth_oauth = require('./../../../lib/authentication/auth_oauth'); -var auth_okta = require('./../../../lib/authentication/auth_okta'); -var authenticationTypes = require('./../../../lib/authentication/authentication').authenticationTypes; +const authenticator = require('./../../../lib/authentication/authentication'); +const AuthDefault = require('./../../../lib/authentication/auth_default'); +const AuthWeb = require('./../../../lib/authentication/auth_web'); +const AuthKeypair = require('./../../../lib/authentication/auth_keypair'); +const AuthOauth = require('./../../../lib/authentication/auth_oauth'); +const AuthOkta = require('./../../../lib/authentication/auth_okta'); +const authenticationTypes = require('./../../../lib/authentication/authentication').authenticationTypes; -var MockTestUtil = require('./../mock/mock_test_util'); +const MockTestUtil = require('./../mock/mock_test_util'); // get connection options to connect to this mock snowflake instance -var mockConnectionOptions = MockTestUtil.connectionOptions; -var connectionOptions = mockConnectionOptions.default; -var connectionOptionsDefault = mockConnectionOptions.authDefault; -var connectionOptionsExternalBrowser = mockConnectionOptions.authExternalBrowser; -var connectionOptionsKeyPair = mockConnectionOptions.authKeyPair; 
-var connectionOptionsKeyPairPath = mockConnectionOptions.authKeyPairPath; -var connectionOptionsOauth = mockConnectionOptions.authOauth; -var connectionOptionsOkta = mockConnectionOptions.authOkta; +const mockConnectionOptions = MockTestUtil.connectionOptions; +const connectionOptions = mockConnectionOptions.default; +const connectionOptionsDefault = mockConnectionOptions.authDefault; +const connectionOptionsExternalBrowser = mockConnectionOptions.authExternalBrowser; +const connectionOptionsKeyPair = mockConnectionOptions.authKeyPair; +const connectionOptionsKeyPairPath = mockConnectionOptions.authKeyPairPath; +const connectionOptionsOauth = mockConnectionOptions.authOauth; +const connectionOptionsOkta = mockConnectionOptions.authOkta; describe('default authentication', function () { it('default - authenticate method is thenable', done => { - const auth = new auth_default(connectionOptions.password); + const auth = new AuthDefault(connectionOptions.password); auth.authenticate() .then(done) @@ -37,9 +37,9 @@ describe('default authentication', function () { }); it('default - check password', function () { - var auth = new auth_default(connectionOptions.password); + const auth = new AuthDefault(connectionOptions.password); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual( @@ -47,7 +47,7 @@ describe('default authentication', function () { }); it('default - check authenticator', function () { - var body = authenticator.formAuthJSON(connectionOptionsDefault.authenticator, + const body = authenticator.formAuthJSON(connectionOptionsDefault.authenticator, connectionOptionsDefault.account, connectionOptionsDefault.username, {}, {}, {}); @@ -58,8 +58,9 @@ describe('default authentication', function () { }); describe('external browser authentication', function () { - var webbrowser; - var browserRedirectPort; + let webbrowser; + let browserRedirectPort; + let httpclient; const mockProofKey = 'mockProofKey'; const mockToken = 'mockToken'; @@ -75,8 +76,8 @@ describe('external browser authentication', function () { before(function () { mock('webbrowser', { - open: function (url) { - var client = net.createConnection({ port: browserRedirectPort }, () => { + open: function () { + const client = net.createConnection({ port: browserRedirectPort }, () => { client.write(`GET /?token=${mockToken} HTTP/1.1\r\n`); }); return; @@ -104,7 +105,7 @@ describe('external browser authentication', function () { }); it('external browser - authenticate method is thenable', done => { - const auth = new auth_web(connectionConfig, httpclient, webbrowser.open); + const auth = new AuthWeb(connectionConfig, httpclient, webbrowser.open); auth.authenticate(credentials.authenticator, '', credentials.account, credentials.username, credentials.host) .then(done) @@ -112,10 +113,10 @@ describe('external browser authentication', function () { }); it('external browser - get success', async function () { - const auth = new auth_web(connectionConfig, httpclient, webbrowser.open); + const auth = new AuthWeb(connectionConfig, httpclient, webbrowser.open); await auth.authenticate(credentials.authenticator, '', credentials.account, credentials.username, credentials.host); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual(body['data']['TOKEN'], mockToken); @@ -124,8 +125,8 @@ describe('external browser authentication', function () { it('external browser - get fail', async function () { mock('webbrowser', { - open: function (url) { - var client = 
net.createConnection({ port: browserRedirectPort }, () => { + open: function () { + const client = net.createConnection({ port: browserRedirectPort }, () => { client.write('\r\n'); }); return; @@ -151,10 +152,10 @@ describe('external browser authentication', function () { webbrowser = require('webbrowser'); httpclient = require('httpclient'); - const auth = new auth_web(connectionConfig, httpclient, webbrowser.open); + const auth = new AuthWeb(connectionConfig, httpclient, webbrowser.open); await auth.authenticate(credentials.authenticator, '', credentials.account, credentials.username, credentials.host); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual(typeof body['data']['TOKEN'], 'undefined'); @@ -162,7 +163,7 @@ describe('external browser authentication', function () { }); it('external browser - check authenticator', function () { - var body = authenticator.formAuthJSON(connectionOptionsExternalBrowser.authenticator, + const body = authenticator.formAuthJSON(connectionOptionsExternalBrowser.authenticator, connectionOptionsExternalBrowser.account, connectionOptionsExternalBrowser.username, {}, {}, {}); @@ -173,13 +174,13 @@ describe('external browser authentication', function () { }); describe('key-pair authentication', function () { - var cryptomod; - var jwtmod; - var filesystem; + let cryptomod; + let jwtmod; + let filesystem; - var mockToken = 'mockToken'; - var mockPrivateKeyFile = 'mockPrivateKeyFile'; - var mockPublicKeyObj = 'mockPublicKeyObj'; + const mockToken = 'mockToken'; + const mockPrivateKeyFile = 'mockPrivateKeyFile'; + const mockPublicKeyObj = 'mockPublicKeyObj'; before(function () { mock('cryptomod', { @@ -223,12 +224,12 @@ describe('key-pair authentication', function () { } }); mock('jwtmod', { - sign: function (payload, privateKey, algorithm) { + sign: function () { return mockToken; } }); mock('filesystem', { - readFileSync: function (path) { + readFileSync: function () { return mockPrivateKeyFile; } }); @@ -239,7 +240,7 @@ describe('key-pair authentication', function () { }); it('key-pair - authenticate method is thenable', done => { - const auth = new auth_keypair(connectionOptionsKeyPair.privateKey, + const auth = new AuthKeypair(connectionOptionsKeyPair.privateKey, connectionOptionsKeyPair.privateKeyPath, connectionOptionsKeyPair.privateKeyPass, cryptomod, jwtmod, filesystem); @@ -250,14 +251,14 @@ describe('key-pair authentication', function () { }); it('key-pair - get token with private key', function () { - var auth = new auth_keypair(connectionOptionsKeyPair.privateKey, + const auth = new AuthKeypair(connectionOptionsKeyPair.privateKey, connectionOptionsKeyPair.privateKeyPath, connectionOptionsKeyPair.privateKeyPass, cryptomod, jwtmod, filesystem); auth.authenticate(connectionOptionsKeyPair.authenticator, '', connectionOptionsKeyPair.account, connectionOptionsKeyPair.username); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual( @@ -265,7 +266,7 @@ describe('key-pair authentication', function () { }); it('key-pair - get token with private key path with passphrase', function () { - var auth = new auth_keypair(connectionOptionsKeyPairPath.privateKey, + const auth = new AuthKeypair(connectionOptionsKeyPairPath.privateKey, connectionOptionsKeyPairPath.privateKeyPath, connectionOptionsKeyPairPath.privateKeyPass, cryptomod, jwtmod, filesystem); @@ -274,7 +275,7 @@ describe('key-pair authentication', function () { connectionOptionsKeyPairPath.account, 
connectionOptionsKeyPairPath.username); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual( @@ -282,7 +283,7 @@ describe('key-pair authentication', function () { }); it('key-pair - get token with private key path without passphrase', function () { - var auth = new auth_keypair(connectionOptionsKeyPairPath.privateKey, + const auth = new AuthKeypair(connectionOptionsKeyPairPath.privateKey, connectionOptionsKeyPairPath.privateKeyPath, '', cryptomod, jwtmod, filesystem); @@ -291,7 +292,7 @@ describe('key-pair authentication', function () { connectionOptionsKeyPairPath.account, connectionOptionsKeyPairPath.username); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual( @@ -299,7 +300,7 @@ describe('key-pair authentication', function () { }); it('key-pair - check authenticator', function () { - var body = authenticator.formAuthJSON(connectionOptionsKeyPair.authenticator, + const body = authenticator.formAuthJSON(connectionOptionsKeyPair.authenticator, connectionOptionsKeyPair.account, connectionOptionsKeyPair.username, {}, {}, {}); @@ -311,7 +312,7 @@ describe('key-pair authentication', function () { describe('oauth authentication', function () { it('oauth - authenticate method is thenable', done => { - const auth = new auth_oauth(connectionOptionsOauth.token); + const auth = new AuthOauth(connectionOptionsOauth.token); auth.authenticate(connectionOptionsKeyPair.authenticator, '', connectionOptionsKeyPair.account, connectionOptionsKeyPair.username) .then(done) @@ -319,9 +320,9 @@ describe('oauth authentication', function () { }); it('oauth - check token', function () { - var auth = new auth_oauth(connectionOptionsOauth.token); + const auth = new AuthOauth(connectionOptionsOauth.token); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual( @@ -329,7 +330,7 @@ describe('oauth authentication', function () { }); it('oauth - check authenticator', function () { - var body = authenticator.formAuthJSON(connectionOptionsOauth.authenticator, + const body = authenticator.formAuthJSON(connectionOptionsOauth.authenticator, connectionOptionsOauth.account, connectionOptionsOauth.username, {}, {}, {}); @@ -340,19 +341,19 @@ describe('oauth authentication', function () { }); describe('okta authentication', function () { - var httpclient; + let httpclient; - var mockssoUrl = connectionOptionsOkta.authenticator; - var mockTokenUrl = connectionOptionsOkta.authenticator; - var mockCookieToken = 'mockCookieToken'; - var mockUrl = 'mockUrl'; + const mockssoUrl = connectionOptionsOkta.authenticator; + const mockTokenUrl = connectionOptionsOkta.authenticator; + const mockCookieToken = 'mockCookieToken'; + const mockUrl = 'mockUrl'; - var mockSamlResponse = '
'; + const mockSamlResponse = ''; before(function () { mock('httpclient', { - post: async function (url, body, header) { - var json; + post: async function (url) { + let json; if (url.startsWith('https://' + connectionOptionsOkta.account)) { json = { @@ -373,8 +374,8 @@ describe('okta authentication', function () { } return json; }, - get: async function (url, body, header) { - var json = + get: async function () { + const json = { data: mockSamlResponse }; @@ -386,7 +387,7 @@ describe('okta authentication', function () { }); it('okta - authenticate method is thenable', done => { - const auth = new auth_okta(connectionOptionsOkta.password, + const auth = new AuthOkta(connectionOptionsOkta.password, connectionOptionsOkta.region, connectionOptionsOkta.account, connectionOptionsOkta.clientAppid, @@ -399,7 +400,7 @@ describe('okta authentication', function () { }); it('okta - SAML response success', async function () { - var auth = new auth_okta(connectionOptionsOkta.password, + const auth = new AuthOkta(connectionOptionsOkta.password, connectionOptionsOkta.region, connectionOptionsOkta.account, connectionOptionsOkta.clientAppid, @@ -408,7 +409,7 @@ describe('okta authentication', function () { await auth.authenticate(connectionOptionsOkta.authenticator, '', connectionOptionsOkta.account, connectionOptionsOkta.username); - var body = { data: {} }; + const body = { data: {} }; auth.updateBody(body); assert.strictEqual( @@ -417,8 +418,8 @@ describe('okta authentication', function () { it('okta - SAML response fail prefix', async function () { mock('httpclient', { - post: async function (url, body, header) { - var json; + post: async function (url) { + let json; if (url.startsWith('https://' + connectionOptionsOkta.account)) { json = { @@ -438,7 +439,7 @@ describe('okta authentication', function () { httpclient = require('httpclient'); - var auth = new auth_okta(connectionOptionsOkta.password, + const auth = new AuthOkta(connectionOptionsOkta.password, connectionOptionsOkta.region, connectionOptionsOkta.account, connectionOptionsOkta.clientAppid, @@ -454,8 +455,8 @@ describe('okta authentication', function () { it('okta - SAML response fail postback', async function () { mock('httpclient', { - post: async function (url, body, header) { - var json; + post: async function (url) { + let json; if (url.startsWith('https://' + connectionOptionsOkta.account)) { json = { @@ -477,8 +478,8 @@ describe('okta authentication', function () { } return json; }, - get: async function (url, body, header) { - var json = + get: async function () { + const json = { data: mockUrl }; @@ -488,7 +489,7 @@ describe('okta authentication', function () { httpclient = require('httpclient'); - var auth = new auth_okta(connectionOptionsOkta.password, + const auth = new AuthOkta(connectionOptionsOkta.password, connectionOptionsOkta.region, connectionOptionsOkta.account, connectionOptionsOkta.clientAppid, @@ -505,7 +506,7 @@ describe('okta authentication', function () { }); it('okta - no authenticator should be added to the request body', function () { - var body = authenticator.formAuthJSON(connectionOptionsOkta.authenticator, + const body = authenticator.formAuthJSON(connectionOptionsOkta.authenticator, connectionOptionsOkta.account, connectionOptionsOkta.username, {}, {}, {}); diff --git a/test/unit/configuration/configuration_finding_test.js b/test/unit/configuration/configuration_finding_test.js index 211d06437..ae3488f47 100644 --- a/test/unit/configuration/configuration_finding_test.js +++ 
b/test/unit/configuration/configuration_finding_test.js @@ -5,7 +5,7 @@ const os = require('os'); const path = require('path'); const assert = require('assert'); const mock = require('mock-require'); -const { Levels, ClientConfig, ClientLoggingConfig, ConfigurationUtil } = require('./../../../lib/configuration/client_configuration'); +const { Levels, ConfigurationUtil } = require('./../../../lib/configuration/client_configuration'); const defaultConfigName = 'sf_client_config.json'; const configInDriverDirectory = path.join('.', defaultConfigName); const configInHomeDirectory = path.join(os.homedir(), defaultConfigName); diff --git a/test/unit/connection/connection_config_test.js b/test/unit/connection/connection_config_test.js index b30575251..2d5abe35d 100644 --- a/test/unit/connection/connection_config_test.js +++ b/test/unit/connection/connection_config_test.js @@ -2,16 +2,16 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. */ -var ConnectionConfig = require('./../../../lib/connection/connection_config'); -var ErrorCodes = require('./../../../lib/errors').codes; -var assert = require('assert'); +const ConnectionConfig = require('./../../../lib/connection/connection_config'); +const ErrorCodes = require('./../../../lib/errors').codes; +const assert = require('assert'); describe('ConnectionConfig: basic', function () { /////////////////////////////////////////////////////////////////////////// //// Test synchronous errors //// /////////////////////////////////////////////////////////////////////////// - var negativeTestCases = + const negativeTestCases = [ { name: 'missing options', @@ -692,9 +692,9 @@ describe('ConnectionConfig: basic', function () { }, ]; - var createNegativeITCallback = function (testCase) { + const createNegativeITCallback = function (testCase) { return function () { - var error; + let error; try { new ConnectionConfig(testCase.options); @@ -707,7 +707,7 @@ describe('ConnectionConfig: basic', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = negativeTestCases.length; index < length; index++) { testCase = negativeTestCases[index]; it(testCase.name, createNegativeITCallback(testCase)); @@ -717,7 +717,7 @@ describe('ConnectionConfig: basic', function () { //// Test valid arguments //// /////////////////////////////////////////////////////////////////////////// - var testCases = + const testCases = [ { name: 'basic', @@ -1198,12 +1198,12 @@ describe('ConnectionConfig: basic', function () { }, ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var result_options = new ConnectionConfig(testCase.input); + const resultOptions = new ConnectionConfig(testCase.input); Object.keys(testCase.options).forEach(function (key) { - var ref = testCase.options[key]; - var val = result_options[key]; + const ref = testCase.options[key]; + const val = resultOptions[key]; assert.strictEqual(val, ref); }); }; @@ -1215,11 +1215,11 @@ describe('ConnectionConfig: basic', function () { } it('custom prefetch', function () { - var username = 'username'; - var password = 'password'; - var account = 'account'; + const username = 'username'; + const password = 'password'; + const account = 'account'; - var connectionConfig = new ConnectionConfig( + let connectionConfig = new ConnectionConfig( { username: username, password: password, @@ -1227,10 +1227,10 @@ describe('ConnectionConfig: basic', function () { }); // get the default value of the 
resultPrefetch parameter - var resultPrefetchDefault = connectionConfig.getResultPrefetch(); + const resultPrefetchDefault = connectionConfig.getResultPrefetch(); // create a ConnectionConfig object with a custom value for resultPrefetch - var resultPrefetchCustom = resultPrefetchDefault + 1; + const resultPrefetchCustom = resultPrefetchDefault + 1; connectionConfig = new ConnectionConfig( { username: username, diff --git a/test/unit/connection/result/result_stream_test.js b/test/unit/connection/result/result_stream_test.js index e84aa6e70..434c356de 100644 --- a/test/unit/connection/result/result_stream_test.js +++ b/test/unit/connection/result/result_stream_test.js @@ -2,16 +2,16 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. */ -var ResultStream = require('./../../../../lib/connection/result/result_stream'); -var ErrorCodes = require('./../../../../lib/errors').codes; -var assert = require('assert'); +const ResultStream = require('./../../../../lib/connection/result/result_stream'); +const ErrorCodes = require('./../../../../lib/errors').codes; +const assert = require('assert'); describe('ResultStream: basic', function () { /////////////////////////////////////////////////////////////////////////// //// Test synchronous errors //// /////////////////////////////////////////////////////////////////////////// - var testCases = + const testCases = [ { name: 'missing options', @@ -107,9 +107,9 @@ describe('ResultStream: basic', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var error; + let error; try { new ResultStream(testCase.options); @@ -122,7 +122,7 @@ describe('ResultStream: basic', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); @@ -133,7 +133,7 @@ describe('ResultStream: basic', function () { /////////////////////////////////////////////////////////////////////////// it('valid result stream', function () { - var resultStream = new ResultStream( + const resultStream = new ResultStream( { chunks: [], prefetchSize: 1 diff --git a/test/unit/connection/result/result_test.js b/test/unit/connection/result/result_test.js index 5ace6e0be..f966e4854 100644 --- a/test/unit/connection/result/result_test.js +++ b/test/unit/connection/result/result_test.js @@ -2,16 +2,13 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. 
*/ -var Result = require('./../../../../lib/connection/result/result'); -var ConnectionConfig = require('./../../../../lib/connection/connection_config'); -var Util = require('./../../../../lib/util'); -var ErrorCodes = require('./../../../../lib/errors').codes; -var assert = require('assert'); +const Util = require('./../../../../lib/util'); +const assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const ResultTestCommon = require('./result_test_common'); describe('Result', function () { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'DATE_OUTPUT_FORMAT', 'value': 'YYYY-MM-DD' }, { @@ -58,7 +55,7 @@ describe('Result', function () { }; it('small result', function (done) { - var rows = []; + const rows = []; ResultTestCommon.testResult( ResultTestCommon.createResultOptions(response), @@ -66,7 +63,7 @@ describe('Result', function () { rows.push(row); }, function (result) { - var responseData = response.data; + const responseData = response.data; assert.strictEqual(result.getTotalRows(), responseData.total); assert.strictEqual(result.getReturnedRows(), responseData.returned); @@ -74,7 +71,7 @@ describe('Result', function () { assert.strictEqual(result.getQueryId(), responseData.queryId); assert.strictEqual(result.getVersion(), String(responseData.version)); - var sessionState = result.getSessionState(); + const sessionState = result.getSessionState(); assert.strictEqual( sessionState.getCurrentDatabaseProvider(), @@ -96,14 +93,14 @@ describe('Result', function () { sessionState.getCurrentRole(), responseData.finalRoleName); - var rowtype = responseData.rowtype; - var rowset = responseData.rowset; + const rowtype = responseData.rowtype; + const rowset = responseData.rowset; - var columns = result.getColumns(); + const columns = result.getColumns(); assert.strictEqual(columns.length, rowtype.length); - var rowIndex, rowsLength, row; + let rowIndex, rowsLength, row; for (rowIndex = 0, rowsLength = rows.length; rowIndex < rowsLength; rowIndex++) { row = rows[rowIndex]; @@ -111,7 +108,7 @@ describe('Result', function () { assert.ok(Util.isObject(row)); assert.strictEqual(row.rowIndex, rowIndex); - var columnIndex, columnsLength, column; + let columnIndex, columnsLength, column; for (columnIndex = 0, columnsLength = columns.length; columnIndex < columnsLength; columnIndex++) { column = columns[columnIndex]; diff --git a/test/unit/connection/result/result_test_binary.js b/test/unit/connection/result/result_test_binary.js index 2e68b5569..32ae4a873 100644 --- a/test/unit/connection/result/result_test_binary.js +++ b/test/unit/connection/result/result_test_binary.js @@ -2,14 +2,13 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var Util = require('./../../../../lib/util'); -var assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const assert = require('assert'); +const ResultTestCommon = require('./result_test_common'); describe('Result: test binary', function () { it('select X\'0123456789ABCDEF\' as C1;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -60,11 +59,11 @@ describe('Result: test binary', function () { ResultTestCommon.testResult( ResultTestCommon.createResultOptions(response), function (row) { - var buffer = Buffer.from('0123456789ABCDEF', 'hex'); + const buffer = Buffer.from('0123456789ABCDEF', 'hex'); assert.ok(row.getColumnValue('C1').equals(buffer)); assert.strictEqual(row.getColumnValueAsString('C1'), '0123456789ABCDEF'); }, - function (result) { + function () { done(); } ); @@ -73,7 +72,7 @@ describe('Result: test binary', function () { it('alter session set BINARY_OUTPUT_FORMAT=\'BASE64\';' + 'select X\'0123456789ABCDEF\' as C1;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -124,11 +123,11 @@ describe('Result: test binary', function () { ResultTestCommon.testResult( ResultTestCommon.createResultOptions(response), function (row) { - var buffer = Buffer.from('0123456789ABCDEF', 'hex'); + const buffer = Buffer.from('0123456789ABCDEF', 'hex'); assert.ok(row.getColumnValue('C1').equals(buffer)); assert.strictEqual(row.getColumnValueAsString('C1'), 'ASNFZ4mrze8='); }, - function (result) { + function () { done(); } ); diff --git a/test/unit/connection/result/result_test_boolean.js b/test/unit/connection/result/result_test_boolean.js index de4b55b07..2746817d6 100644 --- a/test/unit/connection/result/result_test_boolean.js +++ b/test/unit/connection/result/result_test_boolean.js @@ -2,12 +2,12 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. */ -var assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const assert = require('assert'); +const ResultTestCommon = require('./result_test_common'); describe('Result: test boolean', function () { it('select true as C1, false as C2, to_boolean(null) as C3;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -64,7 +64,7 @@ describe('Result: test boolean', function () { assert.strictEqual(row.getColumnValue('C3'), null); assert.strictEqual(row.getColumnValueAsString('C3'), 'NULL'); }, - function (result) { + function () { done(); } ); @@ -72,7 +72,7 @@ describe('Result: test boolean', function () { it('select to_boolean(\'1\') as C1, to_boolean(\'0\') as C2, \' + ' + '\'to_boolean(null) as C3;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -129,7 +129,7 @@ describe('Result: test boolean', function () { assert.strictEqual(row.getColumnValue('C3'), null); assert.strictEqual(row.getColumnValueAsString('C3'), 'NULL'); }, - function (result) { + function () { done(); } ); diff --git a/test/unit/connection/result/result_test_common.js b/test/unit/connection/result/result_test_common.js index f11dd946b..f49465837 100644 --- a/test/unit/connection/result/result_test_common.js +++ b/test/unit/connection/result/result_test_common.js @@ -2,18 +2,18 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. 
All rights reserved. */ -var Result = require('./../../../../lib/connection/result/result'); -var ConnectionConfig = require('./../../../../lib/connection/connection_config'); -var Util = require('./../../../../lib/util'); -var ErrorCodes = require('./../../../../lib/errors').codes; -var assert = require('assert'); +const Result = require('./../../../../lib/connection/result/result'); +const ConnectionConfig = require('./../../../../lib/connection/connection_config'); +const Util = require('./../../../../lib/util'); +const ErrorCodes = require('./../../../../lib/errors').codes; +const assert = require('assert'); describe('Result: synchronous errors', function () { /////////////////////////////////////////////////////////////////////////// //// Test synchronous errors //// /////////////////////////////////////////////////////////////////////////// - var testCases = + const testCases = [ { name: 'missing options', @@ -166,9 +166,9 @@ describe('Result: synchronous errors', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var error; + let error; try { new Result(testCase.options); @@ -181,7 +181,7 @@ describe('Result: synchronous errors', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); @@ -205,12 +205,12 @@ exports.createResultOptions = function (response) { exports.testResult = function (resultOptions, each, end, startIndex, endIndex) { // create a new result - var result = new Result(resultOptions); + const result = new Result(resultOptions); - var numIterationsActual = 0; + let numIterationsActual = 0; // initiate a fetch-rows operation - var operation = result.fetchRows( + const operation = result.fetchRows( { startIndex: startIndex, endIndex: endIndex, @@ -230,7 +230,7 @@ exports.testResult = function (resultOptions, each, end, startIndex, endIndex) { // the continue callback should be undefined (because there's no error) assert.ok(!Util.exists(continueCallback)); - var numIterationsExpected; + let numIterationsExpected; if (Util.isNumber(startIndex) && Util.isNumber(endIndex)) { numIterationsExpected = endIndex - startIndex + 1; diff --git a/test/unit/connection/result/result_test_date.js b/test/unit/connection/result/result_test_date.js index 11931ad0a..6f4f858f8 100644 --- a/test/unit/connection/result/result_test_date.js +++ b/test/unit/connection/result/result_test_date.js @@ -2,14 +2,14 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var Util = require('./../../../../lib/util'); -var assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const Util = require('./../../../../lib/util'); +const assert = require('assert'); +const ResultTestCommon = require('./result_test_common'); describe('Result: test date', function () { it('select to_date(\'2016-01-21\') as C1;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -51,7 +51,7 @@ describe('Result: test date', function () { assert.ok(Util.isDate(row.getColumnValue('C1'))); assert.strictEqual(row.getColumnValueAsString('C1'), '2016-01-21'); }, - function (result) { + function () { done(); } ); diff --git a/test/unit/connection/result/result_test_duplicated_columns.js b/test/unit/connection/result/result_test_duplicated_columns.js index 6ea9f2d78..6b75ee4d4 100644 --- a/test/unit/connection/result/result_test_duplicated_columns.js +++ b/test/unit/connection/result/result_test_duplicated_columns.js @@ -6,7 +6,6 @@ const assert = require('assert'); const ResultTestCommon = require('./result_test_common'); const RowMode = require('./../../../../lib/constants/row_mode'); const ColumnNamesCreator = require('../../../../lib/connection/result/unique_column_name_creator'); -const { addoverriddenNamesForDuplicatedColumns } = require('../../../../lib/connection/result/unique_column_name_creator'); describe('Unique column names', function () { describe('result contains renamed columns depend on row mode', function () { diff --git a/test/unit/connection/result/result_test_number.js b/test/unit/connection/result/result_test_number.js index 52b3869bd..51e8ed227 100644 --- a/test/unit/connection/result/result_test_number.js +++ b/test/unit/connection/result/result_test_number.js @@ -2,8 +2,8 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const assert = require('assert'); +const ResultTestCommon = require('./result_test_common'); describe('Result: test number', function () { it('select to_number(\'123.456\') as C1, ' + @@ -11,7 +11,7 @@ describe('Result: test number', function () { 'to_number(\'12345678901234567890123456789012345678\') as C3, ' + // pragma: allowlist secret 'to_double(\'12345678901234567890123456789012345678\') as C4;', // pragma: allowlist secret function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -78,7 +78,7 @@ describe('Result: test number', function () { assert.strictEqual( row.getColumnValueAsString('C4'), '1.23456789012346e+37'); }, - function (result) { + function () { done(); } ); diff --git a/test/unit/connection/result/result_test_time.js b/test/unit/connection/result/result_test_time.js index 5e1bfef71..aa69253c5 100644 --- a/test/unit/connection/result/result_test_time.js +++ b/test/unit/connection/result/result_test_time.js @@ -2,14 +2,13 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var Util = require('./../../../../lib/util'); -var assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const assert = require('assert'); +const ResultTestCommon = require('./result_test_common'); describe('Result: test time', function () { it('select to_time(\'12:34:56.789789789\') as C1;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -54,7 +53,7 @@ describe('Result: test time', function () { function (row) { assert.strictEqual(row.getColumnValue('C1'), '12:34:56'); }, - function (result) { + function () { done(); } ); @@ -63,7 +62,7 @@ describe('Result: test time', function () { it('alter session set TIME_OUTPUT_FORMAT=\'HH24:MI:SS.FF\';' + ' select to_time(\'12:34:56.789789789\') as C1;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -108,7 +107,7 @@ describe('Result: test time', function () { function (row) { assert.strictEqual(row.getColumnValue('C1'), '12:34:56.789789789'); }, - function (result) { + function () { done(); } ); diff --git a/test/unit/connection/result/result_test_timestamp.js b/test/unit/connection/result/result_test_timestamp.js index 112b2aaa3..691f84255 100644 --- a/test/unit/connection/result/result_test_timestamp.js +++ b/test/unit/connection/result/result_test_timestamp.js @@ -2,16 +2,16 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('./../../../../lib/util'); -var assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const Util = require('./../../../../lib/util'); +const assert = require('assert'); +const ResultTestCommon = require('./result_test_common'); describe('Result: test timestamp', function () { it('select to_timestamp_ltz(\'Thu, 21 Jan 2016 06:32:44 -0800\') as C1, ' + 'to_timestamp_tz(\'Thu, 21 Jan 2016 06:32:44 -0800\') as C2, ' + 'to_timestamp_ntz(\'Thu, 21 Jan 2016 06:32:44 -0800\') as C3;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -76,7 +76,7 @@ describe('Result: test timestamp', function () { row.getColumnValueAsString('C3'), 'Thu, 21 Jan 2016 06:32:44 +0000'); }, - function (result) { + function () { done(); } ); diff --git a/test/unit/connection/result/result_test_variant.js b/test/unit/connection/result/result_test_variant.js index fc4b97399..e31c2dac6 100644 --- a/test/unit/connection/result/result_test_variant.js +++ b/test/unit/connection/result/result_test_variant.js @@ -2,15 +2,15 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var assert = require('assert'); -var ResultTestCommon = require('./result_test_common'); +const assert = require('assert'); +const ResultTestCommon = require('./result_test_common'); describe('Result: test variant', function () { it('select to_variant((parse_json(\'{ a : 1 }\'))) as C1, ' + 'to_object(parse_json(\'{ a : 1 }\')) as C2, ' + 'to_array(parse_json(\'[1, 2]\')) as C3;', function (done) { - var response = + const response = { 'data': { 'parameters': [{ 'name': 'TIMEZONE', 'value': 'America/Los_Angeles' }, { @@ -70,7 +70,7 @@ describe('Result: test variant', function () { assert.equal( row.getColumnValueAsString('C3'), JSON.stringify([1, 2])); }, - function (result) { + function () { done(); } ); diff --git a/test/unit/connection/result/sf_timestamp_test.js b/test/unit/connection/result/sf_timestamp_test.js index f4d8ec740..5c1775888 100644 --- a/test/unit/connection/result/sf_timestamp_test.js +++ b/test/unit/connection/result/sf_timestamp_test.js @@ -2,11 +2,11 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var SfTimestamp = require('./../../../../lib/connection/result/sf_timestamp'); -var assert = require('assert'); +const SfTimestamp = require('./../../../../lib/connection/result/sf_timestamp'); +const assert = require('assert'); describe('Date: basic', function () { - var testCases = + const testCases = [ { name: 'date: YYYY-MM-DD', @@ -60,7 +60,7 @@ describe('Date: basic', function () { testCases.forEach(function (testCase) { it(testCase.name, function () { - var options = testCase.options; + const options = testCase.options; assert.strictEqual( new SfTimestamp( options.epochSeconds, @@ -74,7 +74,7 @@ describe('Date: basic', function () { }); describe('Timestamp: basic', function () { - var testCases = + const testCases = [ { name: 'timestamp: DY, DD MON YYYY HH24:MI:SS TZHTZM', @@ -128,7 +128,7 @@ describe('Timestamp: basic', function () { testCases.forEach(function (testCase) { it(testCase.name, function () { - var options = testCase.options; + const options = testCase.options; assert.strictEqual( new SfTimestamp( options.epochSeconds, @@ -142,7 +142,7 @@ describe('Timestamp: basic', function () { }); describe('Time: basic', function () { - var testCases = + const testCases = [ { name: 'time: HH24:MI:SS', @@ -196,7 +196,7 @@ describe('Time: basic', function () { testCases.forEach(function (testCase) { it(testCase.name, function () { - var options = testCase.options; + const options = testCase.options; assert.strictEqual( new SfTimestamp( options.epochSeconds, diff --git a/test/unit/connection/statement_test.js b/test/unit/connection/statement_test.js index 4fc545abf..0ab2f81be 100644 --- a/test/unit/connection/statement_test.js +++ b/test/unit/connection/statement_test.js @@ -2,16 +2,16 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. 
*/ -var Statement = require('./../../../lib/connection/statement'); -var ErrorCodes = require('./../../../lib/errors').codes; -var assert = require('assert'); +const Statement = require('./../../../lib/connection/statement'); +const ErrorCodes = require('./../../../lib/errors').codes; +const assert = require('assert'); describe('Statement.execute()', function () { /////////////////////////////////////////////////////////////////////////// //// Test synchronous errors //// ////////////////////////////////////////////////////////////////////////// - var testCases = + const testCases = [ { name: 'execute() missing options', @@ -208,10 +208,10 @@ describe('Statement.execute()', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var options; - var error; + let options; + let error; try { options = testCase.options; @@ -229,7 +229,7 @@ describe('Statement.execute()', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); @@ -241,7 +241,7 @@ describe('Statement.fetchResult()', function () { //// Test synchronous errors //// ////////////////////////////////////////////////////////////////////////// - var testCases = + const testCases = [ { name: 'fetchResult() undefined options', @@ -379,10 +379,10 @@ describe('Statement.fetchResult()', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var options; - var error; + let options; + let error; try { options = testCase.options; @@ -399,7 +399,7 @@ describe('Statement.fetchResult()', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); diff --git a/test/unit/errors_test.js b/test/unit/errors_test.js index 8e4531070..c7b3849c2 100644 --- a/test/unit/errors_test.js +++ b/test/unit/errors_test.js @@ -2,15 +2,15 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. 
*/ -var mapErrNameToErrCode = require('./../../lib/errors').codes; -var mapErrCodeToErrMessage = require('./../../lib/constants/error_messages'); -var mapErrCodeToSqlState = require('./../../lib/errors').mapErrorCodeToSqlState; -var assert = require('assert'); +const mapErrNameToErrCode = require('./../../lib/errors').codes; +const mapErrCodeToErrMessage = require('./../../lib/constants/error_messages'); +const mapErrCodeToSqlState = require('./../../lib/errors').mapErrorCodeToSqlState; +const assert = require('assert'); describe('Errors', function () { it('every error name should have an error code and error message', function () { - var errName; - var errCode; + let errName; + let errCode; for (errName in mapErrNameToErrCode) { if (Object.prototype.hasOwnProperty.call(mapErrNameToErrCode, errName)) { @@ -28,8 +28,8 @@ describe('Errors', function () { it('no two error names should have the same error code', function () { // make sure the mapping from error-name to error-code is one-to-one - var mapErrCodeToErrName = {}; - var errName, errCode; + const mapErrCodeToErrName = {}; + let errName, errCode; for (errName in mapErrNameToErrCode) { if (Object.prototype.hasOwnProperty.call(mapErrNameToErrCode, errName)) { errCode = mapErrNameToErrCode[errName]; @@ -43,14 +43,14 @@ describe('Errors', function () { }); it('validate error code to sql state mapping', function () { - var mapErrCodeToErrName = {}; - for (var errName in mapErrNameToErrCode) { + const mapErrCodeToErrName = {}; + for (const errName in mapErrNameToErrCode) { if (Object.prototype.hasOwnProperty.call(mapErrNameToErrCode, errName)) { mapErrCodeToErrName[mapErrNameToErrCode[errName]] = errName; } } - for (var errCode in mapErrCodeToSqlState) { + for (const errCode in mapErrCodeToSqlState) { if (Object.prototype.hasOwnProperty.call(mapErrCodeToSqlState, errCode)) { assert.ok(mapErrCodeToErrName[errCode], 'invalid mapping: ' + errCode + ':' + diff --git a/test/unit/file_transfer_agent/encrypt_util_test.js b/test/unit/file_transfer_agent/encrypt_util_test.js index e8b82fa25..a57500980 100644 --- a/test/unit/file_transfer_agent/encrypt_util_test.js +++ b/test/unit/file_transfer_agent/encrypt_util_test.js @@ -2,22 +2,22 @@ * Copyright (c) 2021 Snowflake Computing Inc. All rights reserved. 
*/ -var assert = require('assert'); -var mock = require('mock-require'); -var SnowflakeEncryptionUtil = require('./../../../lib/file_transfer_agent/encrypt_util').encrypt_util; +const assert = require('assert'); +const mock = require('mock-require'); +const SnowflakeEncryptionUtil = require('./../../../lib/file_transfer_agent/encrypt_util').EncryptUtil; describe('Encryption util', function () { - var encryptionMaterial; - var mockData = 'mockData'; - var mockFileName = 'mockFileName'; - var mockRandomBytes = 'mockRandomBytes'; - var mockTmpDir = 'mockTmpDir'; - var mockTmpName = 'mockTmpName'; + let encryptionMaterial; + const mockData = 'mockData'; + const mockFileName = 'mockFileName'; + const mockRandomBytes = 'mockRandomBytes'; + const mockTmpDir = 'mockTmpDir'; + const mockTmpName = 'mockTmpName'; - var EncryptionUtil; - var encrypt; - var filestream; - var temp; + let EncryptionUtil; + let encrypt; + let filestream; + let temp; this.beforeEach(function () { encryptionMaterial = { @@ -27,13 +27,13 @@ describe('Encryption util', function () { }; mock('encrypt', { - randomBytes: function (options) { + randomBytes: function () { return Buffer.from(mockRandomBytes); }, - createCipheriv: function (AES_CBC, fileKey, ivData) { + createCipheriv: function () { function createCipheriv() { this.update = function (data) { - function update(data) { + function update() { return Buffer.from(mockData.substring(0, 4)); } return new update(data); @@ -49,7 +49,7 @@ describe('Encryption util', function () { } }); mock('filestream', { - createReadStream: function (inFileName, options) { + createReadStream: function () { function createReadStream() { this.on = function (event, callback) { callback(); @@ -58,9 +58,9 @@ describe('Encryption util', function () { } return new createReadStream; }, - createWriteStream: function (options) { + createWriteStream: function () { function createWriteStream() { - this.write = function (data) { + this.write = function () { return; }; this.close = function (resolve) { @@ -70,18 +70,18 @@ describe('Encryption util', function () { } return new createWriteStream; }, - closeSync: function (fd) { + closeSync: function () { return; } }); mock('temp', { - fileSync: function (options) { + fileSync: function () { return { name: mockTmpName, fd: 0 }; }, - openSync: function (options) { + openSync: function () { return; } }); @@ -94,12 +94,12 @@ describe('Encryption util', function () { }); it('encrypt file', async function () { - var result = await EncryptionUtil.encryptFile(encryptionMaterial, mockFileName, mockTmpDir); + const result = await EncryptionUtil.encryptFile(encryptionMaterial, mockFileName, mockTmpDir); - var decodedKey = Buffer.from(encryptionMaterial['queryStageMasterKey'], 'base64'); - var keySize = decodedKey.length; + const decodedKey = Buffer.from(encryptionMaterial['queryStageMasterKey'], 'base64'); + const keySize = decodedKey.length; - var matDesc = { + let matDesc = { 'smkId': encryptionMaterial.smkId, 'queryId': encryptionMaterial.queryId, 'keySize': keySize * 8 diff --git a/test/unit/file_transfer_agent/gcs_test.js b/test/unit/file_transfer_agent/gcs_test.js index 9baf89681..bd7e1073d 100644 --- a/test/unit/file_transfer_agent/gcs_test.js +++ b/test/unit/file_transfer_agent/gcs_test.js @@ -2,29 +2,29 @@ * Copyright (c) 2021 Snowflake Computing Inc. All rights reserved. 
*/ -var assert = require('assert'); -var mock = require('mock-require'); -var SnowflakeGCSUtil = require('./../../../lib/file_transfer_agent/gcs_util'); -var resultStatus = require('./../../../lib/file_transfer_agent/file_util').resultStatus; +const assert = require('assert'); +const mock = require('mock-require'); +const SnowflakeGCSUtil = require('./../../../lib/file_transfer_agent/gcs_util'); +const resultStatus = require('./../../../lib/file_transfer_agent/file_util').resultStatus; describe('GCS client', function () { - var mockDataFile = 'mockDataFile'; - var mockLocation = 'mockLocation'; - var mockTable = 'mockTable'; - var mockPath = 'mockPath'; - var mockAccessToken = 'mockAccessToken'; - var mockClient = 'mockClient'; - var mockKey = 'mockKey'; - var mockIv = 'mockIv'; - var mockMatDesc = 'mockMatDesc'; - var mockPresignedUrl = 'mockPresignedUrl'; - - var GCS; - var httpclient; - var filestream; - var dataFile = mockDataFile; - var meta; - var encryptionMetadata = { + const mockDataFile = 'mockDataFile'; + const mockLocation = 'mockLocation'; + const mockTable = 'mockTable'; + const mockPath = 'mockPath'; + const mockAccessToken = 'mockAccessToken'; + const mockClient = 'mockClient'; + const mockKey = 'mockKey'; + const mockIv = 'mockIv'; + const mockMatDesc = 'mockMatDesc'; + const mockPresignedUrl = 'mockPresignedUrl'; + + let GCS; + let httpclient; + let filestream; + const dataFile = mockDataFile; + let meta; + const encryptionMetadata = { key: mockKey, iv: mockIv, matDesc: mockMatDesc @@ -42,13 +42,13 @@ describe('GCS client', function () { }; mock('httpclient', { - put: async function (url, body, header) { + put: async function () { return; }, - get: async function (url) { + get: async function () { return; }, - head: async function (url, header) { + head: async function () { return { headers: '' }; @@ -65,25 +65,25 @@ describe('GCS client', function () { }); it('extract bucket name and path', async function () { - var GCS = new SnowflakeGCSUtil(); + const GCS = new SnowflakeGCSUtil(); - var result = GCS.extractBucketNameAndPath('sfc-eng-regression/test_sub_dir/'); + let result = GCS.extractBucketNameAndPath('sfc-eng-regression/test_sub_dir/'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.path, 'test_sub_dir/'); - var result = GCS.extractBucketNameAndPath('sfc-eng-regression/stakeda/test_stg/test_sub_dir/'); + result = GCS.extractBucketNameAndPath('sfc-eng-regression/stakeda/test_stg/test_sub_dir/'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.path, 'stakeda/test_stg/test_sub_dir/'); - var result = GCS.extractBucketNameAndPath('sfc-eng-regression/'); + result = GCS.extractBucketNameAndPath('sfc-eng-regression/'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.path, ''); - var result = GCS.extractBucketNameAndPath('sfc-eng-regression//'); + result = GCS.extractBucketNameAndPath('sfc-eng-regression//'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.path, '/'); - var result = GCS.extractBucketNameAndPath('sfc-eng-regression///'); + result = GCS.extractBucketNameAndPath('sfc-eng-regression///'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.path, '//'); }); @@ -97,17 +97,17 @@ describe('GCS client', function () { it('get file header - fail not found file with presigned url', async function () { mock('httpclient', { - put: async function (url, body, header) { + put: async 
function () { return; }, - get: async function (url) { + get: async function () { const err = new Error(); err.response = { status: 401 }; throw err; } }); - var httpclient = require('httpclient'); - var GCS = new SnowflakeGCSUtil(httpclient); + const httpclient = require('httpclient'); + const GCS = new SnowflakeGCSUtil(httpclient); await GCS.getFileHeader(meta, dataFile); assert.strictEqual(meta['resultStatus'], resultStatus.NOT_FOUND_FILE); @@ -115,14 +115,14 @@ describe('GCS client', function () { it('get file header - fail need retry', async function () { mock('httpclient', { - head: async function (url) { + head: async function () { const err = new Error(); err.response = { status: 403 }; throw err; } }); - var httpclient = require('httpclient'); - var GCS = new SnowflakeGCSUtil(httpclient); + const httpclient = require('httpclient'); + const GCS = new SnowflakeGCSUtil(httpclient); meta.presignedUrl = ''; @@ -132,14 +132,14 @@ describe('GCS client', function () { it('get file header - fail not found file without presigned url', async function () { mock('httpclient', { - head: async function (url) { + head: async function () { const err = new Error(); err.response = { status: 404 }; throw err; } }); - var httpclient = require('httpclient'); - var GCS = new SnowflakeGCSUtil(httpclient); + const httpclient = require('httpclient'); + const GCS = new SnowflakeGCSUtil(httpclient); meta.presignedUrl = ''; @@ -149,14 +149,14 @@ describe('GCS client', function () { it('get file header - fail expired token', async function () { mock('httpclient', { - head: async function (url, header) { + head: async function () { const err = new Error(); err.response = { status: 401 }; throw err; } }); - var httpclient = require('httpclient'); - var GCS = new SnowflakeGCSUtil(httpclient); + const httpclient = require('httpclient'); + const GCS = new SnowflakeGCSUtil(httpclient); meta.presignedUrl = ''; @@ -165,16 +165,16 @@ describe('GCS client', function () { }); it('get file header - fail unknown status', async function () { - var err; + let err; mock('httpclient', { - head: async function (url, header) { + head: async function () { err = new Error(); err.response = { status: 0 }; throw err; } }); - var httpclient = require('httpclient'); - var GCS = new SnowflakeGCSUtil(httpclient); + const httpclient = require('httpclient'); + const GCS = new SnowflakeGCSUtil(httpclient); meta.presignedUrl = ''; @@ -192,7 +192,7 @@ describe('GCS client', function () { it('upload - fail need retry', async function () { mock('httpclient', { - put: async function (url, body, header) { + put: async function () { const err = new Error(); err.code = 403; throw err; @@ -205,7 +205,7 @@ describe('GCS client', function () { }); httpclient = require('httpclient'); filestream = require('filestream'); - var GCS = new SnowflakeGCSUtil(httpclient, filestream); + const GCS = new SnowflakeGCSUtil(httpclient, filestream); await GCS.uploadFile(dataFile, meta, encryptionMetadata); assert.strictEqual(meta['resultStatus'], resultStatus.NEED_RETRY); @@ -213,7 +213,7 @@ describe('GCS client', function () { it('upload - fail renew presigned url', async function () { mock('httpclient', { - put: async function (url, body, header) { + put: async function () { const err = new Error(); err.code = 400; throw err; @@ -226,7 +226,7 @@ describe('GCS client', function () { }); httpclient = require('httpclient'); filestream = require('filestream'); - var GCS = new SnowflakeGCSUtil(httpclient, filestream); + const GCS = new SnowflakeGCSUtil(httpclient, 
filestream); meta.client = ''; meta.lastError = { code: 0 }; @@ -237,7 +237,7 @@ describe('GCS client', function () { it('upload - fail expired token', async function () { mock('httpclient', { - put: async function (url, body, header) { + put: async function () { const err = new Error(); err.code = 401; throw err; @@ -249,11 +249,11 @@ describe('GCS client', function () { } }); mock('gcsClient', { - bucket: function (bucketName) { + bucket: function () { function bucket() { - this.file = function (bucketPath) { + this.file = function () { function file() { - this.save = function (fileStream, options) { + this.save = function () { const err = new Error(); err.code = 401; throw err; @@ -267,8 +267,8 @@ describe('GCS client', function () { }); httpclient = require('httpclient'); filestream = require('filestream'); - gcsClient = require('gcsClient'); - var GCS = new SnowflakeGCSUtil(httpclient, filestream); + const gcsClient = require('gcsClient'); + const GCS = new SnowflakeGCSUtil(httpclient, filestream); meta.presignedUrl = ''; meta.client = { gcsToken: mockAccessToken, gcsClient: gcsClient }; diff --git a/test/unit/file_transfer_agent/s3_test.js b/test/unit/file_transfer_agent/s3_test.js index 0bf8e23b1..7e7de1fd9 100644 --- a/test/unit/file_transfer_agent/s3_test.js +++ b/test/unit/file_transfer_agent/s3_test.js @@ -30,9 +30,9 @@ describe('S3 client', function () { before(function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.getObject = function (params) { + this.getObject = function () { function getObject() { this.then = function (callback) { callback({ @@ -43,7 +43,7 @@ describe('S3 client', function () { return new getObject; }; - this.putObject = function (params) { + this.putObject = function () { function putObject() { this.then = function (callback) { callback(); @@ -85,23 +85,23 @@ describe('S3 client', function () { }); it('extract bucket name and path', async function () { - var result = AWS.extractBucketNameAndPath('sfc-eng-regression/test_sub_dir/'); + let result = AWS.extractBucketNameAndPath('sfc-eng-regression/test_sub_dir/'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.s3path, 'test_sub_dir/'); - var result = AWS.extractBucketNameAndPath('sfc-eng-regression/stakeda/test_stg/test_sub_dir/'); + result = AWS.extractBucketNameAndPath('sfc-eng-regression/stakeda/test_stg/test_sub_dir/'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.s3path, 'stakeda/test_stg/test_sub_dir/'); - var result = AWS.extractBucketNameAndPath('sfc-eng-regression/'); + result = AWS.extractBucketNameAndPath('sfc-eng-regression/'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.s3path, ''); - var result = AWS.extractBucketNameAndPath('sfc-eng-regression//'); + result = AWS.extractBucketNameAndPath('sfc-eng-regression//'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.s3path, '/'); - var result = AWS.extractBucketNameAndPath('sfc-eng-regression///'); + result = AWS.extractBucketNameAndPath('sfc-eng-regression///'); assert.strictEqual(result.bucketName, 'sfc-eng-regression'); assert.strictEqual(result.s3path, '//'); }); @@ -113,11 +113,11 @@ describe('S3 client', function () { it('get file header - fail expired token', async function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.getObject = function (params) { + this.getObject = function () { function getObject() 
{ - this.then = function (callback) { + this.then = function () { const err = new Error(); err.Code = 'ExpiredToken'; throw err; @@ -142,11 +142,11 @@ describe('S3 client', function () { it('get file header - fail no such key', async function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.getObject = function (params) { + this.getObject = function () { function getObject() { - this.then = function (callback) { + this.then = function () { const err = new Error(); err.Code = 'NoSuchKey'; throw err; @@ -171,11 +171,11 @@ describe('S3 client', function () { it('get file header - fail HTTP 400', async function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.getObject = function (params) { + this.getObject = function () { function getObject() { - this.then = function (callback) { + this.then = function () { const err = new Error(); err.Code = '400'; throw err; @@ -200,11 +200,11 @@ describe('S3 client', function () { it('get file header - fail unknown', async function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.getObject = function (params) { + this.getObject = function () { function getObject() { - this.then = function (callback) { + this.then = function () { const err = new Error(); err.Code = 'unknown'; throw err; @@ -234,9 +234,9 @@ describe('S3 client', function () { it('upload - fail expired token', async function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.putObject = function (params) { + this.putObject = function () { function putObject() { this.then = function () { const err = new Error(); @@ -269,9 +269,9 @@ describe('S3 client', function () { it('upload - fail wsaeconnaborted', async function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.putObject = function (params) { + this.putObject = function () { function putObject() { this.then = function () { const err = new Error(); @@ -304,9 +304,9 @@ describe('S3 client', function () { it('upload - fail HTTP 400', async function () { mock('s3', { - S3: function (params) { + S3: function () { function S3() { - this.putObject = function (params) { + this.putObject = function () { function putObject() { this.then = () => { const err = new Error(); diff --git a/test/unit/large_result_set/testLargeResultSet.js b/test/unit/large_result_set/testLargeResultSet.js index 7031d1b79..605051326 100644 --- a/test/unit/large_result_set/testLargeResultSet.js +++ b/test/unit/large_result_set/testLargeResultSet.js @@ -40,7 +40,7 @@ describe('LargeResultSetService', () => { it(testName, done => { largeResultSetService.getObject({ url: baseUrl + url, - callback: (err, body) => { + callback: (err) => { if (err) { if (err && err.name === expectedErrorName) { done(); diff --git a/test/unit/logger/node_test.js b/test/unit/logger/node_test.js index b44206f34..818e0c69b 100644 --- a/test/unit/logger/node_test.js +++ b/test/unit/logger/node_test.js @@ -226,7 +226,7 @@ describe('Logger node tests', function () { async function closeTransportsWithTimeout(logger) { logger.closeTransports(); - return new Promise((resolve, reject) => { + return new Promise((resolve) => { setTimeout(() => { resolve(); }, millisTimeoutToFlushLogFile); diff --git a/test/unit/mock/mock_http_client.js b/test/unit/mock/mock_http_client.js index 3cb2e067c..67cf2bcc5 100644 --- a/test/unit/mock/mock_http_client.js +++ b/test/unit/mock/mock_http_client.js @@ -2,8 +2,8 @@ * Copyright (c) 
2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../../../lib/util'); -var Errors = require('../../../lib/errors'); +const Util = require('../../../lib/util'); +const Errors = require('../../../lib/errors'); const Logger = require('../../../lib/logger'); /** @@ -45,12 +45,12 @@ MockHttpClient.prototype.request = function (request) { } // get the output of the specified request from the map - var requestOutput = this._mapRequestToOutput[serializeRequest(request)]; + const requestOutput = this._mapRequestToOutput[serializeRequest(request)]; Errors.assertInternal(Util.isObject(requestOutput), 'no response available for: ' + serializeRequest(request)); - var delay = Util.isNumber(requestOutput.delay) ? requestOutput.delay : 0; + const delay = Util.isNumber(requestOutput.delay) ? requestOutput.delay : 0; // invoke the callback with the right arguments in a future tick of the event // loop; note that we don't use process.nextTick() here because if we did, the @@ -60,8 +60,8 @@ MockHttpClient.prototype.request = function (request) { setTimeout(function () { // get the response from the output and clone it; this is to prevent tests // from interfering with each other if they mutate the response - var response = JSON.parse(JSON.stringify(requestOutput.response)); - var body = requestOutput.body; + const response = JSON.parse(JSON.stringify(requestOutput.response)); + let body = requestOutput.body; if (!body && response) { body = response.body; @@ -111,10 +111,10 @@ MockHttpClient.prototype.requestAsync = function (request) { * @returns {Object} */ function buildRequestToOutputMap(mappings) { - var mapRequestToOutput = {}; + const mapRequestToOutput = {}; - var mapping; - for (var index = 0, length = mappings.length; index < length; index++) { + let mapping; + for (let index = 0, length = mappings.length; index < length; index++) { mapping = mappings[index]; const k = serializeRequest(mapping.request); if (mapRequestToOutput[k]) { @@ -153,9 +153,9 @@ function serializeRequest(request) { * @returns {*} */ function createSortedClone(target) { - var keysSorted; - var sortedClone; - var index, length, key; + let keysSorted; + let sortedClone; + let index, length, key; if (Util.isObject(target)) { keysSorted = Object.keys(target).sort(); diff --git a/test/unit/mock/mock_test_util.js b/test/unit/mock/mock_test_util.js index 2c98c0708..69f6a3d18 100644 --- a/test/unit/mock/mock_test_util.js +++ b/test/unit/mock/mock_test_util.js @@ -2,10 +2,10 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. 
*/ -var Core = require('./../../../lib/core'); -var MockHttpClient = require('./mock_http_client'); +const Core = require('./../../../lib/core'); +const MockHttpClient = require('./mock_http_client'); -var clientInfo = +const clientInfo = { version: require('./../../../package.json').version, environment: process.versions @@ -13,7 +13,7 @@ var clientInfo = // create a snowflake instance that operates in qa mode and is configured to // use a mock http client -var snowflake = Core( +const snowflake = Core( { qaMode: true, httpClient: new MockHttpClient(clientInfo), @@ -23,7 +23,7 @@ var snowflake = Core( exports.snowflake = snowflake; -var connectionOptions = +const connectionOptions = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -31,12 +31,12 @@ var connectionOptions = account: 'fakeaccount' }; -var connectionOptionsDeserialize = +const connectionOptionsDeserialize = { accessUrl: 'http://fakeaccount.snowflakecomputing.com' }; -var connectionOptionsWithServiceName = +const connectionOptionsWithServiceName = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeuserservicename', @@ -44,7 +44,7 @@ var connectionOptionsWithServiceName = account: 'fakeaccount' }; -var connectionOptionsWithClientSessionKeepAlive = +const connectionOptionsWithClientSessionKeepAlive = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -54,7 +54,7 @@ var connectionOptionsWithClientSessionKeepAlive = clientSessionKeepAliveHeartbeatFrequency: 1800 }; -var connectionOptionsForSessionGone = +const connectionOptionsForSessionGone = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakesessiongone', @@ -62,7 +62,7 @@ var connectionOptionsForSessionGone = account: 'fakeaccount' }; -var connectionOptionsForSessionExpired = +const connectionOptionsForSessionExpired = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakesessionexpired', @@ -70,7 +70,7 @@ var connectionOptionsForSessionExpired = account: 'fakeaccount' }; -var connectionOptions504 = +const connectionOptions504 = { accessUrl: 'http://fake504.snowflakecomputing.com', username: 'fake504user', @@ -78,7 +78,7 @@ var connectionOptions504 = account: 'fake504' }; -var connectionOptionsWithTreatIntAsBigInt = +const connectionOptionsWithTreatIntAsBigInt = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -87,7 +87,7 @@ var connectionOptionsWithTreatIntAsBigInt = jsTreatIntegerAsBigInt: true }; -var connectionOptionsDefault = +const connectionOptionsDefault = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -96,7 +96,7 @@ var connectionOptionsDefault = authenticator: 'SNOWFLAKE' }; -var connectionOptionsExternalBrowser = +const connectionOptionsExternalBrowser = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -104,7 +104,7 @@ var connectionOptionsExternalBrowser = authenticator: 'EXTERNALBROWSER' }; -var connectionOptionsKeyPair = +const connectionOptionsKeyPair = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -113,7 +113,7 @@ var connectionOptionsKeyPair = authenticator: 'SNOWFLAKE_JWT' }; -var connectionOptionsKeyPairPath = +const connectionOptionsKeyPairPath = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -123,7 +123,7 @@ var connectionOptionsKeyPairPath = authenticator: 'SNOWFLAKE_JWT' }; -var connectionOptionsOauth = +const 
connectionOptionsOauth = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', @@ -132,7 +132,7 @@ var connectionOptionsOauth = authenticator: 'OAUTH' }; -var connectionOptionsOkta = +const connectionOptionsOkta = { accessUrl: 'http://fakeaccount.snowflakecomputing.com', username: 'fakeusername', diff --git a/test/unit/mock/statement_fetch_as_string.js b/test/unit/mock/statement_fetch_as_string.js index 3fc3e59ec..2e293a7f8 100644 --- a/test/unit/mock/statement_fetch_as_string.js +++ b/test/unit/mock/statement_fetch_as_string.js @@ -2,46 +2,46 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../../../lib/util'); -var MockTestUtil = require('./mock_test_util'); -var assert = require('assert'); -var async = require('async'); +const Util = require('../../../lib/util'); +const MockTestUtil = require('./mock_test_util'); +const assert = require('assert'); +const async = require('async'); // get a mock snowflake instance and an options object to connect to it -var snowflake = MockTestUtil.snowflake; -var connOpts = MockTestUtil.connectionOptions.default; +const snowflake = MockTestUtil.snowflake; +const connOpts = MockTestUtil.connectionOptions.default; -var stmtOpts = +const stmtOpts = { sqlText: 'select to_boolean(:1) as "boolean", to_date(:2) as "date", 1.123456789123456789 as "number"', binds: ['false', '1967-06-23'], requestId: 'foobar' }; -var numberAsString = '1.123456789123456789'; -var booleanAsString = 'FALSE'; -var dateAsString = '1967-06-23'; +const numberAsString = '1.123456789123456789'; +const booleanAsString = 'FALSE'; +const dateAsString = '1967-06-23'; -var typesBoolean = [snowflake.BOOLEAN]; -var typesNumber = [snowflake.NUMBER]; -var typesDate = [snowflake.DATE]; +const typesBoolean = [snowflake.BOOLEAN]; +const typesNumber = [snowflake.NUMBER]; +const typesDate = [snowflake.DATE]; -var connOptsNone = Util.apply({}, connOpts); -var connOptsBoolean = Util.apply({ fetchAsString: typesBoolean }, connOpts); -var connOptsNumber = Util.apply({ fetchAsString: typesNumber }, connOpts); -var connOptsDate = Util.apply({ fetchAsString: typesDate }, connOpts); +const connOptsNone = Util.apply({}, connOpts); +const connOptsBoolean = Util.apply({ fetchAsString: typesBoolean }, connOpts); +const connOptsNumber = Util.apply({ fetchAsString: typesNumber }, connOpts); +const connOptsDate = Util.apply({ fetchAsString: typesDate }, connOpts); -var stmtOptsNone = Util.apply({}, stmtOpts); -var stmtOptsBoolean = Util.apply({ fetchAsString: typesBoolean }, stmtOpts); -var stmtOptsNumber = Util.apply({ fetchAsString: typesNumber }, stmtOpts); +const stmtOptsNone = Util.apply({}, stmtOpts); +const stmtOptsBoolean = Util.apply({ fetchAsString: typesBoolean }, stmtOpts); +const stmtOptsNumber = Util.apply({ fetchAsString: typesNumber }, stmtOpts); -var strmOptsNone = {}; -var strmOptsNumber = { fetchAsString: typesNumber }; -var strmOptsBoolean = { fetchAsString: typesBoolean }; -var strmOptsDate = { fetchAsString: typesDate }; +const strmOptsNone = {}; +const strmOptsNumber = { fetchAsString: typesNumber }; +const strmOptsBoolean = { fetchAsString: typesBoolean }; +const strmOptsDate = { fetchAsString: typesDate }; describe('Statement - fetch as string', function () { - var testCases = + const testCases = [ { name: 'connection = none, statement = none, stream = number', @@ -101,8 +101,8 @@ describe('Statement - fetch as string', function () { } ]; - for (var index = 0, length = testCases.length; index < length; index++) { 
- var testCase = testCases[index]; + for (let index = 0, length = testCases.length; index < length; index++) { + const testCase = testCases[index]; it(testCase.name, createItCallback( testCase.connOpts, @@ -118,7 +118,7 @@ function createItCallback( streamOptions, verifyFn) { return function (done) { - var connection; + let connection; async.series( [ function (callback) { @@ -129,7 +129,7 @@ function createItCallback( }); }, function (callback) { - var rows = []; + const rows = []; connection.execute(statementOptions).streamRows(streamOptions) .on('data', function (row) { rows.push(row); @@ -152,7 +152,7 @@ function createItCallback( function verifyOnlyNumberConverted(rows) { verifyRows(rows); - var row = rows[0]; + const row = rows[0]; verifyNumberConverted(row); verifyBooleanNotConverted(row); @@ -162,7 +162,7 @@ function verifyOnlyNumberConverted(rows) { function verifyOnlyBooleanConverted(rows) { verifyRows(rows); - var row = rows[0]; + const row = rows[0]; verifyNumberNotConverted(row); verifyBooleanConverted(row); @@ -172,7 +172,7 @@ function verifyOnlyBooleanConverted(rows) { function verifyOnlyDateConverted(rows) { verifyRows(rows); - var row = rows[0]; + const row = rows[0]; verifyNumberNotConverted(row); verifyBooleanNotConverted(row); diff --git a/test/unit/mock/statement_stream_result.js b/test/unit/mock/statement_stream_result.js index 44e5ce6e8..ce9997261 100644 --- a/test/unit/mock/statement_stream_result.js +++ b/test/unit/mock/statement_stream_result.js @@ -2,21 +2,21 @@ * Copyright (c) 2015-2019 Snowflake Computing Inc. All rights reserved. */ -var Util = require('../../../lib/util'); -var MockTestUtil = require('./mock_test_util'); -var assert = require('assert'); -var async = require('async'); +const Util = require('../../../lib/util'); +const MockTestUtil = require('./mock_test_util'); +const assert = require('assert'); +const async = require('async'); // get a mock snowflake instance and an options object to connect to it -var snowflake = MockTestUtil.snowflake; -var connOpts = MockTestUtil.connectionOptions.default; +const snowflake = MockTestUtil.snowflake; +const connOpts = MockTestUtil.connectionOptions.default; -var connOptsStreamResultNone = Util.apply({}, connOpts); -var connOptsStreamResultFalse = Util.apply({ streamResult: false }, connOpts); -var connOptsStreamResultTrue = Util.apply({ streamResult: true }, connOpts); +const connOptsStreamResultNone = Util.apply({}, connOpts); +const connOptsStreamResultFalse = Util.apply({ streamResult: false }, connOpts); +const connOptsStreamResultTrue = Util.apply({ streamResult: true }, connOpts); describe('Statement - stream result', function () { - var testCases = + const testCases = [ { name: 'connection = none, statement = none', @@ -74,8 +74,8 @@ describe('Statement - stream result', function () { } ]; - for (var index = 0, length = testCases.length; index < length; index++) { - var testCase = testCases[index]; + for (let index = 0, length = testCases.length; index < length; index++) { + const testCase = testCases[index]; it(testCase.name, createItCallback( testCase.connOpts, testCase.streamResult, testCase.verifyFn)); } @@ -83,12 +83,12 @@ describe('Statement - stream result', function () { function createItCallback(connectionOptions, streamResult, verifyFn) { return function (done) { - var connection; + let connection; async.series( [ function (callback) { connection = snowflake.createConnection(connectionOptions); - connection.connect(function (err) { + connection.connect(function () { callback(); }); }, diff 
--git a/test/unit/secret_detector_test.js b/test/unit/secret_detector_test.js index 24c9bddcf..beaade1af 100644 --- a/test/unit/secret_detector_test.js +++ b/test/unit/secret_detector_test.js @@ -2,15 +2,15 @@ * Copyright (c) 2021 Snowflake Computing Inc. All rights reserved. */ -var assert = require('assert'); -var SnowflakeSecretDetector = require('./../../lib/secret_detector'); +const assert = require('assert'); +const SnowflakeSecretDetector = require('./../../lib/secret_detector'); describe('Secret Detector', function () { - var SecretDetector; + let SecretDetector; const errstr = new Error('Test exception'); - var mock = + const mock = { execute: function () { throw errstr; @@ -22,24 +22,24 @@ describe('Secret Detector', function () { }); it('basic masking - null', async function () { - var txt = null; - var result = SecretDetector.maskSecrets(txt); + const txt = null; + const result = SecretDetector.maskSecrets(txt); assert.strictEqual(result.masked, false); assert.strictEqual(result.maskedtxt, null); assert.strictEqual(result.errstr, null); }); it('basic masking - empty', async function () { - var txt = ''; - var result = SecretDetector.maskSecrets(txt); + const txt = ''; + const result = SecretDetector.maskSecrets(txt); assert.strictEqual(result.masked, false); assert.strictEqual(result.maskedtxt, txt); assert.strictEqual(result.errstr, null); }); it('basic masking - no masking', async function () { - var txt = 'This string is innocuous'; - var result = SecretDetector.maskSecrets(txt); + const txt = 'This string is innocuous'; + const result = SecretDetector.maskSecrets(txt); assert.strictEqual(result.masked, false); assert.strictEqual(result.maskedtxt, txt); assert.strictEqual(result.errstr, null); @@ -47,14 +47,14 @@ describe('Secret Detector', function () { it('exception - masking', async function () { SecretDetector = new SnowflakeSecretDetector(null, mock); - var result = SecretDetector.maskSecrets('test'); + const result = SecretDetector.maskSecrets('test'); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, errstr.toString()); assert.strictEqual(result.errstr, errstr.toString()); }); it('test - mask token', async function () { - var longToken = '_Y1ZNETTn5/qfUWj3Jedby7gipDzQs=U' + + const longToken = '_Y1ZNETTn5/qfUWj3Jedby7gipDzQs=U' + 'KyJH9DS=nFzzWnfZKGV+C7GopWCGD4Lj' + 'OLLFZKOE26LXHDt3pTi4iI1qwKuSpf/F' + 'mClCMBSissVsU3Ei590FP0lPQQhcSGcD' + @@ -64,31 +64,31 @@ describe('Secret Detector', function () { 'FoloNIkBPXCwFTv+1RVUHgVA2g8A9Lw5' + 'XdJYuI8vhg=f0bKSq7AhQ2Bh'; - var tokenWithPrefix = 'Token =' + longToken; - var result = SecretDetector.maskSecrets(tokenWithPrefix); + const tokenWithPrefix = 'Token =' + longToken; + let result = SecretDetector.maskSecrets(tokenWithPrefix); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'Token =****'); assert.strictEqual(result.errstr, null); - var idTokenWithPrefix = 'idToken : ' + longToken; + const idTokenWithPrefix = 'idToken : ' + longToken; result = SecretDetector.maskSecrets(idTokenWithPrefix); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'idToken : ****'); assert.strictEqual(result.errstr, null); - var sessionTokenWithPrefix = 'sessionToken : ' + longToken; + const sessionTokenWithPrefix = 'sessionToken : ' + longToken; result = SecretDetector.maskSecrets(sessionTokenWithPrefix); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'sessionToken : ****'); assert.strictEqual(result.errstr, null); - var 
masterTokenWithPrefix = 'masterToken : ' + longToken; + const masterTokenWithPrefix = 'masterToken : ' + longToken; result = SecretDetector.maskSecrets(masterTokenWithPrefix); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'masterToken : ****'); assert.strictEqual(result.errstr, null); - var assertionWithPrefix = 'assertion content : ' + longToken; + const assertionWithPrefix = 'assertion content : ' + longToken; result = SecretDetector.maskSecrets(assertionWithPrefix); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'assertion content : ****'); @@ -97,38 +97,38 @@ describe('Secret Detector', function () { it('test - false positive', async function () { - var falsePositiveToken = '2020-04-30 23:06:04,069 - MainThread auth.py:397' + + const falsePositiveToken = '2020-04-30 23:06:04,069 - MainThread auth.py:397' + ' - write_temporary_credential() - DEBUG - no ID ' + 'token is given when try to store temporary credential'; - var result = SecretDetector.maskSecrets(falsePositiveToken); + const result = SecretDetector.maskSecrets(falsePositiveToken); assert.strictEqual(result.masked, false); assert.strictEqual(result.maskedtxt, falsePositiveToken); assert.strictEqual(result.errstr, null); }); it('test - password', async function () { - var randomPassword = 'Fh[+2J~AcqeqW%?'; + const randomPassword = 'Fh[+2J~AcqeqW%?'; - var randomPasswordWithPrefix = 'password:' + randomPassword; - var result = SecretDetector.maskSecrets(randomPasswordWithPrefix); + let randomPasswordWithPrefix = 'password:' + randomPassword; + let result = SecretDetector.maskSecrets(randomPasswordWithPrefix); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'password:****'); assert.strictEqual(result.errstr, null); - var randomPasswordCaps = 'PASSWORD:' + randomPassword; + const randomPasswordCaps = 'PASSWORD:' + randomPassword; result = SecretDetector.maskSecrets(randomPasswordCaps); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'PASSWORD:****'); assert.strictEqual(result.errstr, null); - var randomPasswordMixedCase = 'PassWorD:' + randomPassword; + const randomPasswordMixedCase = 'PassWorD:' + randomPassword; result = SecretDetector.maskSecrets(randomPasswordMixedCase); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'PassWorD:****'); assert.strictEqual(result.errstr, null); - var randomPasswordEqualSign = 'password =' + randomPassword; + const randomPasswordEqualSign = 'password =' + randomPassword; result = SecretDetector.maskSecrets(randomPasswordEqualSign); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'password =****'); @@ -143,7 +143,7 @@ describe('Secret Detector', function () { it('test - token password', async function () { - var longToken = '_Y1ZNETTn5/qfUWj3Jedby7gipDzQs=U' + + const longToken = '_Y1ZNETTn5/qfUWj3Jedby7gipDzQs=U' + 'KyJH9DS=nFzzWnfZKGV+C7GopWCGD4Lj' + 'OLLFZKOE26LXHDt3pTi4iI1qwKuSpf/F' + 'mClCMBSissVsU3Ei590FP0lPQQhcSGcD' + @@ -153,19 +153,19 @@ describe('Secret Detector', function () { 'FoloNIkBPXCwFTv+1RVUHgVA2g8A9Lw5' + 'XdJYuI8vhg=f0bKSq7AhQ2Bh'; - var longToken2 = 'ktL57KJemuq4-M+Q0pdRjCIMcf1mzcr' + + const longToken2 = 'ktL57KJemuq4-M+Q0pdRjCIMcf1mzcr' + 'MwKteDS5DRE/Pb+5MzvWjDH7LFPV5b_' + '/tX/yoLG3b4TuC6Q5qNzsARPPn_zs/j' + 'BbDOEg1-IfPpdsbwX6ETeEnhxkHIL4H' + 'sP-V'; - var randomPwd = 'Fh[+2J~AcqeqW%?'; - var randomPwd2 = randomPwd + 'vdkav13'; + const randomPwd = 'Fh[+2J~AcqeqW%?'; + const randomPwd2 = 
randomPwd + 'vdkav13'; - var testStringWithPrefix = 'token=' + longToken + + const testStringWithPrefix = 'token=' + longToken + ' random giberish ' + 'password:' + randomPwd; - var result = SecretDetector.maskSecrets(testStringWithPrefix); + let result = SecretDetector.maskSecrets(testStringWithPrefix); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'token=****' + @@ -174,7 +174,7 @@ describe('Secret Detector', function () { ); assert.strictEqual(result.errstr, null); - var testStringWithPrefixReversed = 'password:' + randomPwd + + const testStringWithPrefixReversed = 'password:' + randomPwd + ' random giberish ' + 'token=' + longToken; result = SecretDetector.maskSecrets(testStringWithPrefixReversed); @@ -186,7 +186,7 @@ describe('Secret Detector', function () { ); assert.strictEqual(result.errstr, null); - var testStringWithPrefixMultiToken = 'token=' + longToken + + const testStringWithPrefixMultiToken = 'token=' + longToken + ' random giberish ' + 'password:' + randomPwd + ' random giberish ' + @@ -202,7 +202,7 @@ describe('Secret Detector', function () { ); assert.strictEqual(result.errstr, null); - var testStringWithPrefixMultiPass = 'password=' + randomPwd + + const testStringWithPrefixMultiPass = 'password=' + randomPwd + ' random giberish ' + 'password=' + randomPwd2 + ' random giberish ' + @@ -220,7 +220,7 @@ describe('Secret Detector', function () { }); it('custom pattern - success', async function () { - var customPatterns = { + const customPatterns = { regex: [ String.raw`(testCustomPattern\s*:\s*"([a-z]{8,})")`, String.raw`(testCustomPattern\s*:\s*"([0-9]{8,})")` @@ -233,8 +233,8 @@ describe('Secret Detector', function () { SecretDetector = new SnowflakeSecretDetector(customPatterns); - var txt = 'testCustomPattern: "abcdefghijklmnop"'; - var result = SecretDetector.maskSecrets(txt); + let txt = 'testCustomPattern: "abcdefghijklmnop"'; + let result = SecretDetector.maskSecrets(txt); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, customPatterns.mask[0]); assert.strictEqual(result.errstr, null); @@ -247,7 +247,7 @@ describe('Secret Detector', function () { txt = 'password=asdfasdfasdfasdfasdf ' + 'testCustomPattern: "abcdefghijklmnop"'; - var result = SecretDetector.maskSecrets(txt); + result = SecretDetector.maskSecrets(txt); assert.strictEqual(result.masked, true); assert.strictEqual(result.maskedtxt, 'password=**** ' + @@ -265,7 +265,7 @@ describe('Secret Detector', function () { }); it('custom pattern - regex error', async function () { - var customPatterns = { + const customPatterns = { mask: ['maskCustomPattern1', 'maskCustomPattern2'] }; try { @@ -276,7 +276,7 @@ describe('Secret Detector', function () { }); it('custom pattern - mask error', async function () { - var customPatterns = { + const customPatterns = { regex: ['regexCustomPattern1', 'regexCustomPattern2'] }; try { @@ -287,7 +287,7 @@ describe('Secret Detector', function () { }); it('custom pattern - unequal length error', async function () { - var customPatterns = { + const customPatterns = { regex: ['regexCustomPattern1', 'regexCustomPattern2'], mask: ['maskCustomPattern1'] }; diff --git a/test/unit/snowflake_test.js b/test/unit/snowflake_test.js index d07aa02ec..204c842cb 100644 --- a/test/unit/snowflake_test.js +++ b/test/unit/snowflake_test.js @@ -2,32 +2,32 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. 
*/ -var Util = require('./../../lib/util'); -var ErrorCodes = require('./../../lib/errors').codes; -var MockTestUtil = require('./mock/mock_test_util'); +const Util = require('./../../lib/util'); +const ErrorCodes = require('./../../lib/errors').codes; +const MockTestUtil = require('./mock/mock_test_util'); const QueryStatus = require('./../../lib/constants/query_status').code; -var assert = require('assert'); -var async = require('async'); +const assert = require('assert'); +const async = require('async'); const { connectAsync, destroyConnectionAsync } = require('../integration/testUtil'); // get a mock snowflake instance -var snowflake = MockTestUtil.snowflake; +const snowflake = MockTestUtil.snowflake; // get connection options to connect to this mock snowflake instance -var mockConnectionOptions = MockTestUtil.connectionOptions; -var connectionOptions = mockConnectionOptions.default; -var connectionOptionsDeserialize = mockConnectionOptions.deserialize; -var connectionOptionsServiceName = mockConnectionOptions.serviceName; -var connectionOptionsClientSessionKeepAlive = mockConnectionOptions.clientSessionKeepAlive; -var connectionOptionsForSessionGone = mockConnectionOptions.sessionGone; -var connectionOptionsForSessionExpired = mockConnectionOptions.sessionExpired; -var connectionOptionsExternalBrowser = mockConnectionOptions.authExternalBrowser; -var connectionOptionsOkta = mockConnectionOptions.authOkta; +const mockConnectionOptions = MockTestUtil.connectionOptions; +const connectionOptions = mockConnectionOptions.default; +const connectionOptionsDeserialize = mockConnectionOptions.deserialize; +const connectionOptionsServiceName = mockConnectionOptions.serviceName; +const connectionOptionsClientSessionKeepAlive = mockConnectionOptions.clientSessionKeepAlive; +const connectionOptionsForSessionGone = mockConnectionOptions.sessionGone; +const connectionOptionsForSessionExpired = mockConnectionOptions.sessionExpired; +const connectionOptionsExternalBrowser = mockConnectionOptions.authExternalBrowser; +const connectionOptionsOkta = mockConnectionOptions.authOkta; const connectionOptionsFor504 = mockConnectionOptions.http504; const connectionOptionsTreatIntegerAsBigInt = mockConnectionOptions.treatIntAsBigInt; describe('snowflake.createConnection() synchronous errors', function () { - var testCases = + const testCases = [ { name: 'missing options', @@ -279,9 +279,9 @@ describe('snowflake.createConnection() synchronous errors', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var error = null; + let error = null; try { snowflake.createConnection(testCase.options); @@ -294,7 +294,7 @@ describe('snowflake.createConnection() synchronous errors', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); @@ -303,14 +303,14 @@ describe('snowflake.createConnection() synchronous errors', function () { describe('snowflake.createConnection() success', function () { it('createConnection() returns connection', function () { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); assert.ok(connection); }); }); describe('connection.connect() synchronous errors', function () { it('connect() with invalid callback', function () { - var error = null; + let error = null; try { 
snowflake.createConnection(connectionOptions).connect('invalid'); @@ -326,8 +326,8 @@ describe('connection.connect() synchronous errors', function () { describe('connection.connect() success', function () { it('connect() success', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var ret = connection.connect(function (err, conn) { + const connection = snowflake.createConnection(connectionOptions); + const ret = connection.connect(function (err, conn) { assert.ok(!err, 'there should be no error'); assert.strictEqual(conn, connection, 'the connect() callback should be invoked with the connection'); @@ -346,7 +346,7 @@ describe('connection.connect() asynchronous errors', function () { // and we get an error on the second attempt "already connected" instead of "connection already in progress". xit('connect() while already connecting', function (done) { // create a connection and connect - var connection = snowflake.createConnection(connectionOptions).connect(); + const connection = snowflake.createConnection(connectionOptions).connect(); // try to connect again setTimeout(() => { @@ -362,7 +362,7 @@ describe('connection.connect() asynchronous errors', function () { }); it('connect() while already connected', function (done) { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); async.series( [ @@ -393,7 +393,7 @@ describe('connection.connect() asynchronous errors', function () { }); it('connect() while fatally disconnected', function (done) { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); async.series( [ @@ -433,7 +433,7 @@ describe('connection.connect() asynchronous errors', function () { it('connect() with external browser authenticator', function (done) { // create a connection and connect with external browser - var connection = snowflake.createConnection(connectionOptionsExternalBrowser); + const connection = snowflake.createConnection(connectionOptionsExternalBrowser); // try to connect try { @@ -448,7 +448,7 @@ describe('connection.connect() asynchronous errors', function () { it('connect() with okta authenticator', function (done) { // create a connection and connect with okta - var connection = snowflake.createConnection(connectionOptionsOkta); + const connection = snowflake.createConnection(connectionOptionsOkta); // try to connect try { @@ -463,9 +463,9 @@ describe('connection.connect() asynchronous errors', function () { }); describe('connection.execute() synchronous errors', function () { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); - var testCases = + const testCases = [ { name: 'missing options', @@ -581,9 +581,9 @@ describe('connection.execute() synchronous errors', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var error = null; + let error = null; try { connection.execute(testCase.options); @@ -596,7 +596,7 @@ describe('connection.execute() synchronous errors', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); @@ -604,7 +604,7 @@ describe('connection.execute() synchronous errors', function () { }); function 
testStatementFetchRows(statement) { - var testCases = + const testCases = [ { name: 'fetchRows() missing options', @@ -675,7 +675,7 @@ function testStatementFetchRows(statement) { } ]; - var index, length, testCase, error; + let index, length, testCase, error; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; @@ -693,12 +693,12 @@ function testStatementFetchRows(statement) { } describe('connection.execute() statement successful', function () { - var connection = snowflake.createConnection(connectionOptions); - var sqlText = 'select 1 as "c1";'; - var requestId = 'foobar'; + const connection = snowflake.createConnection(connectionOptions); + const sqlText = 'select 1 as "c1";'; + const requestId = 'foobar'; it('statement api', function (done) { - var statement; + let statement; async.series( [ @@ -722,7 +722,7 @@ describe('connection.execute() statement successful', function () { 'the execute() callback should be invoked with the statement'); // we should only have one column c1 - var columns = statement.getColumns(); + const columns = statement.getColumns(); assert.ok(Util.isArray(columns)); assert.strictEqual(columns.length, 1); assert.ok(Util.isObject(columns[0])); @@ -756,7 +756,7 @@ describe('connection.execute() statement successful', function () { assert.strictEqual(statement.getQueryId(), undefined); }, function (callback) { - var rows = []; + const rows = []; statement.fetchRows( { each: function (row) { @@ -783,12 +783,12 @@ describe('connection.execute() statement successful', function () { }); describe('connection.execute() statement failure', function () { - var connection = snowflake.createConnection(connectionOptions); - var sqlText = 'select;'; - var requestId = 'foobar'; + const connection = snowflake.createConnection(connectionOptions); + const sqlText = 'select;'; + const requestId = 'foobar'; it('statement api', function (done) { - var statement; + let statement; async.series( [ @@ -929,9 +929,9 @@ describe('connection.execute() with requestId', function () { describe('too many concurrent requests', function () { it('too many concurrent requests per user', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var sqlText = 'select \'too many concurrent queries\';'; - var requestId = 'foobar'; + const connection = snowflake.createConnection(connectionOptions); + const sqlText = 'select \'too many concurrent queries\';'; + const requestId = 'foobar'; async.series( [ @@ -949,7 +949,7 @@ describe('too many concurrent requests', function () { { sqlText: sqlText, requestId: requestId, - complete: function (err, statement) { + complete: function (err) { assert.ok(err, 'there should be an error'); assert.strictEqual(err.code, '000610'); @@ -965,9 +965,9 @@ describe('too many concurrent requests', function () { }); describe('connection.fetchResult() synchronous errors', function () { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); - var testCases = + const testCases = [ { name: 'missing options', @@ -1055,9 +1055,9 @@ describe('connection.fetchResult() synchronous errors', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var error = null; + let error = null; try { connection.fetchResult(testCase.options); @@ -1070,7 +1070,7 @@ describe('connection.fetchResult() synchronous errors', function () { }; }; - var index, length, 
testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); @@ -1078,11 +1078,11 @@ describe('connection.fetchResult() synchronous errors', function () { }); describe('connection.fetchResult() statement successful', function () { - var connection = snowflake.createConnection(connectionOptions); - var queryId = 'df2852ef-e082-4bb3-94a4-e540bf0e70c6'; + const connection = snowflake.createConnection(connectionOptions); + const queryId = 'df2852ef-e082-4bb3-94a4-e540bf0e70c6'; it('statement api', function (done) { - var statement; + let statement; async.series( [ @@ -1105,7 +1105,7 @@ describe('connection.fetchResult() statement successful', function () { 'the fetchRow() callback should be invoked with the statement'); // we should only have one column c1 - var columns = statement.getColumns(); + const columns = statement.getColumns(); assert.ok(Util.isArray(columns)); assert.strictEqual(columns.length, 1); assert.ok(Util.isObject(columns[0])); @@ -1138,7 +1138,7 @@ describe('connection.fetchResult() statement successful', function () { assert.strictEqual(statement.getSessionState(), undefined); }, function (callback) { - var rows = []; + const rows = []; statement.fetchRows( { each: function (row) { @@ -1165,11 +1165,11 @@ describe('connection.fetchResult() statement successful', function () { }); describe('connection.fetchResult() statement failure', function () { - var connection = snowflake.createConnection(connectionOptions); - var queryId = '13f12818-de4c-41d2-bf19-f115ee8a5cc1'; + const connection = snowflake.createConnection(connectionOptions); + const queryId = '13f12818-de4c-41d2-bf19-f115ee8a5cc1'; it('statement api', function (done) { - var statement; + let statement; async.series( [ @@ -1244,8 +1244,8 @@ describe('connection.fetchResult() statement failure', function () { describe('statement.cancel()', function () { it('cancel a statement before it has been executed', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var statement = connection.execute( + const connection = snowflake.createConnection(connectionOptions); + const statement = connection.execute( { sqlText: 'select 1 as "c1";', requestId: 'foobar' @@ -1260,15 +1260,15 @@ describe('statement.cancel()', function () { }); it('cancel a running statement', function (done) { - var connection = snowflake.createConnection(connectionOptions); - connection.connect(function (err, conn) { + const connection = snowflake.createConnection(connectionOptions); + connection.connect(function (err) { assert.ok(!err, 'should not get an error'); - var statement = connection.execute( + const statement = connection.execute( { sqlText: 'select count(*) from table(generator(timelimit=>10));', requestId: 'b97fee20-a805-11e5-a0ab-ddd3321ed586', - complete: function (err, stmt) { + complete: function (err) { assert.ok(err, 'there should be an error'); assert.strictEqual(err.sqlState, '57014', 'the error should have the right sql state'); @@ -1280,7 +1280,7 @@ describe('statement.cancel()', function () { } }); - var context = + const context = { completed: false, canceled: false @@ -1302,8 +1302,8 @@ describe('statement.cancel()', function () { }); it('cancel a statement that doesn\'t exist', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var statement = connection.fetchResult( + const connection = snowflake.createConnection(connectionOptions); + 
const statement = connection.fetchResult( { queryId: 'foobar' }); @@ -1317,8 +1317,8 @@ describe('statement.cancel()', function () { }); it('cancel a successful statement', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var statement = connection.fetchResult( + const connection = snowflake.createConnection(connectionOptions); + const statement = connection.fetchResult( { queryId: 'df2852ef-e082-4bb3-94a4-e540bf0e70c6' }); @@ -1332,8 +1332,8 @@ describe('statement.cancel()', function () { }); it('cancel a failed statement', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var statement = connection.fetchResult( + const connection = snowflake.createConnection(connectionOptions); + const statement = connection.fetchResult( { queryId: '13f12818-de4c-41d2-bf19-f115ee8a5cc1' }); @@ -1659,7 +1659,7 @@ describe('snowflake.isAnError()', function () { describe('connection.destroy()', function () { it('destroy without connecting', function (done) { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); connection.destroy(function (err, conn) { assert.ok(err); assert.strictEqual( @@ -1671,9 +1671,9 @@ describe('connection.destroy()', function () { }); it('destroy while connecting', function (done) { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); - var context = + const context = { connectcomplete: false, destroycomplete: false @@ -1704,7 +1704,7 @@ describe('connection.destroy()', function () { }); it('destroy after connected', function (done) { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); async.series( [ @@ -1731,7 +1731,7 @@ describe('connection.destroy()', function () { }); it('destroy while disconnected', function (done) { - var connection = snowflake.createConnection(connectionOptions); + const connection = snowflake.createConnection(connectionOptions); async.series( [ @@ -1772,25 +1772,25 @@ describe('connection.destroy()', function () { describe('serialize connection', function () { it('serialize before connecting', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var serializedConnection = connection.serialize(); + const connection = snowflake.createConnection(connectionOptions); + const serializedConnection = connection.serialize(); assert.ok(serializedConnection); done(); }); it('serialize when connected', function (done) { - var connection = snowflake.createConnection(connectionOptions); - var statementFirst; - var statementSecond; + const connection = snowflake.createConnection(connectionOptions); + let statementFirst; + let statementSecond; async.series( [ function (callback) { - var sqlText = 'select 1 as "c1";'; - var requestId = 'foobar'; + const sqlText = 'select 1 as "c1";'; + const requestId = 'foobar'; - connection.connect(function (err, conn) { + connection.connect(function (err) { assert.ok(!err); statementFirst = connection.execute( @@ -1811,7 +1811,7 @@ describe('serialize connection', function () { function (callback) { // serialize the connection and then deserialize it to get a copy of // the original connection - var connectionCopy = snowflake.deserializeConnection( + const connectionCopy = snowflake.deserializeConnection( connectionOptionsDeserialize, snowflake.serializeConnection(connection)); @@ -1837,7 
+1837,7 @@ describe('serialize connection', function () { }); describe('deserialize connection synchronous errors', function () { - var testCases = + const testCases = [ { name: 'missing serializedConnection', @@ -1870,9 +1870,9 @@ describe('deserialize connection synchronous errors', function () { } ]; - var createItCallback = function (testCase) { + const createItCallback = function (testCase) { return function () { - var error = null; + let error = null; try { snowflake.deserializeConnection( @@ -1886,7 +1886,7 @@ describe('deserialize connection synchronous errors', function () { }; }; - var index, length, testCase; + let index, length, testCase; for (index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; it(testCase.name, createItCallback(testCase)); @@ -1895,7 +1895,7 @@ describe('deserialize connection synchronous errors', function () { describe('snowflake.createConnection() SERVICE_NAME', function () { it('createConnection() returns connection including SERVICE_NAME', function (done) { - var connection = snowflake.createConnection(connectionOptionsServiceName); + const connection = snowflake.createConnection(connectionOptionsServiceName); async.series([ function (callback) { connection.connect(function (err) { @@ -1914,7 +1914,7 @@ describe('snowflake.createConnection() SERVICE_NAME', function () { { sqlText: 'select * from faketable', requestId: 'foobar', - complete: function (err, stmt) { + complete: function (err) { assert.ok(!err, JSON.stringify(err)); callback(); } @@ -1933,7 +1933,7 @@ describe('snowflake.createConnection() SERVICE_NAME', function () { describe('snowflake.createConnection() CLIENT_SESSION_KEEP_ALIVE', function () { it('createConnection() returns connection including CLIENT_SESSION_KEEP_ALIVE', function (done) { - var connection = snowflake.createConnection(connectionOptionsClientSessionKeepAlive); + const connection = snowflake.createConnection(connectionOptionsClientSessionKeepAlive); async.series([ function (callback) { connection.connect(function (err) { @@ -1960,7 +1960,7 @@ describe('snowflake.createConnection() CLIENT_SESSION_KEEP_ALIVE', function () { describe('snowflake.createConnection() JS_TREAT_INTEGER_AS_BIGINT', function () { it('createConnection() returns connection including JS_TREAT_INTEGER_AS_BIGINT', function (done) { - var connection = snowflake.createConnection(connectionOptionsTreatIntegerAsBigInt); + const connection = snowflake.createConnection(connectionOptionsTreatIntegerAsBigInt); async.series([ function (callback) { connection.connect(function (err) { @@ -2009,7 +2009,7 @@ describe('snowflake.connect() with 504', function () { * The connection is retired three times and get success. */ it('retry 504', function (done) { - var connection = snowflake.createConnection(connectionOptionsFor504); + const connection = snowflake.createConnection(connectionOptionsFor504); async.series([ function (callback) { connection.connect(function (err) { @@ -2018,7 +2018,7 @@ describe('snowflake.connect() with 504', function () { }); }, function (callback) { - connection.destroy(function (err, con) { + connection.destroy(function (err) { assert.ok(!err, JSON.stringify(err)); callback(); }); diff --git a/test/unit/url_util_test.js b/test/unit/url_util_test.js index 960983b71..816a34149 100644 --- a/test/unit/url_util_test.js +++ b/test/unit/url_util_test.js @@ -1,8 +1,8 @@ /* * Copyright (c) 2015-2023 Snowflake Computing Inc. All rights reserved. 
*/ -var URLUtil = require('./../../lib/url_util'); -var assert = require('assert'); +const URLUtil = require('./../../lib/url_util'); +const assert = require('assert'); describe('URLUtil', function () { it('Valid URL', function () { diff --git a/test/unit/util_test.js b/test/unit/util_test.js index ead86fc24..81d87000e 100644 --- a/test/unit/util_test.js +++ b/test/unit/util_test.js @@ -2,8 +2,8 @@ * Copyright (c) 2015 Snowflake Computing Inc. All rights reserved. */ -var Util = require('./../../lib/util'); -var assert = require('assert'); +const Util = require('./../../lib/util'); +const assert = require('assert'); describe('Util', function () { it('Util.isFunction()', function () { @@ -177,7 +177,7 @@ describe('Util', function () { }); it('Util.string.compareVersions()', function () { - var testCases = []; + const testCases = []; // '' and '0' are the same testCases.push( @@ -277,8 +277,8 @@ describe('Util', function () { result: NaN }); - var testCase, actual, expected; - for (var index = 0, length = testCases.length; index < length; index++) { + let testCase, actual, expected; + for (let index = 0, length = testCases.length; index < length; index++) { testCase = testCases[index]; actual = Util.string.compareVersions(testCase.version1, testCase.version2); @@ -386,7 +386,7 @@ describe('Util', function () { //// Positive Test Cases //// ///////////////////////////////////////////////////////////////////////// - var testCasesPos = + const testCasesPos = [ { url: 'a', @@ -408,8 +408,8 @@ describe('Util', function () { } ]; - var testCase; - for (var index = 0, length = testCasesPos.length; index < length; index++) { + let testCase; + for (let index = 0, length = testCasesPos.length; index < length; index++) { testCase = testCasesPos[index]; assert.strictEqual( Util.url.appendParam( @@ -421,7 +421,7 @@ describe('Util', function () { //// Negative Test Cases //// ///////////////////////////////////////////////////////////////////////// - var testCasesNeg = + const testCasesNeg = [ { paramName: 'foo', @@ -439,8 +439,8 @@ describe('Util', function () { } ]; - var error; - for (index = 0, length = testCasesPos.length; index < length; index++) { + let error; + for (let index = 0, length = testCasesPos.length; index < length; index++) { error = null; testCase = testCasesNeg[index]; @@ -633,7 +633,7 @@ describe('Util', function () { assert.strictEqual(Util.apply(undefined, undefined), undefined); assert.strictEqual(Util.apply(undefined, {}), undefined); - var dst, src; + let dst, src; dst = {}; src = null; @@ -654,7 +654,7 @@ describe('Util', function () { }); it('Util.isRetryableHttpError()', function () { - var testCasesPos = + const testCasesPos = [ { name: '200 - OK', @@ -712,9 +712,9 @@ describe('Util', function () { }, ]; - var testCase; - var err; - for (var index = 0, length = testCasesPos.length; index < length; index++) { + let testCase; + let err; + for (let index = 0, length = testCasesPos.length; index < length; index++) { testCase = testCasesPos[index]; err = { response: { statusCode: testCase.statusCode } @@ -807,10 +807,6 @@ describe('Util', function () { }); describe('Util Test - detecting PROXY envvars and compare with the agent proxy settings', () => { - // if for some reason there's already a PROXY envvar, try to preserve it - const httpProxyBeforeTest = process.env.HTTP_PROXY ? process.env.HTTP_PROXY : null; - const httpsProxyBeforeTest = process.env.HTTPS_PROXY ? 
process.env.HTTPS_PROXY : null; - [ { name: 'detect http_proxy envvar, no agent proxy', @@ -879,7 +875,7 @@ describe('Util', function () { } ].forEach(({ name, isWarn, httpproxy, HTTPSPROXY, agentOptions, shouldLog }) => { it(`${name}`, () => { - process.env.http_proxy = httpproxy; + process.env.HTTP_PROXY = httpproxy; process.env.HTTPS_PROXY = HTTPSPROXY; const compareAndLogEnvAndAgentProxies = Util.getCompareAndLogEnvAndAgentProxies(agentOptions);