diff --git a/lib/file_transfer_agent/azure_util.js b/lib/file_transfer_agent/azure_util.js
index 41ab158a8..6a54f2381 100644
--- a/lib/file_transfer_agent/azure_util.js
+++ b/lib/file_transfer_agent/azure_util.js
@@ -89,7 +89,7 @@ function AzureUtil(azure, filestream) {
    */
   this.getFileHeader = async function (meta, filename) {
     const stageInfo = meta['stageInfo'];
-    const client = meta['client'];
+    const client = this.createClient(stageInfo);
     const azureLocation = this.extractContainerNameAndPath(stageInfo['location']);
 
     const containerClient = client.getContainerClient(azureLocation.containerName);
@@ -188,7 +188,7 @@ function AzureUtil(azure, filestream) {
     }
 
     const stageInfo = meta['stageInfo'];
-    const client = meta['client'];
+    const client = this.createClient(stageInfo);
     const azureLocation = this.extractContainerNameAndPath(stageInfo['location']);
     const blobName = azureLocation.path + meta['dstFileName'];
 
@@ -230,7 +230,7 @@ function AzureUtil(azure, filestream) {
    */
   this.nativeDownloadFile = async function (meta, fullDstPath) {
     const stageInfo = meta['stageInfo'];
-    const client = meta['client'];
+    const client = this.createClient(stageInfo);
     const azureLocation = this.extractContainerNameAndPath(stageInfo['location']);
     const blobName = azureLocation.path + meta['srcFileName'];
 
diff --git a/lib/file_transfer_agent/file_transfer_agent.js b/lib/file_transfer_agent/file_transfer_agent.js
index 811d0833a..9c4fc1b36 100644
--- a/lib/file_transfer_agent/file_transfer_agent.js
+++ b/lib/file_transfer_agent/file_transfer_agent.js
@@ -131,7 +131,7 @@ function FileTransferAgent(context) {
       }
 
       //upload
-      let storageClient = getStorageClient(stageLocationType);
+      const storageClient = getStorageClient(stageLocationType);
       const client = storageClient.createClient(stageInfo, false);
       const meta = fileMetadata[0];
       meta['parallel'] = parallel;
@@ -148,12 +148,7 @@ function FileTransferAgent(context) {
       meta['requireCompress'] = false;
       meta['dstFileName'] = meta['srcFileName'];
 
-      storageClient = getStorageClient(meta['stageLocationType']);
-      try {
-        await storageClient.uploadOneFileStream(meta);
-      } finally {
-        storageClient.destroyClient(stageInfo, client);
-      }
+      await storageClient.uploadOneFileStream(meta);
     } else {
       parseCommand();
       initFileMetadata();
@@ -308,16 +303,12 @@ function FileTransferAgent(context) {
       meta['client'] = client;
     }
 
-    try {
-      if (smallFileMetas.length > 0) {
-        //await uploadFilesinParallel(smallFileMetas);
-        await uploadFilesinSequential(smallFileMetas);
-      }
-      if (largeFileMetas.length > 0) {
-        await uploadFilesinSequential(largeFileMetas);
-      }
-    } finally {
-      storageClient.destroyClient(stageInfo, client);
+    if (smallFileMetas.length > 0) {
+      //await uploadFilesinParallel(smallFileMetas);
+      await uploadFilesinSequential(smallFileMetas);
+    }
+    if (largeFileMetas.length > 0) {
+      await uploadFilesinSequential(largeFileMetas);
     }
   }
 
@@ -426,16 +417,12 @@ function FileTransferAgent(context) {
       meta['client'] = client;
     }
 
-    try {
-      if (smallFileMetas.length > 0) {
-        //await downloadFilesinParallel(smallFileMetas);
-        await downloadFilesinSequential(smallFileMetas);
-      }
-      if (largeFileMetas.length > 0) {
-        await downloadFilesinSequential(largeFileMetas);
-      }
-    } finally {
-      storageClient.destroyClient(stageInfo, client);
+    if (smallFileMetas.length > 0) {
+      //await downloadFilesinParallel(smallFileMetas);
+      await downloadFilesinSequential(smallFileMetas);
+    }
+    if (largeFileMetas.length > 0) {
+      await downloadFilesinSequential(largeFileMetas);
     }
   }
 
@@ -514,18 +501,14 @@ function FileTransferAgent(context) {
       const client = SnowflakeRemoteStorageUtil.createClient(stageInfo, false);
       const s3location = SnowflakeS3Util.extractBucketNameAndPath(stageInfo['location']);
 
-      try {
-        await client.getBucketAccelerateConfiguration({ Bucket: s3location.bucketName })
-          .then(function (data) {
-            useAccelerateEndpoint = data['Status'] === 'Enabled';
-          }).catch(function (err) {
-            if (err['code'] === 'AccessDenied') {
-              return;
-            }
-          });
-      } finally {
-        SnowflakeRemoteStorageUtil.destroyClient(stageInfo, client);
-      }
+      await client.getBucketAccelerateConfiguration({ Bucket: s3location.bucketName })
+        .then(function (data) {
+          useAccelerateEndpoint = data['Status'] === 'Enabled';
+        }).catch(function (err) {
+          if (err['code'] === 'AccessDenied') {
+            return;
+          }
+        });
     }
   }
 
diff --git a/lib/file_transfer_agent/local_util.js b/lib/file_transfer_agent/local_util.js
index c003fa642..a06a9de7c 100644
--- a/lib/file_transfer_agent/local_util.js
+++ b/lib/file_transfer_agent/local_util.js
@@ -18,9 +18,6 @@ function LocalUtil() {
     return null;
   };
 
-  this.destroyClient = function () {
-  };
-
   /**
    * Write file to upload.
    *
diff --git a/lib/file_transfer_agent/remote_storage_util.js b/lib/file_transfer_agent/remote_storage_util.js
index 8137371d3..97b911c8b 100644
--- a/lib/file_transfer_agent/remote_storage_util.js
+++ b/lib/file_transfer_agent/remote_storage_util.js
@@ -65,19 +65,6 @@ function RemoteStorageUtil() {
     return utilClass.createClient(stageInfo, useAccelerateEndpoint);
   };
 
-  /**
-   * Destroys a client based on the location type.
-   *
-   * @param {Object} stageInfo
-   * @param {Object} client
-   */
-  this.destroyClient = function (stageInfo, client) {
-    const utilClass = this.getForStorageType(stageInfo['locationType']);
-    if (utilClass.destroyClient) {
-      utilClass.destroyClient(client);
-    }
-  };
-
   /**
    * Encrypt then upload one file stream.
    *
diff --git a/lib/file_transfer_agent/s3_util.js b/lib/file_transfer_agent/s3_util.js
index 522821125..2b134bf8c 100644
--- a/lib/file_transfer_agent/s3_util.js
+++ b/lib/file_transfer_agent/s3_util.js
@@ -76,15 +76,6 @@ function S3Util(s3, filestream) {
     return new AWS.S3(config);
   };
 
-  /**
-   * Destroys an AWS S3 client.
-   *
-   * @param {AWS.S3} client
-   */
-  this.destroyClient = function (client) {
-    client.destroy();
-  };
-
   /**
    * Extract the bucket name and path from the metadata's stage location.
    *
@@ -123,7 +114,7 @@
    */
   this.getFileHeader = async function (meta, filename) {
     const stageInfo = meta['stageInfo'];
-    const client = meta['client'];
+    const client = this.createClient(stageInfo);
     const s3location = this.extractBucketNameAndPath(stageInfo['location']);
 
     const params = {
@@ -203,7 +194,8 @@
       s3Metadata[AMZ_MATDESC] = encryptionMetadata.matDesc;
     }
 
-    const client = meta['client'];
+    const stageInfo = meta['stageInfo'];
+    const client = this.createClient(stageInfo);
 
     const s3location = this.extractBucketNameAndPath(meta['stageInfo']['location']);
 
@@ -243,7 +235,8 @@
    * @param {Object} encryptionMetadata
    */
   this.nativeDownloadFile = async function (meta, fullDstPath) {
-    const client = meta['client'];
+    const stageInfo = meta['stageInfo'];
+    const client = this.createClient(stageInfo);
 
     const s3location = this.extractBucketNameAndPath(meta['stageInfo']['location']);
 
diff --git a/test/unit/file_transfer_agent/azure_test.js b/test/unit/file_transfer_agent/azure_test.js
index 3f9c409b9..0a621de0e 100644
--- a/test/unit/file_transfer_agent/azure_test.js
+++ b/test/unit/file_transfer_agent/azure_test.js
@@ -20,8 +20,15 @@ describe('Azure client', function () {
   let Azure = null;
   let client = null;
   let filestream = null;
-  let meta = null;
   const dataFile = mockDataFile;
+  const meta = {
+    stageInfo: {
+      location: mockLocation,
+      path: mockTable + '/' + mockPath + '/',
+      creds: {}
+    },
+    SHA256_DIGEST: mockDigest,
+  };
   const encryptionMetadata = {
     key: mockKey,
     iv: mockIv,
@@ -101,18 +108,6 @@ describe('Azure client', function () {
     filestream = require('filestream');
     Azure = new SnowflakeAzureUtil(client, filestream);
   });
-  beforeEach(function () {
-    const stageInfo = {
-      location: mockLocation,
-      path: mockTable + '/' + mockPath + '/',
-      creds: {}
-    };
-    meta = {
-      stageInfo,
-      SHA256_DIGEST: mockDigest,
-      client: Azure.createClient(stageInfo),
-    };
-  });
 
   it('extract bucket name and path', async function () {
     verifyNameAndPath('sfc-eng-regression/test_sub_dir/', 'sfc-eng-regression', 'test_sub_dir/');
@@ -137,7 +132,6 @@
 
     client = require('client');
     Azure = new SnowflakeAzureUtil(client);
-    meta['client'] = Azure.createClient(meta['stageInfo']);
 
     await Azure.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.RENEW_TOKEN);
@@ -153,7 +147,6 @@
 
     client = require('client');
     const Azure = new SnowflakeAzureUtil(client);
-    meta['client'] = Azure.createClient(meta['stageInfo']);
 
     await Azure.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.NOT_FOUND_FILE);
@@ -169,7 +162,6 @@
 
     client = require('client');
     Azure = new SnowflakeAzureUtil(client);
-    meta['client'] = Azure.createClient(meta['stageInfo']);
 
     await Azure.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.RENEW_TOKEN);
@@ -185,7 +177,6 @@
 
     client = require('client');
     Azure = new SnowflakeAzureUtil(client);
-    meta['client'] = Azure.createClient(meta['stageInfo']);
 
     await Azure.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.ERROR);
@@ -202,7 +193,6 @@
     client = require('client');
     filestream = require('filestream');
     Azure = new SnowflakeAzureUtil(client, filestream);
-    meta['client'] = Azure.createClient(meta['stageInfo']);
 
     await Azure.uploadFile(dataFile, meta, encryptionMetadata);
     assert.strictEqual(meta['resultStatus'], resultStatus.UPLOADED);
@@ -223,7 +213,6 @@
     client = require('client');
     filestream = require('filestream');
     Azure = new SnowflakeAzureUtil(client, filestream);
-    meta['client'] = Azure.createClient(meta['stageInfo']);
 
     await Azure.uploadFile(dataFile, meta, encryptionMetadata);
     assert.strictEqual(meta['resultStatus'], resultStatus.RENEW_TOKEN);
@@ -244,7 +233,6 @@
     client = require('client');
     filestream = require('filestream');
     Azure = new SnowflakeAzureUtil(client, filestream);
-    meta['client'] = Azure.createClient(meta['stageInfo']);
 
     await Azure.uploadFile(dataFile, meta, encryptionMetadata);
     assert.strictEqual(meta['resultStatus'], resultStatus.NEED_RETRY);
diff --git a/test/unit/file_transfer_agent/s3_test.js b/test/unit/file_transfer_agent/s3_test.js
index 7e7de1fd9..2750bfb70 100644
--- a/test/unit/file_transfer_agent/s3_test.js
+++ b/test/unit/file_transfer_agent/s3_test.js
@@ -20,8 +20,15 @@ describe('S3 client', function () {
   let AWS;
   let s3;
   let filesystem;
-  let meta;
   const dataFile = mockDataFile;
+  const meta = {
+    stageInfo: {
+      location: mockLocation,
+      path: mockTable + '/' + mockPath + '/',
+      creds: {}
+    },
+    SHA256_DIGEST: mockDigest,
+  };
   const encryptionMetadata = {
     key: mockKey,
     iv: mockIv,
@@ -52,7 +59,6 @@ describe('S3 client', function () {
         return new putObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -68,21 +74,6 @@ describe('S3 client', function () {
 
     AWS = new SnowflakeS3Util(s3, filesystem);
   });
-  beforeEach(function () {
-    const stageInfo = {
-      location: mockLocation,
-      path: mockTable + '/' + mockPath + '/',
-      creds: {}
-    };
-    meta = {
-      stageInfo,
-      SHA256_DIGEST: mockDigest,
-      client: AWS.createClient(stageInfo),
-    };
-  });
-  this.afterEach(function () {
-    AWS.destroyClient(meta['client']);
-  });
 
   it('extract bucket name and path', async function () {
     let result = AWS.extractBucketNameAndPath('sfc-eng-regression/test_sub_dir/');
@@ -126,7 +117,6 @@
         return new getObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -134,7 +124,6 @@
     });
     s3 = require('s3');
     const AWS = new SnowflakeS3Util(s3);
-    meta['client'] = AWS.createClient(meta['stageInfo']);
 
     await AWS.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.RENEW_TOKEN);
@@ -155,7 +144,6 @@
         return new getObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -163,7 +151,6 @@
     });
     s3 = require('s3');
     const AWS = new SnowflakeS3Util(s3);
-    meta['client'] = AWS.createClient(meta['stageInfo']);
 
     await AWS.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.NOT_FOUND_FILE);
@@ -184,7 +171,6 @@
         return new getObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -192,7 +178,6 @@
     });
     s3 = require('s3');
    const AWS = new SnowflakeS3Util(s3);
-    meta['client'] = AWS.createClient(meta['stageInfo']);
 
     await AWS.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.RENEW_TOKEN);
@@ -213,7 +198,6 @@
         return new getObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -221,7 +205,6 @@
     });
     s3 = require('s3');
     const AWS = new SnowflakeS3Util(s3);
-    meta['client'] = AWS.createClient(meta['stageInfo']);
 
     await AWS.getFileHeader(meta, dataFile);
     assert.strictEqual(meta['resultStatus'], resultStatus.ERROR);
@@ -247,7 +230,6 @@
         return new putObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -261,7 +243,6 @@
     s3 = require('s3');
     filesystem = require('filesystem');
     const AWS = new SnowflakeS3Util(s3, filesystem);
-    meta['client'] = AWS.createClient(meta['stageInfo']);
 
     await AWS.uploadFile(dataFile, meta, encryptionMetadata);
     assert.strictEqual(meta['resultStatus'], resultStatus.RENEW_TOKEN);
@@ -282,7 +263,6 @@
         return new putObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -296,7 +276,6 @@
     s3 = require('s3');
     filesystem = require('filesystem');
     const AWS = new SnowflakeS3Util(s3, filesystem);
-    meta['client'] = AWS.createClient(meta['stageInfo']);
 
     await AWS.uploadFile(dataFile, meta, encryptionMetadata);
     assert.strictEqual(meta['resultStatus'], resultStatus.NEED_RETRY_WITH_LOWER_CONCURRENCY);
@@ -317,7 +296,6 @@
         return new putObject;
       };
 
-      this.destroy = function () {};
     }
 
     return new S3;
@@ -331,7 +309,6 @@
     s3 = require('s3');
     filesystem = require('filesystem');
     const AWS = new SnowflakeS3Util(s3, filesystem);
-    meta['client'] = AWS.createClient(meta['stageInfo']);
 
     await AWS.uploadFile(dataFile, meta, encryptionMetadata);
     assert.strictEqual(meta['resultStatus'], resultStatus.NEED_RETRY);
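
For reference, a minimal usage sketch (not part of the diff) of the pattern the change converges on, mirroring the updated tests: callers keep only stageInfo on the metadata object, and each utility method builds its own client from it via createClient(), so there is no meta['client'] to populate and no destroyClient() to call afterwards. The require path, export shape, and concrete argument values below are illustrative assumptions, not taken from the diff.

// Illustrative sketch only -- module path and stage values are assumptions.
const SnowflakeS3Util = require('./lib/file_transfer_agent/s3_util');

async function checkStagedFile(awsSdk, stageInfo, fileName) {
  // The utility receives the AWS SDK (or a mock, as in the tests) through its constructor.
  const s3Util = new SnowflakeS3Util(awsSdk);

  // No meta['client'] any more: getFileHeader() calls createClient(stageInfo) internally.
  const meta = { stageInfo: stageInfo };
  await s3Util.getFileHeader(meta, fileName);

  // The outcome is reported through meta['resultStatus'], as the tests assert.
  return meta['resultStatus'];
}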