From 7fb3f5cda9a3ff99c62086114a9326ab9b1baef9 Mon Sep 17 00:00:00 2001 From: Vitalii Yarmus <71256742+Vitalii4as@users.noreply.github.com> Date: Wed, 18 Sep 2024 11:56:47 +0300 Subject: [PATCH 01/26] HCK-7953: fix creation of indexes on added view in alter script (#107) * HCK-7953: fix creation of indexes on added view in alter script * fix typos --- .../alterScriptHelpers/alterViewHelper.js | 37 ++++++++++++++++++- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/forward_engineering/helpers/alterScriptHelpers/alterViewHelper.js b/forward_engineering/helpers/alterScriptHelpers/alterViewHelper.js index c2d4e9a..8ab8fd4 100644 --- a/forward_engineering/helpers/alterScriptHelpers/alterViewHelper.js +++ b/forward_engineering/helpers/alterScriptHelpers/alterViewHelper.js @@ -8,16 +8,29 @@ module.exports = (app, options) => { const { AlterScriptDto } = require('./types/AlterScriptDto'); const getAddViewScriptDto = view => { + const viewName = getTableName(view.code || view.name, view?.role?.compMod?.keyspaceName); const viewSchema = { ...view, ...(view.role ?? {}) }; + const idToNameHashTable = generateRefToNameHashTable(viewSchema); + const idToActivatedHashTable = generateRefToActivatedHashTable(viewSchema); + const schemaData = { schemaName: viewSchema.compMod.keyspaceName }; const viewData = { name: viewSchema.code || viewSchema.name, keys: getKeys(viewSchema, viewSchema.compMod?.collectionData?.collectionRefsDefinitionsMap ?? {}), - schemaData: { schemaName: viewSchema.compMod.keyspaceName }, + schemaData, }; const hydratedView = ddlProvider.hydrateView({ viewData, entityData: [view] }); - return AlterScriptDto.getInstance([ddlProvider.createView(hydratedView, {}, view.isActivated)], true, false); + const viewScript = AlterScriptDto.getInstance( + [ddlProvider.createView(hydratedView, {}, view.isActivated)], + true, + false, + ); + const indexesScripts = (viewSchema.Indxs || []) + .map(hydrateIndex({ idToNameHashTable, idToActivatedHashTable, schemaData })) + .map(index => AlterScriptDto.getInstance([ddlProvider.createViewIndex(viewName, index)], true, false)); + + return [viewScript, ...indexesScripts].filter(Boolean); }; const getDeleteViewScriptDto = view => { @@ -178,6 +191,26 @@ module.exports = (app, options) => { .filter(Boolean); }; + const generateRefToNameHashTable = view => { + const refToNameHashTable = {}; + + mapProperties(view, (propertyName, schema) => { + refToNameHashTable[schema.ref] = propertyName; + }); + + return refToNameHashTable; + }; + + const generateRefToActivatedHashTable = view => { + const refToActivatedHashTable = {}; + + mapProperties(view, (propertyName, schema) => { + refToActivatedHashTable[schema.ref] = schema.isActivated; + }); + + return refToActivatedHashTable; + }; + return { getAddViewScriptDto, getDeleteViewScriptDto, From dd6ea8fa7fdbf2422408d2e84f612d764d738557 Mon Sep 17 00:00:00 2001 From: Serhii Filonenko <91055067+serhii-filonenko@users.noreply.github.com> Date: Thu, 19 Sep 2024 12:25:46 +0300 Subject: [PATCH 02/26] HCK-8003: add comma commenting for last activated column statement (#108) --- forward_engineering/ddlProvider.js | 8 ++- .../joinActivatedAndDeactivatedStatements.js | 53 +++++++++++++++++++ 2 files changed, 59 insertions(+), 2 deletions(-) create mode 100644 forward_engineering/utils/joinActivatedAndDeactivatedStatements.js diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index d4d9d0a..cc1adf4 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js 
@@ -2,6 +2,7 @@ const defaultTypes = require('./configs/defaultTypes'); const types = require('./configs/types'); const templates = require('./configs/templates'); const { commentIfDeactivated } = require('./helpers/commentIfDeactivated'); +const { joinActivatedAndDeactivatedStatements } = require('./utils/joinActivatedAndDeactivatedStatements'); module.exports = (baseProvider, options, app) => { const _ = app.require('lodash'); @@ -137,9 +138,10 @@ module.exports = (baseProvider, options, app) => { const dividedForeignKeys = divideIntoActivatedAndDeactivated(foreignKeyConstraints, key => key.statement); const foreignKeyConstraintsString = generateConstraintsString(dividedForeignKeys, isActivated); const tableAndColumnCommentsSeparator = tableComment ? '\n\n' : ''; + const columnStatements = joinActivatedAndDeactivatedStatements({ statements: columns, indent: '\n\t' }); const tableStatement = assignTemplates(templates.createTable, { name: tableName, - column_definitions: columns.join(',\n\t'), + column_definitions: columnStatements, temporalTableTime: temporalTableTimeStatement, checkConstraints: checkConstraints.length ? ',\n\t' + checkConstraints.join(',\n\t') : '', foreignKeyConstraints: foreignKeyConstraintsString, @@ -202,7 +204,7 @@ module.exports = (baseProvider, options, app) => { columnDefinition.isHidden, ); - return assignTemplates(templates.columnDefinition, { + const statement = assignTemplates(templates.columnDefinition, { name: columnDefinition.name, type: decorateType(type, columnDefinition), primary_key: primaryKey + unique, @@ -215,6 +217,8 @@ module.exports = (baseProvider, options, app) => { temporalTableTime, ...identityContainer, }); + + return commentIfDeactivated(statement, { isActivated: columnDefinition.isActivated }); }, createIndex(tableName, index, dbData, isParentActivated = true) { diff --git a/forward_engineering/utils/joinActivatedAndDeactivatedStatements.js b/forward_engineering/utils/joinActivatedAndDeactivatedStatements.js new file mode 100644 index 0000000..440e120 --- /dev/null +++ b/forward_engineering/utils/joinActivatedAndDeactivatedStatements.js @@ -0,0 +1,53 @@ +/** + * @param {{ + * index: number; + * numberOfStatements: number; + * lastIndexOfActivatedStatement: number; + * delimiter: string; + * }} + * @return {string} + * */ +const getDelimiter = ({ index, numberOfStatements, lastIndexOfActivatedStatement, delimiter }) => { + const isLastStatement = index === numberOfStatements - 1; + const isLastActivatedStatement = index === lastIndexOfActivatedStatement; + + if (isLastStatement) { + return ''; + } + + if (isLastActivatedStatement) { + return ' --' + delimiter; + } + + return delimiter; +}; + +/** + * @param {{ + * statements?: string[]; + * delimiter?: string; + * indent?: string; + * }} + * @return {string} + * */ +const joinActivatedAndDeactivatedStatements = ({ statements = [], delimiter = ',', indent = '\n' }) => { + const lastIndexOfActivatedStatement = statements.findLastIndex(statement => !statement.startsWith('--')); + const numberOfStatements = statements.length; + + return statements + .map((statement, index) => { + const currentDelimiter = getDelimiter({ + index, + numberOfStatements, + lastIndexOfActivatedStatement, + delimiter, + }); + + return statement + currentDelimiter; + }) + .join(indent); +}; + +module.exports = { + joinActivatedAndDeactivatedStatements, +}; From 8403a067a334b1e684b3f3b96cc71f1c4413b1cd Mon Sep 17 00:00:00 2001 From: Teamcity Date: Fri, 20 Sep 2024 17:39:54 +0000 Subject: [PATCH 03/26] Bump plugin 
version to 0.2.7 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6988bf2..b315afe 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.6", + "version": "0.2.7", "author": "hackolade", "engines": { "hackolade": "6.7.1", From 26f035f3b3f6681ba8a5823641877982de39188b Mon Sep 17 00:00:00 2001 From: Taras Dubyk Date: Thu, 26 Sep 2024 16:16:03 +0300 Subject: [PATCH 04/26] increase minimum app version to fix Polyglot derive, XSD RE, DDL RE composite relationships issue (#109) --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b315afe..064e1a7 100644 --- a/package.json +++ b/package.json @@ -3,7 +3,7 @@ "version": "0.2.7", "author": "hackolade", "engines": { - "hackolade": "6.7.1", + "hackolade": "7.7.10", "hackoladePlugin": "1.2.0" }, "contributes": { From cc06d5e457b77fbe969ae5ac7a5bb4f98563178f Mon Sep 17 00:00:00 2001 From: Teamcity Date: Fri, 27 Sep 2024 17:57:40 +0000 Subject: [PATCH 05/26] Bump plugin version to 0.2.8 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 064e1a7..b11494a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.7", + "version": "0.2.8", "author": "hackolade", "engines": { "hackolade": "7.7.10", From a0f8d827706f00ca54ce8e1fc6adc60d6fad051e Mon Sep 17 00:00:00 2001 From: Taras Dubyk Date: Fri, 4 Oct 2024 14:21:32 +0300 Subject: [PATCH 06/26] HCK-8237: field single PK and UK should be mutually exclusive (#110) * enable showing all field abbreviations * field single PK and UK should be mutually exclusive --- adapter/0.2.8.json | 58 +++++++ package.json | 3 +- .../field_level/fieldLevelConfig.json | 154 +++++++++++++++--- 3 files changed, 193 insertions(+), 22 deletions(-) create mode 100644 adapter/0.2.8.json diff --git a/adapter/0.2.8.json b/adapter/0.2.8.json new file mode 100644 index 0000000..000a685 --- /dev/null +++ b/adapter/0.2.8.json @@ -0,0 +1,58 @@ +/** + * Copyright © 2016-2018 by IntegrIT S.A. dba Hackolade. All rights reserved. + * + * The copyright to the computer software herein is the property of IntegrIT S.A. + * The software may be used and/or copied only with the written permission of + * IntegrIT S.A. or in accordance with the terms and conditions stipulated in + * the agreement/contract under which the software has been supplied. 
+ * + * { + * "add": { + * "entity": [], + * "container": [], + * "model": [], + * "view": [], + * "field": { + * "": [] + * } + * }, + * "delete": { + * "entity": [], + * "container": [], + * "model": [], + * "view": [], + * "field": { + * "": [] + * } + * }, + * "modify": { + * "entity": [ + * { + * "from": { }, + * "to": { } + * } + * ], + * "container": [], + * "model": [], + * "view": [], + * "field": [] + * }, + * } + */ +{ + "add": {}, + "modify": { + "field": [ + { + "from": { + "primaryKey": true, + "unique": true + }, + "to": { + "unique": false + } + } + ] + }, + "delete": {} +} diff --git a/package.json b/package.json index b11494a..d57dee6 100644 --- a/package.json +++ b/package.json @@ -49,7 +49,8 @@ }, "FEScriptCommentsSupported": true, "enableFetchSystemEntitiesCheckbox": true, - "discoverRelationships": true + "discoverRelationships": true, + "enableKeysMultipleAbrr": true } }, "description": "Hackolade plugin for Microsoft SQL Server and Azure SQL Database", diff --git a/properties_pane/field_level/fieldLevelConfig.json b/properties_pane/field_level/fieldLevelConfig.json index 4fa5c5d..e01081f 100644 --- a/properties_pane/field_level/fieldLevelConfig.json +++ b/properties_pane/field_level/fieldLevelConfig.json @@ -553,6 +553,13 @@ making sure that you maintain a proper JSON format. ] } ] + }, + { + "type": "not", + "values": { + "key": "unique", + "value": true + } } ] } @@ -696,12 +703,21 @@ making sure that you maintain a proper JSON format. "type": "or", "values": [ { - "key": "compositePrimaryKey", - "value": false + "type": "or", + "values": [ + { + "key": "primaryKey", + "value": false + }, + { + "key": "primaryKey", + "exist": false + } + ] }, { "key": "compositePrimaryKey", - "exist": false + "value": true } ] }, @@ -1202,6 +1218,13 @@ making sure that you maintain a proper JSON format. ] } ] + }, + { + "type": "not", + "values": { + "key": "unique", + "value": true + } } ] } @@ -1345,12 +1368,21 @@ making sure that you maintain a proper JSON format. "type": "or", "values": [ { - "key": "compositePrimaryKey", - "value": false + "type": "or", + "values": [ + { + "key": "primaryKey", + "value": false + }, + { + "key": "primaryKey", + "exist": false + } + ] }, { "key": "compositePrimaryKey", - "exist": false + "value": true } ] }, @@ -1815,6 +1847,13 @@ making sure that you maintain a proper JSON format. ] } ] + }, + { + "type": "not", + "values": { + "key": "unique", + "value": true + } } ] } @@ -1958,12 +1997,21 @@ making sure that you maintain a proper JSON format. "type": "or", "values": [ { - "key": "compositePrimaryKey", - "value": false + "type": "or", + "values": [ + { + "key": "primaryKey", + "value": false + }, + { + "key": "primaryKey", + "exist": false + } + ] }, { "key": "compositePrimaryKey", - "exist": false + "value": true } ] }, @@ -2418,6 +2466,13 @@ making sure that you maintain a proper JSON format. ] } ] + }, + { + "type": "not", + "values": { + "key": "unique", + "value": true + } } ] } @@ -2561,12 +2616,21 @@ making sure that you maintain a proper JSON format. "type": "or", "values": [ { - "key": "compositePrimaryKey", - "value": false + "type": "or", + "values": [ + { + "key": "primaryKey", + "value": false + }, + { + "key": "primaryKey", + "exist": false + } + ] }, { "key": "compositePrimaryKey", - "exist": false + "value": true } ] }, @@ -3010,6 +3074,13 @@ making sure that you maintain a proper JSON format. 
] } ] + }, + { + "type": "not", + "values": { + "key": "unique", + "value": true + } } ] } @@ -3153,12 +3224,21 @@ making sure that you maintain a proper JSON format. "type": "or", "values": [ { - "key": "compositePrimaryKey", - "value": false + "type": "or", + "values": [ + { + "key": "primaryKey", + "value": false + }, + { + "key": "primaryKey", + "exist": false + } + ] }, { "key": "compositePrimaryKey", - "exist": false + "value": true } ] }, @@ -3585,6 +3665,13 @@ making sure that you maintain a proper JSON format. ] } ] + }, + { + "type": "not", + "values": { + "key": "unique", + "value": true + } } ] } @@ -3728,12 +3815,21 @@ making sure that you maintain a proper JSON format. "type": "or", "values": [ { - "key": "compositePrimaryKey", - "value": false + "type": "or", + "values": [ + { + "key": "primaryKey", + "value": false + }, + { + "key": "primaryKey", + "exist": false + } + ] }, { "key": "compositePrimaryKey", - "exist": false + "value": true } ] }, @@ -4145,6 +4241,13 @@ making sure that you maintain a proper JSON format. ] } ] + }, + { + "type": "not", + "values": { + "key": "unique", + "value": true + } } ] } @@ -4288,12 +4391,21 @@ making sure that you maintain a proper JSON format. "type": "or", "values": [ { - "key": "compositePrimaryKey", - "value": false + "type": "or", + "values": [ + { + "key": "primaryKey", + "value": false + }, + { + "key": "primaryKey", + "exist": false + } + ] }, { "key": "compositePrimaryKey", - "exist": false + "value": true } ] }, From 179af1ee38975b42a7dabf976b669995bea9b038 Mon Sep 17 00:00:00 2001 From: Teamcity Date: Fri, 4 Oct 2024 14:57:22 +0000 Subject: [PATCH 07/26] Bump plugin version to 0.2.9 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index d57dee6..9143d0f 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.8", + "version": "0.2.9", "author": "hackolade", "engines": { "hackolade": "7.7.10", From 935ea53a06d76ba3467607bf151f1e38a0480152 Mon Sep 17 00:00:00 2001 From: Nazar Kovtun <88377450+WilhelmWesser@users.noreply.github.com> Date: Thu, 24 Oct 2024 19:27:25 +0300 Subject: [PATCH 08/26] Hck 8467 add client id to azure active directory username password (#111) * HCK-8467: added input for Client Id in Azure Active Directory (Username / Password) connection method * HCK-8467: added client id passing to connection * HCK-8467: reused HCK app clientId for password auth flow --- reverse_engineering/databaseService/databaseService.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/databaseService/databaseService.js b/reverse_engineering/databaseService/databaseService.js index 2907bc0..9dc7c2d 100644 --- a/reverse_engineering/databaseService/databaseService.js +++ b/reverse_engineering/databaseService/databaseService.js @@ -51,6 +51,8 @@ const getConnectionClient = async (connectionInfo, logger) => { : connectionInfo.userName; const tenantId = connectionInfo.connectionTenantId || connectionInfo.tenantId || 'common'; const sslOptions = getSslConfig(connectionInfo); + const clientId = '0dc36597-bc44-49f8-a4a7-ae5401959b85'; + const redirectUri = 'http://localhost:8080'; if (connectionInfo.authMethod === 'Username / Password') { return await sql.connect({ @@ -88,8 +90,6 @@ const getConnectionClient = async (connectionInfo, logger) => { requestTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, }); } else if (connectionInfo.authMethod === 'Azure 
Active Directory (MFA)') { - const clientId = '0dc36597-bc44-49f8-a4a7-ae5401959b85'; - const redirectUri = 'http://localhost:8080'; const token = await getToken({ connectionInfo, tenantId, clientId, redirectUri, logger }); return await sql.connect({ @@ -127,6 +127,8 @@ const getConnectionClient = async (connectionInfo, logger) => { options: { userName: connectionInfo.userName, password: connectionInfo.userPassword, + tenantId, + clientId, }, }, connectTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, From 9d52bc57447fc86d7ea9eafc2a109a50f1095747 Mon Sep 17 00:00:00 2001 From: Teamcity Date: Fri, 25 Oct 2024 17:41:26 +0000 Subject: [PATCH 09/26] Bump plugin version to 0.2.10 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9143d0f..9789cfb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.9", + "version": "0.2.10", "author": "hackolade", "engines": { "hackolade": "7.7.10", From 010437b8acd14ebf4ae3deae7fe242b0f206fe5c Mon Sep 17 00:00:00 2001 From: Yevhenii Moroziuk Date: Mon, 28 Oct 2024 12:16:16 +0200 Subject: [PATCH 10/26] HCK-8490: Escape quotes in comment in DDL: SQLServer (#112) * HCK-8498: Fix DDL when single key in composite key (primary and unique) * HCK-8490: Escape quotes in comment in DDL: SQLServer * HCK-8490: Escape quotes in comment in DDL: SQLServer --------- Co-authored-by: Vitalii Yarmus <71256742+Vitalii4as@users.noreply.github.com> --- forward_engineering/configs/templates.js | 16 +++++------ forward_engineering/ddlProvider.js | 27 ++++++++++++------- .../helpers/columnDefinitionHelper.js | 25 ++++++++--------- .../helpers/constraintsHelper.js | 2 +- forward_engineering/utils/general.js | 8 ++++++ 5 files changed, 48 insertions(+), 30 deletions(-) diff --git a/forward_engineering/configs/templates.js b/forward_engineering/configs/templates.js index 267e2ce..82a11e7 100644 --- a/forward_engineering/configs/templates.js +++ b/forward_engineering/configs/templates.js @@ -91,16 +91,16 @@ module.exports = { dropType: 'DROP TYPE IF EXISTS ${name}${terminator}', createSchemaComment: - "EXEC sp_addextendedproperty 'MS_Description', '${value}', 'schema', ${schemaName}${terminator}", + "EXEC sp_addextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}${terminator}", createTableComment: - "EXEC sp_addextendedproperty 'MS_Description', '${value}', 'schema', ${schemaName}, 'table', ${tableName}${terminator}", + "EXEC sp_addextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}, 'table', ${tableName}${terminator}", createColumnComment: - "EXEC sp_addextendedproperty 'MS_Description', '${value}', 'schema', ${schemaName}, 'table', ${tableName}, 'column', ${columnName}${terminator}", + "EXEC sp_addextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}, 'table', ${tableName}, 'column', ${columnName}${terminator}", createViewComment: - "EXEC sp_addextendedproperty 'MS_Description', '${value}', 'schema', ${schemaName}, 'view', ${viewName}${terminator}", + "EXEC sp_addextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}, 'view', ${viewName}${terminator}", dropSchemaComment: "EXEC sp_dropextendedproperty 'MS_Description', 'schema', ${schemaName}${terminator}", @@ -114,14 +114,14 @@ module.exports = { "EXEC sp_dropextendedproperty 'MS_Description', 'schema', ${schemaName}, 'view', ${viewName}${terminator}", updateSchemaComment: - "EXEC sp_updateextendedproperty 
'MS_Description', '${value}', 'schema', ${schemaName}${terminator}", + "EXEC sp_updateextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}${terminator}", updateTableComment: - "EXEC sp_updateextendedproperty 'MS_Description', '${value}', 'schema', ${schemaName}, 'table', ${tableName}${terminator}", + "EXEC sp_updateextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}, 'table', ${tableName}${terminator}", updateColumnComment: - "EXEC sp_updateextendedproperty 'MS_Description', '${value}', 'schema', ${schemaName}, 'table', ${tableName}, 'column', ${columnName}${terminator}", + "EXEC sp_updateextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}, 'table', ${tableName}, 'column', ${columnName}${terminator}", updateViewComment: - "EXEC sp_updateextendedproperty 'MS_Description', '${value}', 'schema', ${schemaName}, 'view', ${viewName}${terminator}", + "EXEC sp_updateextendedproperty 'MS_Description', N'${value}', 'schema', ${schemaName}, 'view', ${viewName}${terminator}", }; diff --git a/forward_engineering/ddlProvider.js b/forward_engineering/ddlProvider.js index cc1adf4..7ab9d6d 100644 --- a/forward_engineering/ddlProvider.js +++ b/forward_engineering/ddlProvider.js @@ -40,7 +40,7 @@ module.exports = (baseProvider, options, app) => { const { wrapIfNotExistSchema, wrapIfNotExistDatabase, wrapIfNotExistTable, wrapIfNotExistView } = require('./helpers/ifNotExistStatementHelper')(app); const { getPartitionedTables, getCreateViewData } = require('./helpers/viewHelper')(app); - const { getFullTableName } = require('./utils/general')(_); + const { getFullTableName, escapeSpecialCharacters } = require('./utils/general')(_); const terminator = getTerminator(options); @@ -773,7 +773,7 @@ module.exports = (baseProvider, options, app) => { createSchemaComment({ schemaName, comment, customTerminator }) { return assignTemplates(templates.createSchemaComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), terminator: customTerminator ?? terminator, }); @@ -783,8 +783,9 @@ module.exports = (baseProvider, options, app) => { if (!schemaName) { return ''; } + return assignTemplates(templates.createTableComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), tableName: wrapInBrackets(tableName), terminator: customTerminator ?? terminator, @@ -795,8 +796,9 @@ module.exports = (baseProvider, options, app) => { if (!tableName || !columnName) { return ''; } + return assignTemplates(templates.createColumnComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), tableName: wrapInBrackets(tableName), columnName: wrapInBrackets(columnName), @@ -808,8 +810,9 @@ module.exports = (baseProvider, options, app) => { if (!schemaName) { return ''; } + return assignTemplates(templates.createViewComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), viewName: wrapInBrackets(viewName), terminator: customTerminator ?? 
terminator, @@ -827,6 +830,7 @@ module.exports = (baseProvider, options, app) => { if (!schemaName) { return ''; } + return assignTemplates(templates.dropTableComment, { schemaName: wrapInBrackets(schemaName), tableName: wrapInBrackets(tableName), @@ -838,6 +842,7 @@ module.exports = (baseProvider, options, app) => { if (!schemaName || !tableName) { return ''; } + return assignTemplates(templates.dropColumnComment, { schemaName: wrapInBrackets(schemaName), tableName: wrapInBrackets(tableName), @@ -850,6 +855,7 @@ module.exports = (baseProvider, options, app) => { if (!schemaName) { return ''; } + return assignTemplates(templates.dropViewComment, { schemaName: wrapInBrackets(schemaName), viewName: wrapInBrackets(viewName), @@ -859,7 +865,7 @@ module.exports = (baseProvider, options, app) => { updateSchemaComment({ schemaName, comment, customTerminator }) { return assignTemplates(templates.updateSchemaComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), terminator: customTerminator ?? terminator, }); @@ -869,8 +875,9 @@ module.exports = (baseProvider, options, app) => { if (!schemaName) { return ''; } + return assignTemplates(templates.updateTableComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), tableName: wrapInBrackets(tableName), terminator: customTerminator ?? terminator, @@ -881,8 +888,9 @@ module.exports = (baseProvider, options, app) => { if (!schemaName || !tableName) { return ''; } + return assignTemplates(templates.updateColumnComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), tableName: wrapInBrackets(tableName), columnName: wrapInBrackets(columnName), @@ -894,8 +902,9 @@ module.exports = (baseProvider, options, app) => { if (!schemaName) { return ''; } + return assignTemplates(templates.updateViewComment, { - value: comment, + value: escapeSpecialCharacters(comment), schemaName: wrapInBrackets(schemaName), viewName: wrapInBrackets(viewName), terminator: customTerminator ?? 
terminator, diff --git a/forward_engineering/helpers/columnDefinitionHelper.js b/forward_engineering/helpers/columnDefinitionHelper.js index 47d6c63..0eefc7d 100644 --- a/forward_engineering/helpers/columnDefinitionHelper.js +++ b/forward_engineering/helpers/columnDefinitionHelper.js @@ -3,6 +3,7 @@ const templates = require('../configs/templates'); module.exports = app => { const { assignTemplates } = app.require('@hackolade/ddl-fe-utils'); const _ = app.require('lodash'); + const { wrapInBrackets, escapeSpecialCharacters } = require('../utils/general')(_); const addLength = (type, length) => { return `${type}(${length})`; @@ -15,9 +16,9 @@ module.exports = app => { const addScalePrecision = (type, precision, scale) => { if (_.isNumber(scale)) { return `${type}(${precision},${scale})`; - } else { - return `${type}(${precision})`; } + + return `${type}(${precision})`; }; const addPrecision = (type, precision) => { @@ -69,17 +70,17 @@ module.exports = app => { return `'${escapeQuotes(defaultValue)}'`; } else if (type === 'XML') { return `CAST(N'${defaultValue}' AS xml)`; - } else { - return defaultValue; } + + return defaultValue; }; const getIdentity = identity => { if (!identity.seed || !identity.increment) { return ''; - } else { - return ` IDENTITY(${identity.seed}, ${identity.increment})`; } + + return ` IDENTITY(${identity.seed}, ${identity.increment})`; }; const addClustered = (statement, columnDefinition) => { @@ -89,9 +90,9 @@ module.exports = app => { if (!columnDefinition.clustered) { return statement + ' NONCLUSTERED'; - } else { - return statement + ' CLUSTERED'; } + + return statement + ' CLUSTERED'; }; const getEncryptedWith = encryption => { @@ -118,10 +119,10 @@ module.exports = app => { return ''; } const commentStatement = assignTemplates(templates.createColumnComment, { - value: comment, - schemaName: `[${schemaName}]`, - tableName: `[${tableName}]`, - columnName: `[${name}]`, + value: escapeSpecialCharacters(comment), + schemaName: wrapInBrackets(schemaName), + tableName: wrapInBrackets(tableName), + columnName: wrapInBrackets(name), terminator, }); diff --git a/forward_engineering/helpers/constraintsHelper.js b/forward_engineering/helpers/constraintsHelper.js index 302a90a..a021c23 100644 --- a/forward_engineering/helpers/constraintsHelper.js +++ b/forward_engineering/helpers/constraintsHelper.js @@ -66,7 +66,7 @@ module.exports = app => { const cleaned = clean(_.omit(keyData, 'keyType', 'indexOption', 'columns')); - return !_.isEmpty(cleaned) || keyData.columns?.length > 1; + return !_.isEmpty(cleaned) || keyData.columns?.length; }; const adaptIndexOptions = indexOption => { diff --git a/forward_engineering/utils/general.js b/forward_engineering/utils/general.js index 392d61e..267a47a 100644 --- a/forward_engineering/utils/general.js +++ b/forward_engineering/utils/general.js @@ -62,6 +62,7 @@ module.exports = _ => { } else if (!isNaN(name)) { return `\`${name}\``; } + return name; }; const replaceSpaceWithUnderscore = (name = '') => { @@ -79,6 +80,7 @@ module.exports = _ => { const getFullCollectionName = collectionSchema => { const collectionName = getEntityName(collectionSchema); const bucketName = collectionSchema.compMod?.keyspaceName; + return getNamePrefixedWithSchemaName(collectionName, bucketName); }; @@ -156,6 +158,10 @@ module.exports = _ => { return `[${name}]`; }; + const escapeSpecialCharacters = (name = '') => { + return name.replace(/'/g, "''"); + }; + const buildScript = statements => { const formattedScripts = statements .filter(Boolean) @@ -183,6 
+189,7 @@ module.exports = _ => { if (!newProperty && !oldProperty) { return; } + return !_.isEqual(newProperty, oldProperty); }; @@ -209,6 +216,7 @@ module.exports = _ => { commentDeactivatedInlineKeys, buildScript, wrapInBrackets, + escapeSpecialCharacters, getFullEntityName, getFullTableName, getFullCollectionName, From ef09b9da0cdd04c0ccecc3e263b77c8c99ae503a Mon Sep 17 00:00:00 2001 From: Teamcity Date: Tue, 29 Oct 2024 15:18:56 +0000 Subject: [PATCH 11/26] Bump plugin version to 0.2.11 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9789cfb..ece483b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.10", + "version": "0.2.11", "author": "hackolade", "engines": { "hackolade": "7.7.10", From 1c7e2f4a1ae0e4992c050ad84f03677d9ff6d70d Mon Sep 17 00:00:00 2001 From: Nazar Kovtun <88377450+WilhelmWesser@users.noreply.github.com> Date: Wed, 30 Oct 2024 15:37:27 +0200 Subject: [PATCH 12/26] HCK-8554: extended errors processing to handle Entra grant consent issues (#113) * HCK-8554: extended errors processing to handle Entra grant consent issues * HCK-8554: removed redundant variables and escape characters --------- Co-authored-by: Vitalii Yarmus <71256742+Vitalii4as@users.noreply.github.com> Co-authored-by: Thomas Jakemeyn --- reverse_engineering/api.js | 23 ++++- .../databaseService/helpers/errorService.js | 99 +++++++++++++++++++ 2 files changed, 118 insertions(+), 4 deletions(-) create mode 100644 reverse_engineering/databaseService/helpers/errorService.js diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index b43e0a5..1f452b9 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -17,6 +17,7 @@ const { adaptJsonSchema } = require('./helpers/adaptJsonSchema'); const crypto = require('crypto'); const randomstring = require('randomstring'); const base64url = require('base64url'); +const { prepareError } = require('./databaseService/helpers/errorService'); module.exports = { async connect(connectionInfo, logger, callback, app) { @@ -47,8 +48,17 @@ module.exports = { } callback(null); } catch (error) { - logger.log('error', { message: error.message, stack: error.stack, error }, 'Test connection'); - callback({ message: error.message, stack: error.stack }); + const errorWithUpdatedInfo = prepareError({ error }); + logger.log( + 'error', + { + message: errorWithUpdatedInfo.message, + stack: errorWithUpdatedInfo.stack, + error: errorWithUpdatedInfo, + }, + 'Test connection', + ); + callback({ message: errorWithUpdatedInfo.message, stack: errorWithUpdatedInfo.stack }); } }, @@ -91,12 +101,17 @@ module.exports = { logger.log('info', { collation: collationData[0] }, 'Database collation'); callback(null, objects); } catch (error) { + const errorWithUpdatedInfo = prepareError({ error }); logger.log( 'error', - { message: error.message, stack: error.stack, error }, + { + message: errorWithUpdatedInfo.message, + stack: errorWithUpdatedInfo.stack, + error: errorWithUpdatedInfo, + }, 'Retrieving databases and tables information', ); - callback({ message: error.message, stack: error.stack }); + callback({ message: errorWithUpdatedInfo.message, stack: errorWithUpdatedInfo.stack }); } }, diff --git a/reverse_engineering/databaseService/helpers/errorService.js b/reverse_engineering/databaseService/helpers/errorService.js new file mode 100644 index 0000000..cb18d5e --- /dev/null +++ 
b/reverse_engineering/databaseService/helpers/errorService.js @@ -0,0 +1,99 @@ +/** + * + * @param {{message: string}} param + * @returns {boolean} + */ +const isDisabledPublicClientFlowsError = ({ message }) => { + const DISABLED_PUBLIC_CLIENT_FLOWS_ERROR_ID = 'AADSTS7000218'; + + return message.includes(DISABLED_PUBLIC_CLIENT_FLOWS_ERROR_ID); +}; + +/** + * + * @param {{message: string}} param + * @returns {boolean} + */ +const isConsentRequiredError = ({ message }) => { + const CONSENT_REQUIRED_ERROR_ID = 'AADSTS65001'; + + return message.includes(CONSENT_REQUIRED_ERROR_ID); +}; + +/** + * + * @param {{error: object, newMessage: string, newStackTrace: string}} param + * @returns {object} + */ +const updateErrorMessageAndStack = ({ error, newMessage, newStackTrace }) => ({ + code: error.code, + name: error.name, + message: newMessage, + stack: newStackTrace, +}); + +/** + * + * @param {{clientId: string}} param + * @returns {string} + */ +const getConsentRequiredErrorMessage = ({ clientId }) => { + const consentLink = `https://login.microsoftonline.com/organizations/adminconsent?client_id=${clientId}`; + + return `Your Azure administrator needs to grant tenant-wide consent to the Hackolade application using the link below: ${consentLink}`; +}; + +/** + * + * @param {{message: string}} param + * @returns {string} + */ +const getClientIdFromErrorMessage = ({ message }) => { + const clientIdRegularExpression = new RegExp(/'[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}'/gim); + const clientIdMatches = message.match(clientIdRegularExpression); + + if (clientIdMatches.length === 0) { + return 'Unknown'; + } + + const [clientId] = clientIdMatches; + const clientIdWithoutQuotes = clientId.slice(1, clientId.length - 1); + + return clientIdWithoutQuotes; +}; + +/** + * + * @param {{error: object}} param + * @returns {object} + */ +const prepareError = ({ error }) => { + const originalErrors = error?.originalError?.errors; + if (!originalErrors || originalErrors?.length === 0) { + return error; + } + + const initialErrorDataIndex = originalErrors.length - 1; + const initialError = originalErrors[initialErrorDataIndex]; + + const isInitialErrorConsentRequiredError = isConsentRequiredError(initialError); + if (isInitialErrorConsentRequiredError) { + const clientId = getClientIdFromErrorMessage({ message: initialError.message }); + const newErrorMessage = getConsentRequiredErrorMessage({ clientId }); + + return updateErrorMessageAndStack({ error, newMessage: newErrorMessage, newStackTrace: initialError.stack }); + } + + const isInitialErrorDisabledPublicClientFlowsError = isDisabledPublicClientFlowsError(initialError); + if (isInitialErrorDisabledPublicClientFlowsError) { + const newErrorMessage = 'You need to allow Public client flows for the Entra ID application'; + + return updateErrorMessageAndStack({ error, newMessage: newErrorMessage, newStackTrace: initialError.stack }); + } + + return error; +}; + +module.exports = { + prepareError, +}; From a37258af9dde2bd4ea94e788a706d6754b26aa20 Mon Sep 17 00:00:00 2001 From: Nazar Kovtun <88377450+WilhelmWesser@users.noreply.github.com> Date: Thu, 31 Oct 2024 14:11:10 +0200 Subject: [PATCH 13/26] HCK: improved logs to get the full stack trace from original (initial) RE error (#115) * HCK: improved logs to get the full stack trace from original (initial) RE error * HCK: removed comparison to 0 --- .../databaseService/helpers/errorService.js | 31 ++++++++++++++++--- 1 file changed, 26 insertions(+), 5 deletions(-) diff --git 
a/reverse_engineering/databaseService/helpers/errorService.js b/reverse_engineering/databaseService/helpers/errorService.js index cb18d5e..d49d316 100644 --- a/reverse_engineering/databaseService/helpers/errorService.js +++ b/reverse_engineering/databaseService/helpers/errorService.js @@ -62,36 +62,57 @@ const getClientIdFromErrorMessage = ({ message }) => { return clientIdWithoutQuotes; }; +/** + * + * @param {{error}} param + * @returns {object[]} + */ +const getOriginalErrors = ({ error }) => { + const originalErrors = error?.originalError?.errors; + if (originalErrors) { + return originalErrors; + } + + const originalError = error?.originalError; + if (originalError) { + return [originalError]; + } + + return []; +}; + /** * * @param {{error: object}} param * @returns {object} */ const prepareError = ({ error }) => { - const originalErrors = error?.originalError?.errors; - if (!originalErrors || originalErrors?.length === 0) { + const originalErrors = getOriginalErrors({ error }); + if (!originalErrors.length) { return error; } const initialErrorDataIndex = originalErrors.length - 1; const initialError = originalErrors[initialErrorDataIndex]; + const fullStackTrace = originalErrors.map(({ stack }) => stack).join('\n\n'); + const isInitialErrorConsentRequiredError = isConsentRequiredError(initialError); if (isInitialErrorConsentRequiredError) { const clientId = getClientIdFromErrorMessage({ message: initialError.message }); const newErrorMessage = getConsentRequiredErrorMessage({ clientId }); - return updateErrorMessageAndStack({ error, newMessage: newErrorMessage, newStackTrace: initialError.stack }); + return updateErrorMessageAndStack({ error, newMessage: newErrorMessage, newStackTrace: fullStackTrace }); } const isInitialErrorDisabledPublicClientFlowsError = isDisabledPublicClientFlowsError(initialError); if (isInitialErrorDisabledPublicClientFlowsError) { const newErrorMessage = 'You need to allow Public client flows for the Entra ID application'; - return updateErrorMessageAndStack({ error, newMessage: newErrorMessage, newStackTrace: initialError.stack }); + return updateErrorMessageAndStack({ error, newMessage: newErrorMessage, newStackTrace: fullStackTrace }); } - return error; + return updateErrorMessageAndStack({ error, newMessage: initialError.message, newStackTrace: fullStackTrace }); }; module.exports = { From 991c86de2423a26946ac8a157ed192255528b224 Mon Sep 17 00:00:00 2001 From: chulanovskyi-bs <56116665+chulanovskyi-bs@users.noreply.github.com> Date: Thu, 31 Oct 2024 18:06:03 +0200 Subject: [PATCH 14/26] HCK-8611: connection string parser (#114) * fix: connection string parser * feat: improved connection string detection of mssql format * fix: connection string parser * fix: aligned improvements between plugins --- reverse_engineering/api.js | 27 ++++---- .../helpers/parseConnectionString.js | 65 +++++++++++++++++++ 2 files changed, 78 insertions(+), 14 deletions(-) create mode 100644 reverse_engineering/helpers/parseConnectionString.js diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 1f452b9..3959ddd 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -1,6 +1,8 @@ 'use strict'; -const { BasePool } = require('mssql'); +const crypto = require('crypto'); +const randomstring = require('randomstring'); +const base64url = require('base64url'); const { getClient, setClient, clearClient } = require('./connectionState'); const { getObjectsFromDatabase, getDatabaseCollationOption } = require('./databaseService/databaseService'); 
const { @@ -14,9 +16,7 @@ const { getJsonSchemasWithInjectedDescriptionComments } = require('./helpers/com const filterRelationships = require('./helpers/filterRelationships'); const getOptionsFromConnectionInfo = require('./helpers/getOptionsFromConnectionInfo'); const { adaptJsonSchema } = require('./helpers/adaptJsonSchema'); -const crypto = require('crypto'); -const randomstring = require('randomstring'); -const base64url = require('base64url'); +const { parseConnectionString } = require('./helpers/parseConnectionString'); const { prepareError } = require('./databaseService/helpers/errorService'); module.exports = { @@ -156,16 +156,15 @@ module.exports = { parseConnectionString({ connectionString = '' }, logger, callback) { try { - const parsedConnectionStringData = BasePool.parseConnectionString(connectionString); - const parsedData = { - databaseName: parsedConnectionStringData.database, - host: parsedConnectionStringData.server, - port: parsedConnectionStringData.port, - authMethod: 'Username / Password', - userName: parsedConnectionStringData.user, - userPassword: parsedConnectionStringData.password, - }; - callback(null, { parsedData }); + const authMethod = 'Username / Password'; + const parsedData = parseConnectionString({ string: connectionString }); + + callback(null, { + parsedData: { + authMethod, + ...parsedData, + }, + }); } catch (err) { logger.log('error', { message: err.message, stack: err.stack, err }, 'Parsing connection string failed'); callback({ message: err.message, stack: err.stack }); diff --git a/reverse_engineering/helpers/parseConnectionString.js b/reverse_engineering/helpers/parseConnectionString.js new file mode 100644 index 0000000..f00faef --- /dev/null +++ b/reverse_engineering/helpers/parseConnectionString.js @@ -0,0 +1,65 @@ +const { URL } = require('url'); +const { ConnectionPool } = require('mssql'); + +const mssqlPrefix = 'mssql://'; +const sqlserverPrefix = 'jdbc:sqlserver://'; + +// example: mssql://username:password@host:1433/DatabaseName +const parseMssqlUrl = ({ url = '' }) => { + const parsed = new URL(url); + return { + database: parsed.pathname.slice(1), + host: parsed.hostname, + port: parsed.port ? Number(parsed.port) : null, + userName: parsed.username, + userPassword: parsed.password, + }; +}; + +// example: jdbc:sqlserver://synapseworkspace.sql.azuresynapse.net:1433;databaseName=SampleDB;user=myusername@mytenant.onmicrosoft.com;password=myStrongPassword123;encrypt=true;trustServerCertificate=false;authentication=ActiveDirectoryPassword;loginTimeout=30; +const parseSqlServerUrl = ({ url = '' }) => { + const [_protocol, params] = url.split(sqlserverPrefix); + const [server, ...paramParts] = params.split(';'); + const [host, port] = server.split(':'); + + const parsedParams = paramParts.reduce((acc, part) => { + const [key, value] = part.split('='); + if (key && value) { + acc[key] = value; + } + return acc; + }, {}); + + return { + server: host, + port: port ? 
Number(port) : null, + database: parsedParams.databaseName, + user: parsedParams.user, + password: parsedParams.password, + }; +}; + +// Default connection string example: +// Server=host,1433;Database=DatabaseName;User Id=username;Password=password; +const parseConnectionString = ({ string = '' }) => { + let params; + if (string.startsWith(sqlserverPrefix)) { + params = parseSqlServerUrl({ url: string }); + } else if (string.startsWith(mssqlPrefix)) { + params = parseMssqlUrl({ url: string }); + } else { + params = ConnectionPool.parseConnectionString(string); + } + + return { + databaseName: params.database, + host: params.server, + port: params.port, + userName: params.user, + userPassword: params.password, + }; +}; + +module.exports = { + parseConnectionString, +}; From 374e600710eb977184c380b9ee3993dc646f203c Mon Sep 17 00:00:00 2001 From: chulanovskyi-bs <56116665+chulanovskyi-bs@users.noreply.github.com> Date: Thu, 31 Oct 2024 19:28:58 +0200 Subject: [PATCH 15/26] HCK-8611: connection string parser (#116) * fix: connection string parser * feat: improved connection string detection of mssql format * fix: connection string parser * fix: aligned improvements between plugins * fix: unified helpers --- .../helpers/parseConnectionString.js | 47 ++++++++++--------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/reverse_engineering/helpers/parseConnectionString.js b/reverse_engineering/helpers/parseConnectionString.js index f00faef..96781a9 100644 --- a/reverse_engineering/helpers/parseConnectionString.js +++ b/reverse_engineering/helpers/parseConnectionString.js @@ -1,5 +1,5 @@ -const { URL } = require('url'); const { ConnectionPool } = require('mssql'); +const { URL } = require('url'); const mssqlPrefix = 'mssql://'; const sqlserverPrefix = 'jdbc:sqlserver://'; @@ -8,9 +8,9 @@ const sqlserverPrefix = 'jdbc:sqlserver://'; const parseMssqlUrl = ({ url = '' }) => { const parsed = new URL(url); return { - database: parsed.pathname.slice(1), host: parsed.hostname, port: parsed.port ? Number(parsed.port) : null, + databaseName: parsed.pathname.slice(1), userName: parsed.username, userPassword: parsed.password, }; @@ -18,11 +18,11 @@ const parseMssqlUrl = ({ url = '' }) => { // example: jdbc:sqlserver://synapseworkspace.sql.azuresynapse.net:1433;databaseName=SampleDB;user=myusername@mytenant.onmicrosoft.com;password=myStrongPassword123;encrypt=true;trustServerCertificate=false;authentication=ActiveDirectoryPassword;loginTimeout=30; const parseSqlServerUrl = ({ url = '' }) => { - const [_protocol, params] = url.split(sqlserverPrefix); - const [server, ...paramParts] = params.split(';'); + const [_protocol, urlParams] = url.split(sqlserverPrefix); + const [server, ...paramParts] = urlParams.split(';'); const [host, port] = server.split(':'); - const parsedParams = paramParts.reduce((acc, part) => { + const params = paramParts.reduce((acc, part) => { const [key, value] = part.split('='); if (key && value) { acc[key] = value; @@ -31,33 +31,34 @@ const parseSqlServerUrl = ({ url = '' }) => { }, {}); return { - server: host, + host, port: port ? 
Number(port) : null, - database: parsedParams.databaseName, - user: parsedParams.user, - password: parsedParams.password, + databaseName: params.databaseName, + userName: params.user, + userPassword: params.password, + }; +}; + +// example: Server=tcp:synapseworkspace.sql.azuresynapse.net,1433;Database=SampleDB;Authentication=Active Directory Password;User ID=myusername@mytenant.onmicrosoft.com;Password=password;Encrypt=true;TrustServerCertificate=false;Connection Timeout=30; +const parseBasicString = ({ string = '' }) => { + const parsed = ConnectionPool.parseConnectionString(string); + return { + databaseName: parsed.database, + host: parsed.server, + port: parsed.port, + userName: parsed.user, + userPassword: parsed.password, }; }; -// Default connection string example: -// Server=host,1433;Database=DatabaseName;User Id=username;Password=password; const parseConnectionString = ({ string = '' }) => { - let params; if (string.startsWith(sqlserverPrefix)) { - params = parseSqlServerUrl({ url: string }); + return parseSqlServerUrl({ url: string }); } else if (string.startsWith(mssqlPrefix)) { - params = parseMssqlUrl({ url: string }); + return parseMssqlUrl({ url: string }); } else { - params = ConnectionPool.parseConnectionString(string); + return parseBasicString({ string }); } - - return { - databaseName: params.database, - host: params.server, - port: params.port, - userName: params.user, - userPassword: params.password, - }; }; module.exports = { From bc309e515bf3da1d64303102a013cfa5f9c1947b Mon Sep 17 00:00:00 2001 From: chulanovskyi-bs <56116665+chulanovskyi-bs@users.noreply.github.com> Date: Thu, 31 Oct 2024 20:17:16 +0200 Subject: [PATCH 16/26] HCK-8611: server Instance (#117) * fix: handle the case when the Instance is in server param * chore: sonar --- reverse_engineering/helpers/parseConnectionString.js | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/reverse_engineering/helpers/parseConnectionString.js b/reverse_engineering/helpers/parseConnectionString.js index 96781a9..c5d0c6d 100644 --- a/reverse_engineering/helpers/parseConnectionString.js +++ b/reverse_engineering/helpers/parseConnectionString.js @@ -42,10 +42,15 @@ const parseSqlServerUrl = ({ url = '' }) => { // example: Server=tcp:synapseworkspace.sql.azuresynapse.net,1433;Database=SampleDB;Authentication=Active Directory Password;User ID=myusername@mytenant.onmicrosoft.com;Password=password;Encrypt=true;TrustServerCertificate=false;Connection Timeout=30; const parseBasicString = ({ string = '' }) => { const parsed = ConnectionPool.parseConnectionString(string); + + const serverRegex = /Server=(?:[a-z]+:)?([^,;]+)(?:,\d+)?/i; + const match = serverRegex.exec(string); + const host = match ? 
match[1] : parsed.server; + return { - databaseName: parsed.database, - host: parsed.server, + host: host, port: parsed.port, + databaseName: parsed.database, userName: parsed.user, userPassword: parsed.password, }; From 874ff0fcd0bdb52562d4e24c790756e1e19471dd Mon Sep 17 00:00:00 2001 From: Teamcity Date: Fri, 1 Nov 2024 11:05:11 +0000 Subject: [PATCH 17/26] Bump plugin version to 0.2.12 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ece483b..0ffa9b8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.11", + "version": "0.2.12", "author": "hackolade", "engines": { "hackolade": "7.7.10", From c2901fdf12e65c0369979b610fcfd09d15c1ef97 Mon Sep 17 00:00:00 2001 From: Alik Date: Fri, 15 Nov 2024 10:10:52 +0100 Subject: [PATCH 18/26] HCK-8640: Incorrect parsing of NamedInstance from connection string (#118) * HCK-8640: Incorrect parsing of NamedInstance from connection string * fix * fix --- reverse_engineering/helpers/parseConnectionString.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/reverse_engineering/helpers/parseConnectionString.js b/reverse_engineering/helpers/parseConnectionString.js index c5d0c6d..9cbea92 100644 --- a/reverse_engineering/helpers/parseConnectionString.js +++ b/reverse_engineering/helpers/parseConnectionString.js @@ -49,7 +49,7 @@ const parseBasicString = ({ string = '' }) => { return { host: host, - port: parsed.port, + port: host.includes('\\') ? null : parsed.port, databaseName: parsed.database, userName: parsed.user, userPassword: parsed.password, From 52600537ed8a03c3b11ac5e118ba6feedfcdc851 Mon Sep 17 00:00:00 2001 From: Teamcity Date: Sat, 16 Nov 2024 11:37:18 +0000 Subject: [PATCH 19/26] Bump plugin version to 0.2.13 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 0ffa9b8..3cb36bc 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.12", + "version": "0.2.13", "author": "hackolade", "engines": { "hackolade": "7.7.10", From 708bbc5e30792a0e30278ed1b133662e775c4bc6 Mon Sep 17 00:00:00 2001 From: Serhii Filonenko <91055067+serhii-filonenko@users.noreply.github.com> Date: Fri, 22 Nov 2024 15:27:14 +0200 Subject: [PATCH 20/26] HCK-8843: Fix RE for cross schema views (#119) * HCK-8843: remove relative table binding for view * HCK-8843: add an adapter to clear viewOn property --- adapter/0.2.13.json | 53 +++++++++++++++++++ reverse_engineering/api.js | 2 +- .../reverseEngineeringService.js | 36 ++++++------- 3 files changed, 69 insertions(+), 22 deletions(-) create mode 100644 adapter/0.2.13.json diff --git a/adapter/0.2.13.json b/adapter/0.2.13.json new file mode 100644 index 0000000..b2f5f96 --- /dev/null +++ b/adapter/0.2.13.json @@ -0,0 +1,53 @@ +/** + * Copyright © 2016-2024 by IntegrIT S.A. dba Hackolade. All rights reserved. + * + * The copyright to the computer software herein is the property of IntegrIT S.A. + * The software may be used and/or copied only with the written permission of + * IntegrIT S.A. or in accordance with the terms and conditions stipulated in + * the agreement/contract under which the software has been supplied. 
+ * + * { + * "add": { + * "entity": [], + * "container": [], + * "model": [], + * "view": [], + * "field": { + * "": [] + * } + * }, + * "delete": { + * "entity": [], + * "container": [], + * "model": [], + * "view": [], + * "field": { + * "": [] + * } + * }, + * "modify": { + * "entity": [ + * { + * "from": { }, + * "to": { } + * } + * ], + * "container": [], + * "model": [], + * "view": [], + * "field": [] + * }, + * } + */ +{ + "modify": { + "view": [ + { + "from": {}, + "to": { + "viewOn": "" + } + } + ] + } +} diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index 3959ddd..e786c80 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -140,7 +140,7 @@ module.exports = { }); callback( null, - mergeCollectionsWithViews(jsonSchemasWithDescriptionComments), + mergeCollectionsWithViews({ jsonSchemas: jsonSchemasWithDescriptionComments }), null, filterRelationships(relationships, jsonSchemasWithDescriptionComments), ); diff --git a/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js b/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js index 36c4b37..e025d58 100644 --- a/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js +++ b/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js @@ -1,3 +1,4 @@ +const { groupBy, omit, partition } = require('lodash'); const { getTableInfo, getTableRow, @@ -42,28 +43,21 @@ const { } = require('./helpers'); const pipe = require('../helpers/pipe'); -const mergeCollectionsWithViews = jsonSchemas => - jsonSchemas.reduce((structuredJSONSchemas, jsonSchema) => { - if (jsonSchema.relatedTables) { - const currentIndex = structuredJSONSchemas.findIndex( - structuredSchema => jsonSchema.collectionName === structuredSchema.collectionName && jsonSchema.dbName, - ); - const relatedTableSchemaIndex = structuredJSONSchemas.findIndex(({ collectionName, dbName }) => - jsonSchema.relatedTables.find( - ({ tableName, schemaName }) => tableName === collectionName && schemaName === dbName, - ), - ); - - if (relatedTableSchemaIndex !== -1 && doesViewHaveRelatedTables(jsonSchema, structuredJSONSchemas)) { - structuredJSONSchemas[relatedTableSchemaIndex].views.push(jsonSchema); - } - - delete jsonSchema.relatedTables; - return structuredJSONSchemas.filter((schema, i) => i !== currentIndex); - } +const mergeCollectionsWithViews = ({ jsonSchemas }) => { + const [viewSchemas, collectionSchemas] = partition(jsonSchemas, jsonSchema => jsonSchema.relatedTables); + const groupedViewSchemas = groupBy(viewSchemas, 'dbName'); + const combinedViewSchemas = Object.entries(groupedViewSchemas).map(([dbName, views]) => { + return { + dbName, + entityLevel: {}, + emptyBucket: false, + bucketInfo: views[0].bucketInfo, + views: views.map(view => omit(view, ['relatedTables'])), + }; + }); - return structuredJSONSchemas; - }, jsonSchemas); + return [...collectionSchemas, ...combinedViewSchemas]; +}; const getCollectionsRelationships = logger => async dbConnectionClient => { const dbName = dbConnectionClient.config.database; From c05efdf1b3f4ad4147938bc81fbc12c47eda9551 Mon Sep 17 00:00:00 2001 From: Teamcity Date: Fri, 22 Nov 2024 17:37:47 +0000 Subject: [PATCH 21/26] Bump plugin version to 0.2.14 for next development track --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3cb36bc..171737c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "SQLServer", - "version": "0.2.13", + 
"version": "0.2.14", "author": "hackolade", "engines": { "hackolade": "7.7.10", From 225bfdb458efc75a5b8df342dba65fa1f26ce4ee Mon Sep 17 00:00:00 2001 From: Nazar Kovtun <88377450+WilhelmWesser@users.noreply.github.com> Date: Mon, 25 Nov 2024 16:25:05 +0200 Subject: [PATCH 22/26] HCK-8696: add filtering of duplicated indexes columns for case with indexes on partitioned tables (#121) * HCK-8696: add filtering of duplicated indexes columns for case with indexes on partitioned tables * HCK-8696: removed redundant Boolean call --- .../helpers/getUniqueIndexesColumns.js | 23 +++++++++++++++++++ .../reverseEngineeringService.js | 4 +++- 2 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 reverse_engineering/reverseEngineeringService/helpers/getUniqueIndexesColumns.js diff --git a/reverse_engineering/reverseEngineeringService/helpers/getUniqueIndexesColumns.js b/reverse_engineering/reverseEngineeringService/helpers/getUniqueIndexesColumns.js new file mode 100644 index 0000000..7350a24 --- /dev/null +++ b/reverse_engineering/reverseEngineeringService/helpers/getUniqueIndexesColumns.js @@ -0,0 +1,23 @@ +const getColumnUniqueKey = ({ IndexName, TableName, schemaName, columnName }) => + `${schemaName}${IndexName}${TableName}${columnName}`; + +const getUniqueIndexesColumns = ({ indexesColumns }) => { + const uniqueKeysToColumns = {}; + + for (const indexesColumn of indexesColumns) { + const columnKey = getColumnUniqueKey(indexesColumn); + const isColumnUnique = !uniqueKeysToColumns[columnKey]; + + if (!isColumnUnique) { + continue; + } + + uniqueKeysToColumns[columnKey] = indexesColumn; + } + + return Object.values(uniqueKeysToColumns); +}; + +module.exports = { + getUniqueIndexesColumns, +}; diff --git a/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js b/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js index e025d58..494880b 100644 --- a/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js +++ b/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js @@ -42,6 +42,7 @@ const { getPeriodForSystemTime, } = require('./helpers'); const pipe = require('../helpers/pipe'); +const { getUniqueIndexesColumns } = require('./helpers/getUniqueIndexesColumns'); const mergeCollectionsWithViews = ({ jsonSchemas }) => { const [viewSchemas, collectionSchemas] = partition(jsonSchemas, jsonSchema => jsonSchema.relatedTables); @@ -309,7 +310,8 @@ const reverseCollectionsToJSON = logger => async (dbConnectionClient, tablesInfo rawDatabaseIndexes.map(i => i.index_id), logger, ); - const databaseIndexes = addTotalBucketCountToDatabaseIndexes(rawDatabaseIndexes, indexesBucketCount); + const uniqueDatabaseIndexesColumns = getUniqueIndexesColumns({ indexesColumns: rawDatabaseIndexes }); + const databaseIndexes = addTotalBucketCountToDatabaseIndexes(uniqueDatabaseIndexesColumns, indexesBucketCount); return await Object.entries(tablesInfo).reduce(async (jsonSchemas, [schemaName, tableNames]) => { logger.log('info', { message: `Fetching '${dbName}' database information` }, 'Reverse Engineering'); From e518474b71b50f174a50827bdbd1dee45ff6707a Mon Sep 17 00:00:00 2001 From: Yevhenii Moroziuk Date: Wed, 4 Dec 2024 17:06:08 +0200 Subject: [PATCH 23/26] HCK-8971: include schema name in index name of DDL (#122) * HCK-8971: include schema name in index name of DDL * HCK-8971: include schema name in index name of DDL * HCK-8971: fix comments --- forward_engineering/helpers/general.js | 18 ++++++++++++------ 
forward_engineering/helpers/indexHelper.js | 8 ++++---- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/forward_engineering/helpers/general.js b/forward_engineering/helpers/general.js index c56913d..4abf5be 100644 --- a/forward_engineering/helpers/general.js +++ b/forward_engineering/helpers/general.js @@ -7,13 +7,18 @@ module.exports = app => { const { decorateDefault } = require('./columnDefinitionHelper')(app); const { checkAllKeysDeactivated } = app.require('@hackolade/ddl-fe-utils').general; + const withBrackets = (name, brackets) => (brackets ? `[${name}]` : name); + const getTableName = (tableName, schemaName, brackets = true) => { - const withBrackets = name => (brackets ? `[${name}]` : name); - if (schemaName) { - return `${withBrackets(schemaName)}.${withBrackets(tableName)}`; - } else { - return withBrackets(tableName); - } + const name = withBrackets(tableName, brackets); + + return schemaName ? `${withBrackets(schemaName, brackets)}.${name}` : name; + }; + + const getIndexName = ({ name, schemaName, brackets = true }) => { + const indexName = withBrackets(name, brackets); + + return schemaName ? `${withBrackets(schemaName, brackets)}.${indexName}` : indexName; }; const getDefaultValue = (defaultValue, defaultConstraintName, type) => { @@ -262,6 +267,7 @@ module.exports = app => { filterColumnStoreProperties, getKeyWithAlias, getTableName, + getIndexName, getTableOptions, hasType, getViewData, diff --git a/forward_engineering/helpers/indexHelper.js b/forward_engineering/helpers/indexHelper.js index 382ad1e..8913414 100644 --- a/forward_engineering/helpers/indexHelper.js +++ b/forward_engineering/helpers/indexHelper.js @@ -4,7 +4,7 @@ const BOUNDING_BOX_LABEL = ['XMIN', 'YMIN', 'XMAX', 'YMAX']; module.exports = app => { const _ = app.require('lodash'); - const { filterColumnStoreProperties, getTableName } = require('./general')(app); + const { filterColumnStoreProperties, getTableName, getIndexName } = require('./general')(app); const { assignTemplates } = app.require('@hackolade/ddl-fe-utils'); const { divideIntoActivatedAndDeactivated, checkAllKeysDeactivated } = app.require('@hackolade/ddl-fe-utils').general; @@ -82,7 +82,7 @@ module.exports = app => { : ''; return assignTemplates(templates.index, { - name: index.name, + name: getIndexName({ name: index.name, schemaName: index.schemaName }), unique: index.unique ? ' UNIQUE' : '', clustered: index.clustered ? ' CLUSTERED' : '', table: getTableName(tableName, index.schemaName), @@ -159,7 +159,7 @@ module.exports = app => { return isParentActivated ? commentIfDeactivated(column, key) : column; }) .join(',\n\t'), - indexName: index.keyIndex, + indexName: getIndexName({ name: index.keyIndex, schemaName: index.schemaName }), catalog: catalog ? `ON ${catalog}\n` : '', options: options ? `WITH (\n\t${options}\n)` : '', terminator, @@ -212,7 +212,7 @@ module.exports = app => { const options = getSpatialOptions(index); return assignTemplates(templates.spatialIndex, { - name: index.name, + name: getIndexName({ name: index.name, schemaName: index.schemaName }), table: getTableName(tableName, index.schemaName), column: `[${index.column.name}]`, using: index.using ? `\nUSING ${index.using}` : '', From 1468c9154cd3824eaeacaa0b343a095c3f54081b Mon Sep 17 00:00:00 2001 From: Yevhenii Moroziuk Date: Thu, 5 Dec 2024 13:54:49 +0200 Subject: [PATCH 24/26] Revert "HCK-8971: include schema name in index name of DDL (#122)" (#123) This reverts commit e518474b71b50f174a50827bdbd1dee45ff6707a. 
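
A minimal standalone sketch of the helpers this revert removes (`withBrackets` and `getIndexName` are the names from patch 23's diff above; the example call and the note on CREATE INDEX are illustrative, not part of the patch):

    const withBrackets = (name, brackets) => (brackets ? `[${name}]` : name);

    const getIndexName = ({ name, schemaName, brackets = true }) => {
    	const indexName = withBrackets(name, brackets);
    	return schemaName ? `${withBrackets(schemaName, brackets)}.${indexName}` : indexName;
    };

    // getIndexName({ name: 'ix_orders_date', schemaName: 'sales' }) -> '[sales].[ix_orders_date]'

T-SQL scopes index names to their parent table or view, so CREATE INDEX rejects a schema-qualified index name like the one produced above; that is a plausible reason for the revert that follows.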
--- forward_engineering/helpers/general.js | 18 ++++++------------ forward_engineering/helpers/indexHelper.js | 8 ++++---- 2 files changed, 10 insertions(+), 16 deletions(-) diff --git a/forward_engineering/helpers/general.js b/forward_engineering/helpers/general.js index 4abf5be..c56913d 100644 --- a/forward_engineering/helpers/general.js +++ b/forward_engineering/helpers/general.js @@ -7,18 +7,13 @@ module.exports = app => { const { decorateDefault } = require('./columnDefinitionHelper')(app); const { checkAllKeysDeactivated } = app.require('@hackolade/ddl-fe-utils').general; - const withBrackets = (name, brackets) => (brackets ? `[${name}]` : name); - const getTableName = (tableName, schemaName, brackets = true) => { - const name = withBrackets(tableName, brackets); - - return schemaName ? `${withBrackets(schemaName, brackets)}.${name}` : name; - }; - - const getIndexName = ({ name, schemaName, brackets = true }) => { - const indexName = withBrackets(name, brackets); - - return schemaName ? `${withBrackets(schemaName, brackets)}.${indexName}` : indexName; + const withBrackets = name => (brackets ? `[${name}]` : name); + if (schemaName) { + return `${withBrackets(schemaName)}.${withBrackets(tableName)}`; + } else { + return withBrackets(tableName); + } }; const getDefaultValue = (defaultValue, defaultConstraintName, type) => { @@ -267,7 +262,6 @@ module.exports = app => { filterColumnStoreProperties, getKeyWithAlias, getTableName, - getIndexName, getTableOptions, hasType, getViewData, diff --git a/forward_engineering/helpers/indexHelper.js b/forward_engineering/helpers/indexHelper.js index 8913414..382ad1e 100644 --- a/forward_engineering/helpers/indexHelper.js +++ b/forward_engineering/helpers/indexHelper.js @@ -4,7 +4,7 @@ const BOUNDING_BOX_LABEL = ['XMIN', 'YMIN', 'XMAX', 'YMAX']; module.exports = app => { const _ = app.require('lodash'); - const { filterColumnStoreProperties, getTableName, getIndexName } = require('./general')(app); + const { filterColumnStoreProperties, getTableName } = require('./general')(app); const { assignTemplates } = app.require('@hackolade/ddl-fe-utils'); const { divideIntoActivatedAndDeactivated, checkAllKeysDeactivated } = app.require('@hackolade/ddl-fe-utils').general; @@ -82,7 +82,7 @@ module.exports = app => { : ''; return assignTemplates(templates.index, { - name: getIndexName({ name: index.name, schemaName: index.schemaName }), + name: index.name, unique: index.unique ? ' UNIQUE' : '', clustered: index.clustered ? ' CLUSTERED' : '', table: getTableName(tableName, index.schemaName), @@ -159,7 +159,7 @@ module.exports = app => { return isParentActivated ? commentIfDeactivated(column, key) : column; }) .join(',\n\t'), - indexName: getIndexName({ name: index.keyIndex, schemaName: index.schemaName }), + indexName: index.keyIndex, catalog: catalog ? `ON ${catalog}\n` : '', options: options ? `WITH (\n\t${options}\n)` : '', terminator, @@ -212,7 +212,7 @@ module.exports = app => { const options = getSpatialOptions(index); return assignTemplates(templates.spatialIndex, { - name: getIndexName({ name: index.name, schemaName: index.schemaName }), + name: index.name, table: getTableName(tableName, index.schemaName), column: `[${index.column.name}]`, using: index.using ? 
`\nUSING ${index.using}` : '', From b5df71e81232c3a4e13d687d31e394efd63b7a98 Mon Sep 17 00:00:00 2001 From: chulanovskyi-bs <56116665+chulanovskyi-bs@users.noreply.github.com> Date: Thu, 5 Dec 2024 18:43:28 +0200 Subject: [PATCH 25/26] HCK-8816: queries optimization (#120) * chore: refactored `connectionState` into a `ClientManager` class; resolved some sonarlint remarks * chore: refactored the main `reverseCollectionsToJSON` function in RE, reducing the in-place complexity, splitting it to smaller pieces * feat: restricted some queries to a predefined set of schemas and tables * fix: concatenated the query properly * fix: proper formatting, returned to tagged template literals for simple queries * feat: optimized queries for `getSpatialIndexes`, `getFullTextIndexes`, `getViewsIndexes` * fix: removed inapplicable WHERE clause for `getViewsIndexes` * feat: optimized queries for `getDatabaseMemoryOptimizedTables`, `getDatabaseCheckConstraints`, `getTableKeyConstraints`, `getDatabaseXmlSchemaCollection` * chore: improved ClientManager API, eliminated redundant `sshService` manipulations * fix: key constraints * fix: moved WHERE clause parts to LEFT JOIN * chore: reduced implicit complexity --------- Co-authored-by: Thomas Jakemeyn --- forward_engineering/api.js | 2 +- reverse_engineering/api.js | 37 +- .../{connectionState.js => clientManager.js} | 66 +- .../databaseService/databaseService.js | 872 ++++++++++-------- reverse_engineering/helpers/commentsHelper.js | 4 +- .../changeViewPropertiesToReferences.js | 2 +- .../helpers/containsJson.js | 11 +- .../defineFieldsCompositeKeyConstraints.js | 8 +- .../helpers/defineFieldsKeyConstraints.js | 2 +- .../getKeyConstraintsCompositionStatuses.js | 4 +- .../helpers/getPeriodForSystemTime.js | 6 +- .../helpers/reorderTableRows.js | 2 +- .../helpers/reverseTableIndexes.js | 2 +- .../reverseEngineeringService.js | 489 ++++++---- 14 files changed, 866 insertions(+), 641 deletions(-) rename reverse_engineering/{connectionState.js => clientManager.js} (50%) diff --git a/forward_engineering/api.js b/forward_engineering/api.js index 54e7b6e..0cbdc0b 100644 --- a/forward_engineering/api.js +++ b/forward_engineering/api.js @@ -107,7 +107,7 @@ module.exports = { await getExternalBrowserUrl(connectionInfo, logger, callback, app); } else { const client = await connect(connectionInfo, logger, () => {}, app); - await logDatabaseVersion(client, logger); + await logDatabaseVersion({ client, logger }); } callback(null); } catch (error) { diff --git a/reverse_engineering/api.js b/reverse_engineering/api.js index e786c80..cadcd55 100644 --- a/reverse_engineering/api.js +++ b/reverse_engineering/api.js @@ -3,7 +3,7 @@ const crypto = require('crypto'); const randomstring = require('randomstring'); const base64url = require('base64url'); -const { getClient, setClient, clearClient } = require('./connectionState'); +const { clientManager } = require('./clientManager'); const { getObjectsFromDatabase, getDatabaseCollationOption } = require('./databaseService/databaseService'); const { reverseCollectionsToJSON, @@ -21,19 +21,21 @@ const { prepareError } = require('./databaseService/helpers/errorService'); module.exports = { async connect(connectionInfo, logger, callback, app) { - const client = getClient(); - const sshService = app.require('@hackolade/ssh-service'); + const client = clientManager.getClient(); + if (!client) { - await setClient(connectionInfo, sshService, 0, logger); - return getClient(); + return await clientManager.initClient({ + connectionInfo, + logger, + 
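+			// ClientManager caches this ssh-service instance so that clearClient()
+			// can close the SSH tunnel when the plugin disconnects (see clientManager.js)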
sshService: app.require('@hackolade/ssh-service'), + }); } return client; }, - disconnect(connectionInfo, logger, callback, app) { - const sshService = app.require('@hackolade/ssh-service'); - clearClient(sshService); + disconnect(connectionInfo, logger, callback) { + clientManager.clearClient(); callback(); }, @@ -44,9 +46,9 @@ module.exports = { await this.getExternalBrowserUrl(connectionInfo, logger, callback, app); } else { const client = await this.connect(connectionInfo, logger, () => {}, app); - await logDatabaseVersion(client, logger); + await logDatabaseVersion({ client, logger }); } - callback(null); + callback(); } catch (error) { const errorWithUpdatedInfo = prepareError({ error }); logger.log( @@ -88,16 +90,17 @@ module.exports = { async getDbCollectionsNames(connectionInfo, logger, callback, app) { try { logInfo('Retrieving databases and tables information', connectionInfo, logger); + const client = await this.connect(connectionInfo, logger, () => {}, app); if (!client.config.database) { throw new Error('No database specified'); } - await logDatabaseVersion(client, logger); + await logDatabaseVersion({ client, logger }); const objects = await getObjectsFromDatabase(client); const dbName = client.config.database; - const collationData = (await getDatabaseCollationOption(client, dbName, logger)) || []; + const collationData = (await getDatabaseCollationOption({ client, dbName, logger })) || []; logger.log('info', { collation: collationData[0] }, 'Database collation'); callback(null, objects); } catch (error) { @@ -120,16 +123,16 @@ module.exports = { logger.log('info', collectionsInfo, 'Retrieving schema', collectionsInfo.hiddenKeys); logger.progress({ message: 'Start reverse-engineering process', containerName: '', entityName: '' }); const { collections } = collectionsInfo.collectionData; - const client = getClient(); - const dbName = client.config.database; - if (!dbName) { + const client = clientManager.getClient(); + const dbName = client?.config.database; + if (!client || !dbName) { throw new Error('No database specified'); } const reverseEngineeringOptions = getOptionsFromConnectionInfo(collectionsInfo); const [jsonSchemas, relationships] = await Promise.all([ - await reverseCollectionsToJSON(logger)(client, collections, reverseEngineeringOptions), - await getCollectionsRelationships(logger)(client, collections), + await reverseCollectionsToJSON({ client, tablesInfo: collections, reverseEngineeringOptions, logger }), + await getCollectionsRelationships({ client, tablesInfo: collections, logger }), ]); const jsonSchemasWithDescriptionComments = await getJsonSchemasWithInjectedDescriptionComments({ diff --git a/reverse_engineering/connectionState.js b/reverse_engineering/clientManager.js similarity index 50% rename from reverse_engineering/connectionState.js rename to reverse_engineering/clientManager.js index 9ebbda2..02d4ce8 100644 --- a/reverse_engineering/connectionState.js +++ b/reverse_engineering/clientManager.js @@ -1,11 +1,22 @@ const { getConnectionClient } = require('./databaseService/databaseService'); -const stateInstance = { - _client: null, - _isSshTunnel: false, - getClient: () => this._client, - setClient: async (connectionInfo, sshService, attempts = 0, logger) => { - if (connectionInfo.ssh && !this._isSshTunnel) { +class ClientManager { + #client = null; + #sshService = null; + #isSshTunnel = false; + + getClient() { + return this.#client; + } + + async initClient({ connectionInfo, sshService, attempts = 0, logger }) { + if (!this.#sshService) { + 
this.#sshService = sshService; + } + + let connectionParams = { ...connectionInfo }; + + if (connectionInfo.ssh && !this.#isSshTunnel) { const { options } = await sshService.openTunnel({ sshAuthMethod: connectionInfo.ssh_method === 'privateKey' ? 'IDENTITY_FILE' : 'USER_PASSWORD', sshTunnelHostname: connectionInfo.ssh_host, @@ -18,44 +29,51 @@ const stateInstance = { port: connectionInfo.port, }); - this._isSshTunnel = true; - connectionInfo = { + this.#isSshTunnel = true; + + connectionParams = { ...connectionInfo, ...options, }; } try { - this._client = await getConnectionClient(connectionInfo, logger); + this.#client = await getConnectionClient({ connectionInfo: connectionParams, logger }); + + return this.#client; } catch (error) { const encryptConnection = - connectionInfo.encryptConnection === undefined || Boolean(connectionInfo.encryptConnection); + connectionParams.encryptConnection === undefined || Boolean(connectionParams.encryptConnection); + const isEncryptedConnectionToLocalInstance = error.message.includes('self signed certificate') && encryptConnection; if (isEncryptedConnectionToLocalInstance && attempts <= 0) { - return stateInstance.setClient( - { - ...connectionInfo, + return this.initClient({ + connectionInfo: { + ...connectionParams, encryptConnection: false, }, sshService, - attempts + 1, + attempts: attempts + 1, logger, - ); + }); } throw error; } - }, - clearClient: async sshService => { - this._client = null; + } + + clearClient() { + this.#client = null; - if (this._isSshTunnel) { - await sshService.closeConsumer(); - this._isSshTunnel = false; + if (this.#isSshTunnel && this.#sshService) { + this.#sshService.closeConsumer(); + this.#isSshTunnel = false; } - }, -}; + } +} -module.exports = stateInstance; +module.exports = { + clientManager: new ClientManager(), +}; diff --git a/reverse_engineering/databaseService/databaseService.js b/reverse_engineering/databaseService/databaseService.js index 9dc7c2d..117ffb7 100644 --- a/reverse_engineering/databaseService/databaseService.js +++ b/reverse_engineering/databaseService/databaseService.js @@ -1,16 +1,15 @@ +const { flatMap } = require('lodash'); const axios = require('axios'); -const sql = require('mssql'); +const fs = require('fs'); const https = require('https'); -const { getObjectsFromDatabase, getNewConnectionClientByDb } = require('./helpers'); const msal = require('@azure/msal-node'); -const fs = require('fs'); +const sql = require('mssql'); +const { getObjectsFromDatabase, getNewConnectionClientByDb } = require('./helpers'); const getSampleDocSize = require('../helpers/getSampleDocSize'); const QUERY_REQUEST_TIMEOUT = 60000; -const getSslConfig = connectionInfo => { - const encrypt = connectionInfo.encryptConnection === undefined ? true : Boolean(connectionInfo.encryptConnection); - +const getSslConfig = ({ connectionInfo }) => { if (connectionInfo.sslType === 'SYSTEMCA') { return {}; } @@ -43,16 +42,17 @@ const getSslConfig = connectionInfo => { return {}; }; -const getConnectionClient = async (connectionInfo, logger) => { +const getConnectionClient = async ({ connectionInfo, logger }) => { const hostName = getHostName(connectionInfo.host); const userName = isEmail(connectionInfo.userName) && hostName ? 
`${connectionInfo.userName}@${hostName}` : connectionInfo.userName; const tenantId = connectionInfo.connectionTenantId || connectionInfo.tenantId || 'common'; - const sslOptions = getSslConfig(connectionInfo); + const sslOptions = getSslConfig({ connectionInfo }); const clientId = '0dc36597-bc44-49f8-a4a7-ae5401959b85'; const redirectUri = 'http://localhost:8080'; + const timeout = Number(connectionInfo.queryRequestTimeout) || 60000; if (connectionInfo.authMethod === 'Username / Password') { return await sql.connect({ @@ -67,8 +67,8 @@ const getConnectionClient = async (connectionInfo, logger) => { connectionInfo.encryptConnection === undefined ? true : Boolean(connectionInfo.encryptConnection), ...sslOptions, }, - connectTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, - requestTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, + connectTimeout: timeout, + requestTimeout: timeout, }); } else if (connectionInfo.authMethod === 'Username / Password (Windows)') { return await sql.connect({ @@ -86,8 +86,8 @@ const getConnectionClient = async (connectionInfo, logger) => { : Boolean(connectionInfo.encryptWindowsConnection), enableArithAbort: true, }, - connectTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, - requestTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, + connectTimeout: timeout, + requestTimeout: timeout, }); } else if (connectionInfo.authMethod === 'Azure Active Directory (MFA)') { const token = await getToken({ connectionInfo, tenantId, clientId, redirectUri, logger }); @@ -131,8 +131,8 @@ const getConnectionClient = async (connectionInfo, logger) => { clientId, }, }, - connectTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, - requestTimeout: Number(connectionInfo.queryRequestTimeout) || 60000, + connectTimeout: timeout, + requestTimeout: timeout, }); } @@ -145,7 +145,7 @@ const getHostName = url => (url || '').split('.')[0]; const PERMISSION_DENIED_CODE = 297; -const addPermissionDeniedMetaData = (error, meta) => { +const addPermissionDeniedMetaData = ({ error, meta }) => { error.message = 'The user does not have permission to perform ' + meta.action + @@ -155,8 +155,8 @@ const addPermissionDeniedMetaData = (error, meta) => { return error; }; -const getClient = async (connectionClient, dbName, meta, logger) => { - let currentDbConnectionClient = await getNewConnectionClientByDb(connectionClient, dbName); +const getClient = async ({ client, dbName, meta, logger }) => { + let currentDbConnectionClient = await getNewConnectionClientByDb(client, dbName); const _inst = { request(...args) { @@ -173,7 +173,7 @@ const getClient = async (connectionClient, dbName, meta, logger) => { } catch (error) { if (meta) { if (error.number === PERMISSION_DENIED_CODE) { - error = addPermissionDeniedMetaData(error, meta); + error = addPermissionDeniedMetaData({ error, meta }); } if (meta.skip) { @@ -196,16 +196,16 @@ const getClient = async (connectionClient, dbName, meta, logger) => { return _inst; }; -const getVersionInfo = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getVersionInfo = async ({ client, dbName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting version info', objects: ['VersionInfo'], }, logger, - ); + }); try { return mapResponse(await currentDbConnectionClient.query`SELECT @@VERSION VersionInfo;`); @@ -222,70 +222,83 @@ const getVersionInfo = async (connectionClient, dbName, 
logger) => { } }; -const getTableInfo = async (connectionClient, dbName, tableName, tableSchema, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableInfo = async ({ client, dbName, tableName, tableSchema, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'table information query', objects: ['INFORMATION_SCHEMA.COLUMNS', 'sys.identity_columns', 'sys.objects'], }, logger, - ); + }); + const objectId = `${tableSchema}.${tableName}`; + return mapResponse( await currentDbConnectionClient.query` - SELECT c.*, + SELECT c.*, ic.SEED_VALUE, ic.INCREMENT_VALUE, COLUMNPROPERTY(OBJECT_ID(${objectId}), c.column_name, 'IsSparse') AS IS_SPARSE, COLUMNPROPERTY(OBJECT_ID(${objectId}), c.column_name, 'IsIdentity') AS IS_IDENTITY, o.type AS TABLE_TYPE - FROM INFORMATION_SCHEMA.COLUMNS AS c - LEFT JOIN sys.identity_columns ic ON ic.object_id=OBJECT_ID(${objectId}) - LEFT JOIN sys.objects o ON o.object_id=OBJECT_ID(${objectId}) - WHERE c.table_name = ${tableName} - AND c.table_schema = ${tableSchema} - ;`, + FROM INFORMATION_SCHEMA.COLUMNS AS c + LEFT JOIN sys.identity_columns ic + ON ic.object_id=OBJECT_ID(${objectId}) + LEFT JOIN sys.objects o + ON o.object_id=OBJECT_ID(${objectId}) + WHERE c.table_name = ${tableName} + AND c.table_schema = ${tableSchema}; + `, ); }; -const getTableSystemTime = async (connectionClient, dbName, tableName, tableSchema, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableSystemTime = async ({ client, dbName, tableName, tableSchema, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'table information query', objects: ['sys.periods'], skip: true, }, logger, - ); + }); + const objectId = `${tableSchema}.${tableName}`; + return mapResponse( await currentDbConnectionClient.query` - SELECT col_name(p.object_id, p.start_column_id) as startColumn, - COLUMNPROPERTY(p.object_id, col_name(p.object_id, p.start_column_id), 'IsHidden') as startColumnIsHidden, - col_name(p.object_id, p.end_column_id) as endColumn, - COLUMNPROPERTY(p.object_id, col_name(p.object_id, p.start_column_id), 'IsHidden') as endColumnIsHidden - FROM sys.periods p - WHERE p.object_id = OBJECT_ID(${objectId}) - AND p.period_type = 1; - ;`, + SELECT + col_name(p.object_id, p.start_column_id) AS startColumn, + COLUMNPROPERTY(p.object_id, col_name(p.object_id, p.start_column_id), 'IsHidden') AS startColumnIsHidden, + col_name(p.object_id, p.end_column_id) AS endColumn, + COLUMNPROPERTY(p.object_id, col_name(p.object_id, p.start_column_id), 'IsHidden') AS endColumnIsHidden + FROM sys.periods p + WHERE p.object_id = OBJECT_ID(${objectId}) + AND p.period_type = 1; + `, ); }; -const getTableRow = async (connectionClient, dbName, tableName, tableSchema, recordSamplingSettings, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableRowCount = async ({ tableSchema, tableName, client }) => { + const rowCountQuery = `SELECT COUNT(*) as rowsCount FROM [${tableSchema}].[${tableName}]`; + const rowCountResponse = await client.query(rowCountQuery); + return rowCountResponse?.recordset[0]?.rowsCount; +}; + +const getTableRow = async ({ client, dbName, tableName, tableSchema, recordSamplingSettings, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting data query', objects: [`[${tableSchema}].[${tableName}]`], skip: 
true, }, logger, - ); + }); let amount; if (recordSamplingSettings.active === 'absolute') { @@ -296,7 +309,7 @@ const getTableRow = async (connectionClient, dbName, tableName, tableSchema, rec 'Reverse Engineering', ); } else { - const rowCount = await getTableRowCount(tableSchema, tableName, currentDbConnectionClient); + const rowCount = await getTableRowCount({ tableSchema, tableName, client: currentDbConnectionClient }); amount = getSampleDocSize(rowCount, recordSamplingSettings); logger.log( 'info', @@ -315,11 +328,11 @@ const getTableRow = async (connectionClient, dbName, tableName, tableSchema, rec ); }; -const getTableForeignKeys = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableForeignKeys = async ({ client, tablesInfo, dbName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting foreign keys query', objects: [ 'sys.foreign_key_columns', @@ -332,105 +345,126 @@ const getTableForeignKeys = async (connectionClient, dbName, logger) => { skip: true, }, logger, + }); + + const schemaAlias = 'sch'; + const table1Alias = 'tab1'; + + const whereClauseParts = flatMap( + Object.entries(tablesInfo).map(([schemaName, tableNames]) => + tableNames.map( + tableName => `(${table1Alias}.name = '${tableName}' AND ${schemaAlias}.name = '${schemaName}')`, + ), + ), ); + return mapResponse( - await currentDbConnectionClient.query` - SELECT obj.name AS FK_NAME, - sch.name AS [schema_name], - tab1.name AS [table], + await currentDbConnectionClient.query(` + SELECT obj.name AS FK_NAME, + ${schemaAlias}.name AS [schema_name], + ${table1Alias}.name AS [table], col1.name AS [column], tab2.name AS [referenced_table], col2.name AS [referenced_column], fk.delete_referential_action_desc AS on_delete, fk.update_referential_action_desc AS on_update - FROM sys.foreign_key_columns fkc - INNER JOIN sys.objects obj - ON obj.object_id = fkc.constraint_object_id - INNER JOIN sys.tables tab1 - ON tab1.object_id = fkc.parent_object_id - INNER JOIN sys.schemas sch - ON tab1.schema_id = sch.schema_id - INNER JOIN sys.columns col1 - ON col1.column_id = parent_column_id AND col1.object_id = tab1.object_id - INNER JOIN sys.tables tab2 - ON tab2.object_id = fkc.referenced_object_id - INNER JOIN sys.columns col2 - ON col2.column_id = referenced_column_id AND col2.object_id = tab2.object_id - INNER JOIN sys.foreign_keys fk - ON fk.object_id = obj.object_id - `, + FROM sys.foreign_key_columns fkc + INNER JOIN sys.objects obj + ON obj.object_id = fkc.constraint_object_id + INNER JOIN sys.tables ${table1Alias} + ON ${table1Alias}.object_id = fkc.parent_object_id + INNER JOIN sys.schemas ${schemaAlias} + ON ${table1Alias}.schema_id = ${schemaAlias}.schema_id + INNER JOIN sys.columns col1 + ON col1.column_id = parent_column_id AND col1.object_id = ${table1Alias}.object_id + INNER JOIN sys.tables tab2 + ON tab2.object_id = fkc.referenced_object_id + INNER JOIN sys.columns col2 + ON col2.column_id = referenced_column_id AND col2.object_id = tab2.object_id + INNER JOIN sys.foreign_keys fk + ON fk.object_id = obj.object_id + WHERE ${whereClauseParts.join(' OR ')} + `), ); }; -const getDatabaseIndexes = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getDatabaseIndexes = async ({ client, dbName, tablesInfo, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 
'getting indexes query', objects: ['sys.indexes', 'sys.tables', 'sys.index_columns', 'sys.partitions'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database indexes.` }, 'Reverse Engineering'); + const tableAlias = 't'; + const leftJoinClauseParts = Object.entries(tablesInfo).map(([schemaName, tableNames]) => { + const preparedTableNames = tableNames.map(tableName => `'${tableName}'`).join(', '); + return `(OBJECT_SCHEMA_NAME(${tableAlias}.object_id) = '${schemaName}' AND ${tableAlias}.name IN (${preparedTableNames}))`; + }); + return mapResponse( - await currentDbConnectionClient.query` - SELECT - TableName = t.name, - IndexName = ind.name, - ic.is_descending_key, - ic.is_included_column, - COL_NAME(t.object_id, ic.column_id) AS columnName, - OBJECT_SCHEMA_NAME(t.object_id) AS schemaName, - p.data_compression_desc AS dataCompression, - ind.* - FROM sys.indexes ind - LEFT JOIN sys.tables t - ON ind.object_id = t.object_id - INNER JOIN sys.index_columns ic - ON ind.object_id = ic.object_id AND ind.index_id = ic.index_id - INNER JOIN sys.partitions p - ON p.object_id = t.object_id AND ind.index_id = p.index_id - WHERE - ind.is_primary_key = 0 - AND ind.is_unique_constraint = 0 - AND t.is_ms_shipped = 0 - `, + await currentDbConnectionClient.query(` + SELECT + TableName = t.name, + IndexName = ind.name, + ic.is_descending_key, + ic.is_included_column, + COL_NAME(t.object_id, ic.column_id) AS columnName, + OBJECT_SCHEMA_NAME(t.object_id) AS schemaName, + p.data_compression_desc AS dataCompression, + ind.* + FROM sys.indexes ind + LEFT JOIN sys.tables t + ON ind.object_id = t.object_id + AND (${leftJoinClauseParts.join(' OR ')}) + INNER JOIN sys.index_columns ic + ON ind.object_id = ic.object_id AND ind.index_id = ic.index_id + INNER JOIN sys.partitions p + ON p.object_id = t.object_id AND ind.index_id = p.index_id + WHERE + ind.is_primary_key = 0 + AND ind.is_unique_constraint = 0 + AND t.is_ms_shipped = 0 + `), ); }; -const getIndexesBucketCount = async (connectionClient, dbName, indexesId, logger) => { +const getIndexesBucketCount = async ({ client, dbName, indexesId, logger }) => { if (!indexesId.length) { return []; } - const currentDbConnectionClient = await getClient( - connectionClient, + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting total buckets of indexes', objects: ['sys.dm_db_xtp_hash_index_stats'], skip: true, }, logger, - ); + }); return mapResponse( await currentDbConnectionClient.query(` - SELECT hs.total_bucket_count, hs.index_id - FROM sys.dm_db_xtp_hash_index_stats hs - WHERE hs.index_id IN (${indexesId.join(', ')})`), + SELECT hs.total_bucket_count, hs.index_id + FROM sys.dm_db_xtp_hash_index_stats hs + WHERE hs.index_id IN (${indexesId.join(', ')}) + `), ); }; -const getSpatialIndexes = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getSpatialIndexes = async ({ client, dbName, allUniqueSchemasAndTables, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting spatial indexes', objects: [ 'sys.spatial_indexes', @@ -442,45 +476,50 @@ const getSpatialIndexes = async (connectionClient, dbName, logger) => { skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database spatial indexes.` }, 'Reverse Engineering'); + const tableAlias = 't'; + const whereClauseParts = getWhereClauseForUniqueSchemasAndTables({ tableAlias, 
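+	// the generated clause compares OBJECT_SCHEMA_NAME/OBJECT_NAME against the schemas
+	// and tables selected for reverse engineering, so the whole database is not scanned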
allUniqueSchemasAndTables }); + return mapResponse( - await currentDbConnectionClient.query` - SELECT - TableName = t.name, - IndexName = ind.name, - COL_NAME(t.object_id, ic.column_id) AS columnName, - OBJECT_SCHEMA_NAME(t.object_id) AS schemaName, - sit.bounding_box_xmin AS XMIN, - sit.bounding_box_ymin AS YMIN, - sit.bounding_box_xmax AS XMAX, - sit.bounding_box_ymax AS YMAX, - sit.level_1_grid_desc AS LEVEL_1, - sit.level_2_grid_desc AS LEVEL_2, - sit.level_3_grid_desc AS LEVEL_3, - sit.level_4_grid_desc AS LEVEL_4, - sit.cells_per_object AS CELLS_PER_OBJECT, - p.data_compression_desc AS dataCompression, - ind.* - FROM sys.spatial_indexes ind - LEFT JOIN sys.tables t - ON ind.object_id = t.object_id - INNER JOIN sys.index_columns ic - ON ind.object_id = ic.object_id AND ind.index_id = ic.index_id - LEFT JOIN sys.spatial_index_tessellations sit - ON ind.object_id = sit.object_id AND ind.index_id = sit.index_id - LEFT JOIN sys.partitions p - ON p.object_id = t.object_id AND ind.index_id = p.index_id`, + await currentDbConnectionClient.query(` + SELECT + TableName = ${tableAlias}.name, + IndexName = ind.name, + COL_NAME(${tableAlias}.object_id, ic.column_id) AS columnName, + OBJECT_SCHEMA_NAME(${tableAlias}.object_id) AS schemaName, + sit.bounding_box_xmin AS XMIN, + sit.bounding_box_ymin AS YMIN, + sit.bounding_box_xmax AS XMAX, + sit.bounding_box_ymax AS YMAX, + sit.level_1_grid_desc AS LEVEL_1, + sit.level_2_grid_desc AS LEVEL_2, + sit.level_3_grid_desc AS LEVEL_3, + sit.level_4_grid_desc AS LEVEL_4, + sit.cells_per_object AS CELLS_PER_OBJECT, + p.data_compression_desc AS dataCompression, + ind.* + FROM sys.spatial_indexes ind + LEFT JOIN sys.tables ${tableAlias} + ON ind.object_id = ${tableAlias}.object_id + INNER JOIN sys.index_columns ic + ON ind.object_id = ic.object_id AND ind.index_id = ic.index_id + LEFT JOIN sys.spatial_index_tessellations sit + ON ind.object_id = sit.object_id AND ind.index_id = sit.index_id + LEFT JOIN sys.partitions p + ON p.object_id = ${tableAlias}.object_id AND ind.index_id = p.index_id + WHERE ${whereClauseParts} + `), ); }; -const getFullTextIndexes = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getFullTextIndexes = async ({ client, dbName, allUniqueSchemasAndTables, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting full text indexes', objects: [ 'sys.fulltext_indexes', @@ -494,232 +533,260 @@ const getFullTextIndexes = async (connectionClient, dbName, logger) => { skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database full text indexes.` }, 'Reverse Engineering'); - const result = await currentDbConnectionClient.query` - SELECT - OBJECT_SCHEMA_NAME(F.object_id) AS schemaName, - OBJECT_NAME(F.object_id) AS TableName, - COL_NAME(FC.object_id, FC.column_id) AS columnName, - COL_NAME(FC.object_id, FC.type_column_id) AS columnTypeName, - FC.statistical_semantics AS statistical_semantics, - FC.language_id AS language, - I.name AS indexKeyName, - F.change_tracking_state_desc AS changeTracking, - CASE WHEN F.stoplist_id IS NULL THEN 'OFF' WHEN F.stoplist_id = 0 THEN 'SYSTEM' ELSE SL.name END AS stopListName, - SPL.name AS searchPropertyList, - FG.name AS fileGroup, - FCAT.name AS catalogName, - type = 'FullText', - IndexName = 'full_text_idx' - FROM sys.fulltext_indexes F - INNER JOIN sys.fulltext_index_columns FC ON FC.object_id = F.object_id - LEFT JOIN sys.indexes I ON 
F.unique_index_id = I.index_id AND I.object_id = F.object_id - LEFT JOIN sys.fulltext_stoplists SL ON SL.stoplist_id = F.stoplist_id - LEFT JOIN sys.registered_search_property_lists SPL ON SPL.property_list_id = F.property_list_id - LEFT JOIN sys.filegroups FG ON FG.data_space_id = F.data_space_id - LEFT JOIN sys.fulltext_catalogs FCAT ON FCAT.fulltext_catalog_id = F.fulltext_catalog_id - WHERE F.is_enabled = 1`; - - return mapResponse(result); + const tableAlias = 'F'; + const whereClauseParts = getWhereClauseForUniqueSchemasAndTables({ tableAlias, allUniqueSchemasAndTables }); + + return mapResponse( + await currentDbConnectionClient.query(` + SELECT + OBJECT_SCHEMA_NAME(${tableAlias}.object_id) AS schemaName, + OBJECT_NAME(${tableAlias}.object_id) AS TableName, + COL_NAME(FC.object_id, FC.column_id) AS columnName, + COL_NAME(FC.object_id, FC.type_column_id) AS columnTypeName, + FC.statistical_semantics AS statistical_semantics, + FC.language_id AS language, + I.name AS indexKeyName, + ${tableAlias}.change_tracking_state_desc AS changeTracking, + CASE + WHEN ${tableAlias}.stoplist_id IS NULL THEN 'OFF' + WHEN ${tableAlias}.stoplist_id = 0 THEN 'SYSTEM' + ELSE SL.name + END AS stopListName, + SPL.name AS searchPropertyList, + FG.name AS fileGroup, + FCAT.name AS catalogName, + type = 'FullText', + IndexName = 'full_text_idx' + FROM sys.fulltext_indexes ${tableAlias} + INNER JOIN sys.fulltext_index_columns FC + ON FC.object_id = ${tableAlias}.object_id + LEFT JOIN sys.indexes I + ON ${tableAlias}.unique_index_id = I.index_id AND I.object_id = ${tableAlias}.object_id + LEFT JOIN sys.fulltext_stoplists SL + ON SL.stoplist_id = ${tableAlias}.stoplist_id + LEFT JOIN sys.registered_search_property_lists SPL + ON SPL.property_list_id = ${tableAlias}.property_list_id + LEFT JOIN sys.filegroups FG + ON FG.data_space_id = ${tableAlias}.data_space_id + LEFT JOIN sys.fulltext_catalogs FCAT + ON FCAT.fulltext_catalog_id = ${tableAlias}.fulltext_catalog_id + WHERE + ${tableAlias}.is_enabled = 1 + AND ${whereClauseParts} + `), + ); }; -const getViewsIndexes = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getViewsIndexes = async ({ client, dbName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting view indexes query', objects: ['sys.indexes', 'sys.views', 'sys.index_columns', 'sys.partitions'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database views indexes.` }, 'Reverse Engineering'); + const tableAlias = 'ind'; + return mapResponse( - await currentDbConnectionClient.query` + await currentDbConnectionClient.query(` SELECT TableName = t.name, - IndexName = ind.name, + IndexName = ${tableAlias}.name, ic.is_descending_key, ic.is_included_column, COL_NAME(t.object_id, ic.column_id) AS columnName, OBJECT_SCHEMA_NAME(t.object_id) AS schemaName, p.data_compression_desc AS dataCompression, - ind.* - FROM sys.indexes ind + ${tableAlias}.* + FROM sys.indexes ${tableAlias} LEFT JOIN sys.views t - ON ind.object_id = t.object_id + ON ${tableAlias}.object_id = t.object_id INNER JOIN sys.index_columns ic - ON ind.object_id = ic.object_id AND ind.index_id = ic.index_id + ON ${tableAlias}.object_id = ic.object_id AND ${tableAlias}.index_id = ic.index_id INNER JOIN sys.partitions p - ON p.object_id = t.object_id AND ind.index_id = p.index_id + ON p.object_id = t.object_id AND ${tableAlias}.index_id = p.index_id WHERE - ind.is_primary_key 
= 0 - AND ind.is_unique_constraint = 0 + ${tableAlias}.is_primary_key = 0 + AND ${tableAlias}.is_unique_constraint = 0 AND t.is_ms_shipped = 0 - `, + `), ); }; -const getTableColumnsDescription = async (connectionClient, dbName, tableName, schemaName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableColumnsDescription = async ({ client, dbName, tableName, schemaName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting table columns description', objects: ['sys.tables', 'sys.columns', 'sys.extended_properties'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${tableName}' table columns description.` }, 'Reverse Engineering'); - return mapResponse(currentDbConnectionClient.query` - SELECT - st.name [Table], - sc.name [Column], - sep.value [Description] - FROM sys.tables st - INNER JOIN sys.columns sc ON st.object_id = sc.object_id - LEFT JOIN sys.extended_properties sep ON st.object_id = sep.major_id - AND sc.column_id = sep.minor_id - AND sep.name = 'MS_Description' - WHERE st.name = ${tableName} - AND st.schema_id=SCHEMA_ID(${schemaName}) - `); + return mapResponse( + currentDbConnectionClient.query` + SELECT + st.name [Table], + sc.name [Column], + sep.value [Description] + FROM sys.tables st + INNER JOIN sys.columns sc ON st.object_id = sc.object_id + LEFT JOIN sys.extended_properties sep ON st.object_id = sep.major_id + AND sc.column_id = sep.minor_id + AND sep.name = 'MS_Description' + WHERE st.name = ${tableName} + AND st.schema_id=SCHEMA_ID(${schemaName}) + `, + ); }; -const getDatabaseMemoryOptimizedTables = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getDatabaseMemoryOptimizedTables = async ({ client, dbName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting memory optimized tables', objects: ['sys.tables', 'sys.objects'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database memory optimized indexes.` }, 'Reverse Engineering'); return mapResponse( await currentDbConnectionClient.query` - SELECT - T.name, - T.durability, - T.durability_desc, - OBJECT_NAME(T.history_table_id) AS history_table, - SCHEMA_NAME(O.schema_id) AS history_schema, - T.temporal_type_desc, - T.is_memory_optimized - FROM sys.tables T LEFT JOIN sys.objects O ON T.history_table_id = O.object_id - `, + SELECT + T.name, + T.durability, + T.durability_desc, + OBJECT_NAME(T.history_table_id) AS history_table, + SCHEMA_NAME(O.schema_id) AS history_schema, + T.temporal_type_desc, + T.is_memory_optimized + FROM sys.tables T + LEFT JOIN sys.objects O + ON T.history_table_id = O.object_id + WHERE T.is_memory_optimized = 1 + `, ); }; -const getDatabaseCheckConstraints = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getDatabaseCheckConstraints = async ({ client, dbName, allUniqueSchemasAndTables, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting check constraints', objects: ['sys.check_constraints', 'sys.objects', 'sys.all_columns'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database check constraints.` }, 'Reverse Engineering'); - return mapResponse(currentDbConnectionClient.query` - SELECT con.[name], - t.[name] AS 
[table], - col.[name] AS column_name, - con.[definition], - con.[is_not_trusted], - con.[is_disabled], - con.[is_not_for_replication] - FROM sys.check_constraints con - LEFT OUTER JOIN sys.objects t - ON con.parent_object_id = t.object_id - LEFT OUTER JOIN sys.all_columns col - ON con.parent_column_id = col.column_id - AND con.parent_object_id = col.object_id - `); + const tableAlias = 't'; + const whereClauseParts = getWhereClauseForUniqueSchemasAndTables({ tableAlias, allUniqueSchemasAndTables }); + + return mapResponse( + currentDbConnectionClient.query(` + SELECT con.[name], + ${tableAlias}.[name] AS [table], + col.[name] AS column_name, + con.[definition], + con.[is_not_trusted], + con.[is_disabled], + con.[is_not_for_replication] + FROM sys.check_constraints con + LEFT OUTER JOIN sys.objects ${tableAlias} + ON con.parent_object_id = ${tableAlias}.object_id + LEFT OUTER JOIN sys.all_columns col + ON con.parent_column_id = col.column_id + AND con.parent_object_id = col.object_id + WHERE ${whereClauseParts} + `), + ); }; -const getViewTableInfo = async (connectionClient, dbName, viewName, schemaName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getViewTableInfo = async ({ client, dbName, viewName, schemaName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'information view query', objects: ['sys.sql_dependencies', 'sys.objects', 'sys.columns', 'sys.types'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${viewName}' view table info.` }, 'Reverse Engineering'); const objectId = `${schemaName}.${viewName}`; - return mapResponse(currentDbConnectionClient.query` - SELECT - ViewName = O.name, - ColumnName = A.name, - ReferencedSchemaName = SCHEMA_NAME(X.schema_id), - ReferencedTableName = X.name, - ReferencedColumnName = C.name, - T.is_selected, - T.is_updated, - T.is_select_all, - ColumnType = M.name, - M.max_length, - M.precision, - M.scale - FROM - sys.sql_dependencies AS T - INNER JOIN sys.objects AS O ON T.object_id = O.object_id - INNER JOIN sys.objects AS X ON T.referenced_major_id = X.object_id - INNER JOIN sys.columns AS C ON - C.object_id = X.object_id AND - C.column_id = T.referenced_minor_id - INNER JOIN sys.types AS M ON - M.system_type_id = C.system_type_id AND - M.user_type_id = C.user_type_id - INNER JOIN sys.columns AS A ON - A.object_id = OBJECT_ID(${objectId}) AND - T.referenced_minor_id = A.column_id - WHERE - O.type = 'V' - AND - O.name = ${viewName} - AND O.schema_id=SCHEMA_ID(${schemaName}) - ORDER BY - O.name, - X.name, - C.name - `); + return mapResponse( + currentDbConnectionClient.query` + SELECT + ViewName = O.name, + ColumnName = A.name, + ReferencedSchemaName = SCHEMA_NAME(X.schema_id), + ReferencedTableName = X.name, + ReferencedColumnName = C.name, + T.is_selected, + T.is_updated, + T.is_select_all, + ColumnType = M.name, + M.max_length, + M.precision, + M.scale + FROM sys.sql_dependencies AS T + INNER JOIN sys.objects AS O + ON T.object_id = O.object_id + INNER JOIN sys.objects AS X + ON T.referenced_major_id = X.object_id + INNER JOIN sys.columns AS C + ON C.object_id = X.object_id AND C.column_id = T.referenced_minor_id + INNER JOIN sys.types AS M + ON M.system_type_id = C.system_type_id AND M.user_type_id = C.user_type_id + INNER JOIN sys.columns AS A + ON A.object_id = OBJECT_ID(${objectId}) AND T.referenced_minor_id = A.column_id + WHERE + O.type = 'V' + AND O.name = ${viewName} + AND O.schema_id=SCHEMA_ID(${schemaName}) + 
ORDER BY + O.name, + X.name, + C.name + `, + ); }; -const getViewColumnRelations = async (connectionClient, dbName, viewName, schemaName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getViewColumnRelations = async ({ client, dbName, viewName, schemaName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting view column relations', objects: ['sys.dm_exec_describe_first_result_set'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${viewName}' view column relations.` }, 'Reverse Engineering'); @@ -734,17 +801,17 @@ const getViewColumnRelations = async (connectionClient, dbName, viewName, schema `); }; -const getViewStatement = async (connectionClient, dbName, viewName, schemaName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getViewStatement = async ({ client, dbName, viewName, schemaName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting view statements', objects: ['sys.sql_modules', 'sys.views'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${viewName}' view statement.` }, 'Reverse Engineering'); @@ -755,11 +822,11 @@ const getViewStatement = async (connectionClient, dbName, viewName, schemaName, `); }; -const getTableKeyConstraints = async (connectionClient, dbName, tableName, schemaName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableKeyConstraints = async ({ client, dbName, tableName, schemaName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting constraints of keys', objects: [ 'INFORMATION_SCHEMA.TABLE_CONSTRAINTS', @@ -773,132 +840,154 @@ const getTableKeyConstraints = async (connectionClient, dbName, tableName, schem skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${tableName}' table key constraints.` }, 'Reverse Engineering'); const objectId = `${schemaName}.${tableName}`; return mapResponse( await currentDbConnectionClient.query` - SELECT TC.TABLE_NAME AS tableName, TC.Constraint_Name AS constraintName, - CC.Column_Name AS columnName, TC.constraint_type AS constraintType, ind.type_desc AS typeDesc, - p.data_compression_desc AS dataCompression, - ds.name AS dataSpaceName, - st.no_recompute AS statisticNoRecompute, st.is_incremental AS statisticsIncremental, - ic.is_descending_key AS isDescending, - ind.* - FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC - INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CC ON TC.Constraint_Name = CC.Constraint_Name - AND TC.TABLE_NAME=${tableName} AND TC.TABLE_SCHEMA=${schemaName} - INNER JOIN sys.indexes ind ON ind.name = TC.CONSTRAINT_NAME - INNER JOIN sys.stats st ON st.name = TC.CONSTRAINT_NAME - LEFT JOIN sys.data_spaces ds ON ds.data_space_id = ind.data_space_id - INNER JOIN sys.index_columns ic ON ic.object_id = OBJECT_ID(${objectId}) - AND ind.index_id=ic.index_id - AND ic.column_id=COLUMNPROPERTY(OBJECT_ID(${objectId}), CC.column_name, 'ColumnId') - INNER JOIN sys.partitions p ON p.object_id = OBJECT_ID(${objectId}) AND p.index_id = ind.index_id - ORDER BY TC.Constraint_Name - `, + SELECT + TC.TABLE_NAME AS tableName, + TC.Constraint_Name AS constraintName, + CC.Column_Name AS columnName, + TC.constraint_type AS constraintType, + ind.type_desc AS typeDesc, + p.data_compression_desc AS dataCompression, + ds.name AS 
dataSpaceName, + st.no_recompute AS statisticNoRecompute, + st.is_incremental AS statisticsIncremental, + ic.is_descending_key AS isDescending, + ind.* + FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC + INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CC + ON TC.Constraint_Name = CC.Constraint_Name + AND TC.TABLE_NAME=${tableName} + AND TC.TABLE_SCHEMA=${schemaName} + INNER JOIN sys.indexes ind + ON ind.name = TC.CONSTRAINT_NAME + INNER JOIN sys.stats st + ON st.name = TC.CONSTRAINT_NAME + LEFT JOIN sys.data_spaces ds + ON ds.data_space_id = ind.data_space_id + INNER JOIN sys.index_columns ic + ON ic.object_id = OBJECT_ID(${objectId}) + AND ind.index_id=ic.index_id + AND ic.column_id=COLUMNPROPERTY(OBJECT_ID(${objectId}), CC.column_name, 'ColumnId') + INNER JOIN sys.partitions p + ON p.object_id = OBJECT_ID(${objectId}) + AND p.index_id = ind.index_id + WHERE TC.TABLE_SCHEMA=${schemaName} + AND TC.TABLE_NAME=${tableName} + ORDER BY TC.Constraint_Name + `, ); }; -const getTableMaskedColumns = async (connectionClient, dbName, tableName, schemaName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableMaskedColumns = async ({ client, dbName, tableName, schemaName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting masked columns', objects: ['sys.masked_columns'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${tableName}' table masked columns.` }, 'Reverse Engineering'); const objectId = `${schemaName}.${tableName}`; return mapResponse( await currentDbConnectionClient.query` - SELECT name, masking_function FROM sys.masked_columns - WHERE object_id=OBJECT_ID(${objectId}) - `, + SELECT name, masking_function FROM sys.masked_columns + WHERE object_id=OBJECT_ID(${objectId}) + `, ); }; -const getDatabaseXmlSchemaCollection = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getDatabaseXmlSchemaCollection = async ({ client, dbName, allUniqueSchemasAndTables, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting xml schema collections', objects: ['sys.column_xml_schema_collection_usages', 'sys.xml_schema_collections'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database xml schema collection.` }, 'Reverse Engineering'); + const schemaAlias = 'xsc'; + const tableAlias = 'xcu'; + const whereClauseParts = getWhereClauseForUniqueSchemasAndTables({ + schemaAlias, + tableAlias, + allUniqueSchemasAndTables, + }); + return mapResponse( - await currentDbConnectionClient.query` - SELECT xsc.name AS collectionName, + await currentDbConnectionClient.query(` + SELECT + xsc.name AS collectionName, SCHEMA_NAME(xsc.schema_id) AS schemaName, OBJECT_NAME(xcu.object_id) AS tableName, COL_NAME(xcu.object_id, xcu.column_id) AS columnName - FROM sys.column_xml_schema_collection_usages xcu - LEFT JOIN sys.xml_schema_collections xsc ON xsc.xml_collection_id=xcu.xml_collection_id - `, + FROM sys.column_xml_schema_collection_usages xcu + LEFT JOIN sys.xml_schema_collections xsc + ON xsc.xml_collection_id=xcu.xml_collection_id + WHERE ${whereClauseParts} + `), ); }; -const getTableDefaultConstraintNames = async (connectionClient, dbName, tableName, schemaName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getTableDefaultConstraintNames = async ({ client, dbName, 
tableName, schemaName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting default cosntraint names', objects: ['sys.all_columns', 'sys.tables', 'sys.schemas', 'sys.default_constraints'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${tableName}' table default constraint names.` }, 'Reverse Engineering'); return mapResponse( await currentDbConnectionClient.query` - SELECT - ac.name AS columnName, - dc.name - FROM - sys.all_columns AS ac - INNER JOIN - sys.tables - ON ac.object_id = tables.object_id - INNER JOIN - sys.schemas - ON tables.schema_id = schemas.schema_id - INNER JOIN - sys.default_constraints AS dc - ON ac.default_object_id = dc.object_id - WHERE - schemas.name = ${schemaName} - AND tables.name = ${tableName} - `, + SELECT + ac.name AS columnName, + dc.name + FROM sys.all_columns AS ac + INNER JOIN sys.tables + ON ac.object_id = tables.object_id + INNER JOIN sys.schemas + ON tables.schema_id = schemas.schema_id + INNER JOIN sys.default_constraints AS dc + ON ac.default_object_id = dc.object_id + WHERE + schemas.name = ${schemaName} + AND tables.name = ${tableName} + `, ); }; -const getDatabaseUserDefinedTypes = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getDatabaseUserDefinedTypes = async ({ client, dbName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting user defined types', objects: ['sys.types'], skip: true, }, logger, - ); + }); logger.log('info', { message: `Get '${dbName}' database UDTs.` }, 'Reverse Engineering'); @@ -908,32 +997,31 @@ const getDatabaseUserDefinedTypes = async (connectionClient, dbName, logger) => `); }; -const getDatabaseCollationOption = async (connectionClient, dbName, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getDatabaseCollationOption = async ({ client, dbName, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'getting database collation', objects: [], skip: true, }, logger, - ); + }); return mapResponse( currentDbConnectionClient.query(`SELECT CONVERT (varchar(256), DATABASEPROPERTYEX('${dbName}','collation'));`), ); }; -const mapResponse = async (response = {}) => { +const mapResponse = async (response = Promise.resolve({})) => { const resp = await response; - return resp.recordset ? 
resp.recordset : resp; }; const getTokenByMSAL = async ({ connectionInfo, redirectUri, clientId, tenantId, logger }) => { try { - const pca = new msal.PublicClientApplication(getAuthConfig(clientId, tenantId, logger.log)); + const pca = new msal.PublicClientApplication(getAuthConfig({ clientId, tenantId, logger })); const tokenRequest = { code: connectionInfo?.externalBrowserQuery?.code || '', scopes: ['https://database.windows.net//.default'], @@ -951,7 +1039,7 @@ const getTokenByMSAL = async ({ connectionInfo, redirectUri, clientId, tenantId, } }; -const getAgent = (reject, cert, key) => { +const getAgent = ({ reject, cert, key } = {}) => { return new https.Agent({ cert, key, rejectUnauthorized: !!reject }); }; @@ -979,9 +1067,7 @@ const getTokenByAxios = async ({ connectionInfo, tenantId, redirectUri, clientId } }; -const getTokenByAxiosExtended = params => { - return getTokenByAxios({ ...params, agent: getAgent() }); -}; +const getTokenByAxiosExtended = ({ ...params }) => getTokenByAxios({ ...params, agent: getAgent() }); const getToken = async ({ connectionInfo, tenantId, clientId, redirectUri, logger }) => { const axiosExtendedToken = await getTokenByAxiosExtended({ @@ -1004,15 +1090,13 @@ const getToken = async ({ connectionInfo, tenantId, clientId, redirectUri, logge if (axiosToken) { return axiosToken; } - - return; }; -const getAuthConfig = (clientId, tenantId, logger) => ({ +const getAuthConfig = ({ clientId, tenantId, logger }) => ({ system: { loggerOptions: { loggerCallback(loglevel, message) { - logger(message); + logger.log(message); }, piiLoggingEnabled: false, logLevel: msal.LogLevel.Verbose, @@ -1024,16 +1108,16 @@ const getAuthConfig = (clientId, tenantId, logger) => ({ }, }); -const getDescriptionComments = async (connectionClient, dbName, { schema, entity }, logger) => { - const currentDbConnectionClient = await getClient( - connectionClient, +const getDescriptionComments = async ({ client, dbName, schema, entity, logger }) => { + const currentDbConnectionClient = await getClient({ + client, dbName, - { + meta: { action: 'MS_Description query', objects: [], }, logger, - ); + }); logger.log('info', { message: `Get description comments for '${entity?.name}'.` }, 'Reverse Engineering'); @@ -1057,6 +1141,14 @@ const buildDescriptionCommentsRetrieveQuery = ({ schema, entity }) => { return `SELECT objtype, objname, value FROM fn_listextendedproperty ('MS_Description', ${schemaTemplate}, ${entityTemplate});`; }; +const getWhereClauseForUniqueSchemasAndTables = ({ + schemaAlias, + tableAlias, + allUniqueSchemasAndTables: { schemas, tables }, +}) => + `OBJECT_SCHEMA_NAME(${schemaAlias || tableAlias}.object_id) IN (${[...schemas].join(', ')}) + AND OBJECT_NAME(${tableAlias}.object_id) IN (${[...tables].join(', ')})`; + module.exports = { getConnectionClient, getObjectsFromDatabase, @@ -1084,11 +1176,3 @@ module.exports = { getVersionInfo, getDescriptionComments, }; - -async function getTableRowCount(tableSchema, tableName, currentDbConnectionClient) { - const rowCountQuery = `SELECT COUNT(*) as rowsCount FROM [${tableSchema}].[${tableName}]`; - const rowCountResponse = await currentDbConnectionClient.query(rowCountQuery); - const rowCount = rowCountResponse?.recordset[0]?.rowsCount; - - return rowCount; -} diff --git a/reverse_engineering/helpers/commentsHelper.js b/reverse_engineering/helpers/commentsHelper.js index 05f9299..1491011 100644 --- a/reverse_engineering/helpers/commentsHelper.js +++ b/reverse_engineering/helpers/commentsHelper.js @@ -67,8 +67,8 @@ const 
diff --git a/reverse_engineering/helpers/commentsHelper.js b/reverse_engineering/helpers/commentsHelper.js
index 05f9299..1491011 100644
--- a/reverse_engineering/helpers/commentsHelper.js
+++ b/reverse_engineering/helpers/commentsHelper.js
@@ -67,8 +67,8 @@ const getJsonSchemasWithInjectedDescriptionComments = async ({ client, dbName, j
 	const descriptionComments = (
 		await Promise.all(
-			[...schemas, ...schemasWithViews, ...schemasWithTables, ...entities].map(commentParams =>
-				getDescriptionComments(client, dbName, commentParams, logger),
+			[...schemas, ...schemasWithViews, ...schemasWithTables, ...entities].map(({ schema, entity }) =>
+				getDescriptionComments({ client, dbName, schema, entity, logger }),
 			),
 		)
 	).flat();

diff --git a/reverse_engineering/reverseEngineeringService/helpers/changeViewPropertiesToReferences.js b/reverse_engineering/reverseEngineeringService/helpers/changeViewPropertiesToReferences.js
index d504e0a..58bfe0f 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/changeViewPropertiesToReferences.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/changeViewPropertiesToReferences.js
@@ -3,7 +3,7 @@ const getColumnInfoByName = (columnsInfo, columnName, propertyName) => {
 	return relatedColumn[propertyName];
 };

-const changeViewPropertiesToReferences = (jsonSchema, viewInfo, viewColumnRelations) => {
+const changeViewPropertiesToReferences = ({ jsonSchema, viewInfo, viewColumnRelations }) => {
 	return viewColumnRelations.reduce((jsonSchemaAcc, column) => {
 		const columnName = column['name'];
 		const referenceTable =

diff --git a/reverse_engineering/reverseEngineeringService/helpers/containsJson.js b/reverse_engineering/reverseEngineeringService/helpers/containsJson.js
index 70fecb8..90e7252 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/containsJson.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/containsJson.js
@@ -1,15 +1,10 @@
-const containsJson = tableInfo => {
-	return tableInfo.some(item => {
+const containsJson = ({ tableInfo }) =>
+	tableInfo.some(item => {
 		if (item['DATA_TYPE'] !== 'nvarchar') {
 			return false;
 		}

-		if (item['CHARACTER_MAXIMUM_LENGTH'] >= 0 && item['CHARACTER_MAXIMUM_LENGTH'] < 4000) {
-			return false;
-		}
-
-		return true;
+		return !(item['CHARACTER_MAXIMUM_LENGTH'] >= 0 && item['CHARACTER_MAXIMUM_LENGTH'] < 4000);
 	});
-};

 module.exports = containsJson;
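
To make the simplified containsJson predicate concrete: it now flags a table as a JSON candidate when any nvarchar column is either nvarchar(max) (SQL Server reports its CHARACTER_MAXIMUM_LENGTH as -1) or at least 4000 characters wide. A hypothetical sketch:

	// column metadata in the INFORMATION_SCHEMA-like shape the helper expects
	const tableInfo = [
		{ DATA_TYPE: 'int', CHARACTER_MAXIMUM_LENGTH: null },
		{ DATA_TYPE: 'nvarchar', CHARACTER_MAXIMUM_LENGTH: -1 }, // nvarchar(max)
	];
	containsJson({ tableInfo }); // => true: the nvarchar(max) column may hold JSON documents
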
diff --git a/reverse_engineering/reverseEngineeringService/helpers/defineFieldsCompositeKeyConstraints.js b/reverse_engineering/reverseEngineeringService/helpers/defineFieldsCompositeKeyConstraints.js
index b18af1a..4e04357 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/defineFieldsCompositeKeyConstraints.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/defineFieldsCompositeKeyConstraints.js
@@ -4,8 +4,8 @@ const getKeyConstraintsCompositionStatuses = require('./getKeyConstraintsComposi
 const UNIQUE = 'UNIQUE';
 const PRIMARY_KEY = 'PRIMARY KEY';

-const reverseCompositeKeys = keyConstraintsInfo => {
-	const keyCompositionStatuses = getKeyConstraintsCompositionStatuses(keyConstraintsInfo);
+const reverseCompositeKeys = ({ keyConstraintsInfo }) => {
+	const keyCompositionStatuses = getKeyConstraintsCompositionStatuses({ keyConstraintsInfo });
 	return keyConstraintsInfo.reduce((reversedKeys, keyConstraintInfo) => {
 		const { columnName, constraintName, constraintType, isDescending } = keyConstraintInfo;
 		const compositionStatus = keyCompositionStatuses[constraintName];
@@ -52,8 +52,8 @@ const reverseCompositeKeys = ({ keyConstraintsInfo }) => {
 	}, {});
 };

-const defineFieldsCompositeKeyConstraints = keyConstraintsInfo => {
-	const reversedKeyConstraints = reverseCompositeKeys(keyConstraintsInfo);
+const defineFieldsCompositeKeyConstraints = ({ keyConstraintsInfo }) => {
+	const reversedKeyConstraints = reverseCompositeKeys({ keyConstraintsInfo });
 	return Object.values(reversedKeyConstraints).reduce(
 		(keysAcc, keyConstraintInfo) => {
 			const { _type, order, ...necessaryInfo } = keyConstraintInfo;

diff --git a/reverse_engineering/reverseEngineeringService/helpers/defineFieldsKeyConstraints.js b/reverse_engineering/reverseEngineeringService/helpers/defineFieldsKeyConstraints.js
index e615a9b..ac927c8 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/defineFieldsKeyConstraints.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/defineFieldsKeyConstraints.js
@@ -32,7 +32,7 @@ const handleKey = (field, keyConstraintInfo) => {
 };

 const defineFieldsKeyConstraints = keyConstraintsInfo => jsonSchema => {
-	const keyCompositionStatuses = getKeyConstraintsCompositionStatuses(keyConstraintsInfo);
+	const keyCompositionStatuses = getKeyConstraintsCompositionStatuses({ keyConstraintsInfo });
 	return keyConstraintsInfo.reduce((jsonSchemaAcc, keyConstraintInfo) => {
 		const { columnName, constraintName } = keyConstraintInfo;
 		const currentField = jsonSchemaAcc.properties[columnName];
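
The positional-to-named-argument refactor seen in these helpers recurs throughout the series. A minimal sketch of the difference at a call site (the `info` variable is hypothetical):

	// before: the argument is anonymous at the call site
	const keys = reverseCompositeKeys(info);
	// after: the destructured parameter documents itself and resists accidental reordering
	const keys = reverseCompositeKeys({ keyConstraintsInfo: info });
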
diff --git a/reverse_engineering/reverseEngineeringService/helpers/getKeyConstraintsCompositionStatuses.js b/reverse_engineering/reverseEngineeringService/helpers/getKeyConstraintsCompositionStatuses.js
index d22413d..cdc76b1 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/getKeyConstraintsCompositionStatuses.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/getKeyConstraintsCompositionStatuses.js
@@ -1,4 +1,4 @@
-const getKeyConstraintsCompositionStatuses = keyConstraintsInfo => {
+const getKeyConstraintsCompositionStatuses = ({ keyConstraintsInfo }) => {
 	const keyConstraintsColumns = keyConstraintsInfo.reduce((constraintsColumns, keyConstraintInfo) => {
 		const { constraintName, columnName } = keyConstraintInfo;
 		const currentConstraintColumns = constraintsColumns[constraintName];
@@ -18,7 +18,7 @@ const getKeyConstraintsCompositionStatuses = keyConstraintsInfo => {
 	return Object.entries(keyConstraintsColumns).reduce(
 		(statuses, [name, columns]) => ({
 			...statuses,
-			[name]: Array.from(new Set(columns)).length > 1 ? true : false,
+			[name]: Array.from(new Set(columns)).length > 1,
 		}),
 		{},
 	);
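
To make the behavior of getKeyConstraintsCompositionStatuses concrete, here is a hypothetical input and the map it produces (a constraint counts as composite only when it spans more than one distinct column):

	const keyConstraintsInfo = [
		{ constraintName: 'PK_Orders', columnName: 'id' },
		{ constraintName: 'UQ_OrderLine', columnName: 'order_id' },
		{ constraintName: 'UQ_OrderLine', columnName: 'line_no' },
	];
	getKeyConstraintsCompositionStatuses({ keyConstraintsInfo });
	// => { PK_Orders: false, UQ_OrderLine: true }
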
diff --git a/reverse_engineering/reverseEngineeringService/helpers/getPeriodForSystemTime.js b/reverse_engineering/reverseEngineeringService/helpers/getPeriodForSystemTime.js
index e6650d0..b4a492a 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/getPeriodForSystemTime.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/getPeriodForSystemTime.js
@@ -1,10 +1,12 @@
 const { getTableSystemTime } = require('../../databaseService/databaseService');

-const getPeriodForSystemTime = async (dbConnectionClient, dbName, tableName, schemaName, logger) => {
-	const tableSystemTime = await getTableSystemTime(dbConnectionClient, dbName, tableName, schemaName, logger);
+const getPeriodForSystemTime = async ({ client, dbName, tableName, schemaName, logger }) => {
+	const tableSystemTime = await getTableSystemTime({ client, dbName, tableName, tableSchema: schemaName, logger });
+
 	if (!tableSystemTime[0]) {
 		return;
 	}
+
 	const periodForSystemTime = tableSystemTime[0];
 	return [
 		{

diff --git a/reverse_engineering/reverseEngineeringService/helpers/reorderTableRows.js b/reverse_engineering/reverseEngineeringService/helpers/reorderTableRows.js
index 864a824..cedacef 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/reorderTableRows.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/reorderTableRows.js
@@ -1,4 +1,4 @@
-const reorderTableRows = (tableRows, isFieldOrderAlphabetic) => {
+const reorderTableRows = ({ tableRows, isFieldOrderAlphabetic }) => {
 	if (!isFieldOrderAlphabetic) {
 		return tableRows;
 	}

diff --git a/reverse_engineering/reverseEngineeringService/helpers/reverseTableIndexes.js b/reverse_engineering/reverseEngineeringService/helpers/reverseTableIndexes.js
index 43025d2..3d35995 100644
--- a/reverse_engineering/reverseEngineeringService/helpers/reverseTableIndexes.js
+++ b/reverse_engineering/reverseEngineeringService/helpers/reverseTableIndexes.js
@@ -153,7 +153,7 @@ const addKeys = (indexData, index) => {
 	}
 };

-const reverseTableIndexes = tableIndexes =>
+const reverseTableIndexes = ({ tableIndexes }) =>
 	Object.values(
 		tableIndexes.reduce((indexList, index) => {
 			let existedIndex = indexList[index.IndexName];

diff --git a/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js b/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js
index 494880b..f2e574a 100644
--- a/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js
+++ b/reverse_engineering/reverseEngineeringService/reverseEngineeringService.js
@@ -60,16 +60,16 @@ const mergeCollectionsWithViews = ({ jsonSchemas }) => {
 	return [...collectionSchemas, ...combinedViewSchemas];
 };

-const getCollectionsRelationships = logger => async dbConnectionClient => {
-	const dbName = dbConnectionClient.config.database;
+const getCollectionsRelationships = async ({ client, tablesInfo, logger }) => {
+	const dbName = client.config.database;
 	logger.log('info', { message: `Fetching tables relationships.` }, 'Reverse Engineering');
 	logger.progress({ message: 'Fetching tables relationships', containerName: dbName, entityName: '' });
-	const tableForeignKeys = await getTableForeignKeys(dbConnectionClient, dbName, logger);
+	const tableForeignKeys = await getTableForeignKeys({ client, dbName, tablesInfo, logger });
 	return reverseTableForeignKeys(tableForeignKeys, dbName);
 };

-const getStandardDocumentByJsonSchema = jsonSchema => {
+const getStandardDocumentByJsonSchema = ({ jsonSchema }) => {
 	return Object.keys(jsonSchema.properties).reduce((result, key) => {
 		return {
 			...result,
@@ -78,8 +78,8 @@ const getStandardDocumentByJsonSchema = jsonSchema => {
 	}, {});
 };

-const isViewPartitioned = viewStatement => {
-	viewStatement = cleanComments(String(viewStatement).trim());
+const isViewPartitioned = ({ viewStatement }) => {
+	viewStatement = cleanComments({ definition: String(viewStatement).trim() });
 	const viewContentRegexp = /CREATE[\s\S]+?VIEW[\s\S]+?AS\s+(?:WITH[\s\S]+AS\s+\([\s\S]+\))?([\s\S]+)/i;

 	if (!viewContentRegexp.test(viewStatement)) {
@@ -87,12 +87,10 @@ const isViewPartitioned = ({ viewStatement }) => {
 	}

 	const content = viewStatement.match(viewContentRegexp)[1] || '';
-	const hasUnionAll = content.toLowerCase().split(/union[\s\S]+?all/i).length > 1;
-
-	return hasUnionAll;
+	return content.toLowerCase().split(/union[\s\S]+?all/i).length > 1;
 };

-const getPartitionedJsonSchema = (viewInfo, viewColumnRelations) => {
+const getPartitionedJsonSchema = ({ viewInfo, viewColumnRelations }) => {
 	const aliasToName = viewInfo.reduce(
 		(aliasToName, item) => ({
 			...aliasToName,
@@ -118,7 +116,7 @@ const getPartitionedJsonSchema = ({ viewInfo, viewColumnRelations }) => {
 	};
 };

-const getPartitionedTables = viewInfo => {
+const getPartitionedTables = ({ viewInfo }) => {
 	const hasTable = (tables, item) =>
 		tables.some(
 			table => table.table[0] === item.ReferencedSchemaName && table.table[1] === item.ReferencedTableName,
@@ -137,16 +135,15 @@ const getPartitionedTables = ({ viewInfo }) => {
 	}, []);
 };

-const cleanComments = definition => {
-	return definition
+const cleanComments = ({ definition }) =>
+	definition
 		.split('\n')
-		.filter(line => !/^--/.test(line.trim()))
+		.filter(line => !line.trim().startsWith('--'))
 		.join('\n');
-};

-const getSelectStatementFromDefinition = definition => {
+const getSelectStatementFromDefinition = ({ definition }) => {
 	const regExp =
-		/CREATE[\s]+VIEW[\s\S]+?(?:WITH[\s]+(?:ENCRYPTION,?|SCHEMABINDING,?|VIEW_METADATA,?)+[\s]+)?AS\s+((?:WITH|SELECT)[\s\S]+?)(WITH\s+CHECK\s+OPTION|$)/i;
+		/CREATE\s+VIEW[\s\S]+?(?:WITH\s+[\w,\s]+?\s+)?AS\s+((?:WITH|SELECT)[\s\S]+?)(WITH\s+CHECK\s+OPTION|$)/i;

 	if (!regExp.test(definition.trim())) {
 		return '';
 	}

 	return definition.trim().match(regExp)[1];
 };

-const getPartitionedSelectStatement = (definition, table, dbName) => {
+const getPartitionedSelectStatement = ({ definition, table, dbName }) => {
 	const tableRef = new RegExp(`(\\[?${dbName}\\]?\\.)?(\\[?${table[0]}\\]?\\.)?\\[?${table[1]}\\]?`, 'i');
-	const statement = getSelectStatementFromDefinition(definition)
+	const statement = getSelectStatementFromDefinition({ definition })
 		.split(/UNION\s+ALL/i)
 		.find(item => tableRef.test(item));

@@ -168,7 +165,7 @@ const getPartitionedSelectStatement = (definition, table, dbName) => {
 	return statement.replace(tableRef, '${tableName}').trim();
 };

-const getViewProperties = viewData => {
+const getViewProperties = ({ viewData }) => {
 	if (!viewData) {
 		return {};
 	}
@@ -182,15 +179,16 @@ const getViewProperties = ({ viewData }) => {
 	};
 };

-const prepareViewJSON = (dbConnectionClient, dbName, viewName, schemaName, logger) => async jsonSchema => {
+const prepareViewJSON = async ({ client, dbName, viewName, schemaName, logger, jsonSchema }) => {
 	const [viewInfo, viewColumnRelations, viewStatement] = await Promise.all([
-		await getViewTableInfo(dbConnectionClient, dbName, viewName, schemaName, logger),
-		await getViewColumnRelations(dbConnectionClient, dbName, viewName, schemaName, logger),
-		await getViewStatement(dbConnectionClient, dbName, viewName, schemaName, logger),
+		await getViewTableInfo({ client, dbName, viewName, schemaName, logger }),
+		await getViewColumnRelations({ client, dbName, viewName, schemaName, logger }),
+		await getViewStatement({ client, dbName, viewName, schemaName, logger }),
 	]);
-	if (isViewPartitioned(viewStatement[0].definition)) {
-		const partitionedSchema = getPartitionedJsonSchema(viewInfo, viewColumnRelations);
-		const partitionedTables = getPartitionedTables(viewInfo);
+
+	if (isViewPartitioned({ viewStatement: viewStatement[0].definition })) {
+		const partitionedSchema = getPartitionedJsonSchema({ viewInfo, viewColumnRelations });
+		const partitionedTables = getPartitionedTables({ viewInfo });

 		return {
 			jsonSchema: JSON.stringify({
@@ -201,12 +199,12 @@ const prepareViewJSON = async ({ client, dbName, viewName, schemaName, logger, j
 				},
 			}),
 			data: {
-				...getViewProperties(viewStatement[0]),
-				selectStatement: getPartitionedSelectStatement(
-					cleanComments(String(viewStatement[0].definition)),
-					(partitionedTables[0] || {}).table,
+				...getViewProperties({ viewData: viewStatement[0] }),
+				selectStatement: getPartitionedSelectStatement({
+					definition: cleanComments({ definition: String(viewStatement[0].definition) }),
+					table: partitionedTables[0]?.table,
 					dbName,
-				),
+				}),
 				partitioned: true,
 				partitionedTables,
 			},
@@ -220,11 +218,13 @@ const prepareViewJSON = async ({ client, dbName, viewName, schemaName, logger, j
 		};
 	} else {
 		return {
-			jsonSchema: JSON.stringify(changeViewPropertiesToReferences(jsonSchema, viewInfo, viewColumnRelations)),
+			jsonSchema: JSON.stringify(changeViewPropertiesToReferences({ jsonSchema, viewInfo, viewColumnRelations })),
 			name: viewName,
 			data: {
-				...getViewProperties(viewStatement[0]),
-				selectStatement: getSelectStatementFromDefinition(cleanComments(String(viewStatement[0].definition))),
+				...getViewProperties({ viewData: viewStatement[0] }),
+				selectStatement: getSelectStatementFromDefinition({
+					definition: cleanComments({ definition: String(viewStatement[0].definition) }),
+				}),
 			},
 			relatedTables: viewInfo.map(columnInfo => ({
 				tableName: columnInfo['ReferencedTableName'],
@@ -270,21 +270,23 @@ const getMemoryOptimizedOptions = options => {
 	};
 };

-const addTotalBucketCountToDatabaseIndexes = (databaseIndexes, indexesBucketCount) => {
+const addTotalBucketCountToDatabaseIndexes = ({ databaseIndexes, indexesBucketCount }) => {
 	const hash = indexesBucketCount.reduce((hash, i) => {
-		return Object.assign({}, hash, { [i.index_id]: i.total_bucket_count });
+		return { ...hash, [i.index_id]: i.total_bucket_count };
 	}, {});
+
 	return databaseIndexes.map(i => {
 		if (hash[i.index_id] === undefined) {
 			return i;
 		} else {
-			return Object.assign({}, i, { total_bucket_count: hash[i.index_id] });
+			return { ...i, total_bucket_count: hash[i.index_id] };
 		}
 	});
 };

@@ -295,158 +297,279 @@ const addTotalBucketCountToDatabaseIndexes = ({ databaseIndexes, indexesBucketCo
-const reverseCollectionsToJSON = logger => async (dbConnectionClient, tablesInfo, reverseEngineeringOptions) => {
-	const dbName = dbConnectionClient.config.database;
+const fetchDatabaseMetadata = async ({ client, dbName, tablesInfo, logger }) => {
+	const allUniqueSchemasAndTables = getAllUniqueSchemasAndTables({ tablesInfo });
+
 	const [
 		rawDatabaseIndexes,
 		databaseMemoryOptimizedTables,
 		databaseCheckConstraints,
 		xmlSchemaCollections,
 		databaseUDT,
 		viewsIndexes,
 		fullTextIndexes,
 		spatialIndexes,
 	] = await Promise.all([
-		getDatabaseIndexes(dbConnectionClient, dbName, logger),
-		getDatabaseMemoryOptimizedTables(dbConnectionClient, dbName, logger),
-		getDatabaseCheckConstraints(dbConnectionClient, dbName, logger),
-		getDatabaseXmlSchemaCollection(dbConnectionClient, dbName, logger),
-		getDatabaseUserDefinedTypes(dbConnectionClient, dbName, logger),
-		getViewsIndexes(dbConnectionClient, dbName, logger),
-		getFullTextIndexes(dbConnectionClient, dbName, logger),
-		getSpatialIndexes(dbConnectionClient, dbName, logger),
+		getDatabaseIndexes({ client, dbName, tablesInfo, logger }),
+		getDatabaseMemoryOptimizedTables({ client, dbName, logger }),
+		getDatabaseCheckConstraints({ client, dbName, allUniqueSchemasAndTables, logger }),
+		getDatabaseXmlSchemaCollection({ client, dbName, allUniqueSchemasAndTables, logger }),
+		getDatabaseUserDefinedTypes({ client, dbName, logger }),
+		getViewsIndexes({ client, dbName, logger }),
+		getFullTextIndexes({ client, dbName, allUniqueSchemasAndTables, logger }),
+		getSpatialIndexes({ client, dbName, allUniqueSchemasAndTables, logger }),
 	]);
-	const indexesBucketCount = await getIndexesBucketCount(
-		dbConnectionClient,
+
+	const indexesBucketCount = await getIndexesBucketCount({
+		client,
 		dbName,
-		rawDatabaseIndexes.map(i => i.index_id),
+		indexesId: rawDatabaseIndexes.map(i => i.index_id),
 		logger,
-	);
+	});
+
 	const uniqueDatabaseIndexesColumns = getUniqueIndexesColumns({ indexesColumns: rawDatabaseIndexes });
-	const databaseIndexes = addTotalBucketCountToDatabaseIndexes(uniqueDatabaseIndexesColumns, indexesBucketCount);
-
-	return await Object.entries(tablesInfo).reduce(async (jsonSchemas, [schemaName, tableNames]) => {
-		logger.log('info', { message: `Fetching '${dbName}' database information` }, 'Reverse Engineering');
-		logger.progress({ message: 'Fetching database information', containerName: dbName, entityName: '' });
-		const tablesInfo = await Promise.all(
-			tableNames.map(async untrimmedTableName => {
-				const tableName = untrimmedTableName.replace(/ \(v\)$/, '');
-				const tableIndexes = databaseIndexes
-					.concat(fullTextIndexes)
-					.concat(spatialIndexes)
-					.filter(index => index.TableName === tableName && index.schemaName === schemaName);
-				const tableXmlSchemas = xmlSchemaCollections.filter(
-					collection => collection.tableName === tableName && collection.schemaName === schemaName,
-				);
-				const tableCheckConstraints = databaseCheckConstraints.filter(cc => cc.table === tableName);
-				logger.log(
-					'info',
-					{ message: `Fetching '${tableName}' table information from '${dbName}' database` },
-					'Reverse Engineering',
-				);
-				logger.progress({
-					message: 'Fetching table information',
-					containerName: dbName,
-					entityName: tableName,
-				});
-				const tableInfo = await getTableInfo(dbConnectionClient, dbName, tableName, schemaName, logger);
-
-				const [tableRows, fieldsKeyConstraints] = await Promise.all([
-					containsJson(tableInfo)
-						? await getTableRow(
-								dbConnectionClient,
-								dbName,
-								tableName,
-								schemaName,
-								reverseEngineeringOptions.recordSamplingSettings,
-								logger,
-							)
-						: Promise.resolve([]),
-					await getTableKeyConstraints(dbConnectionClient, dbName, tableName, schemaName, logger),
-				]);
-				const tableType = tableInfo[0]['TABLE_TYPE'];
-				const isView = tableType && tableType.trim() === 'V';
-				const jsonSchema = pipe(
-					transformDatabaseTableInfoToJSON(tableInfo),
-					defineRequiredFields,
-					defineFieldsDescription(
-						await getTableColumnsDescription(dbConnectionClient, dbName, tableName, schemaName, logger),
-					),
-					defineFieldsKeyConstraints(fieldsKeyConstraints),
-					defineMaskedColumns(
-						await getTableMaskedColumns(dbConnectionClient, dbName, tableName, schemaName, logger),
-					),
-					defineJSONTypes(tableRows),
-					defineXmlFieldsCollections(tableXmlSchemas),
-					defineFieldsDefaultConstraintNames(
-						await getTableDefaultConstraintNames(dbConnectionClient, dbName, tableName, schemaName, logger),
-					),
-				)({ required: [], properties: {} });
-
-				const reorderedTableRows = reorderTableRows(
-					tableRows,
-					reverseEngineeringOptions.isFieldOrderAlphabetic,
-				);
-				const standardDoc =
-					Array.isArray(reorderedTableRows) && reorderedTableRows.length
-						? reorderedTableRows
-						: reorderTableRows(
-								[getStandardDocumentByJsonSchema(jsonSchema)],
-								reverseEngineeringOptions.isFieldOrderAlphabetic,
-							);
-				const periodForSystemTime = await getPeriodForSystemTime(
-					dbConnectionClient,
-					dbName,
-					tableName,
-					schemaName,
-					logger,
-				);
-				let result = {
-					collectionName: tableName,
-					dbName: schemaName,
-					entityLevel: {
-						Indxs: reverseTableIndexes(tableIndexes),
-						chkConstr: reverseTableCheckConstraints(tableCheckConstraints),
-						periodForSystemTime,
-						...getMemoryOptimizedOptions(
-							databaseMemoryOptimizedTables.find(item => item.name === tableName),
-						),
-						...defineFieldsCompositeKeyConstraints(fieldsKeyConstraints),
-					},
-					standardDoc: standardDoc,
-					documentTemplate: standardDoc,
-					collectionDocs: reorderedTableRows,
-					documents: cleanDocuments(reorderedTableRows),
-					bucketInfo: {
-						databaseName: dbName,
-					},
-					modelDefinitions: {
-						definitions: getUserDefinedTypes(tableInfo, databaseUDT),
-					},
-					emptyBucket: false,
-					validation: { jsonSchema },
-					views: [],
-				};
-
-				if (isView) {
-					const viewData = await prepareViewJSON(
-						dbConnectionClient,
-						dbName,
-						tableName,
-						schemaName,
-						logger,
-					)(jsonSchema);
-					const indexes = viewsIndexes.filter(
-						index => index.TableName === tableName && index.schemaName === schemaName,
-					);
-
-					result = {
-						...result,
-						...viewData,
-						data: {
-							...(viewData.data || {}),
-							Indxs: reverseTableIndexes(indexes),
-						},
-					};
-				}
-
-				return result;
-			}),
-		);
-		return [...(await jsonSchemas), ...tablesInfo.filter(Boolean)];
-	}, Promise.resolve([]));
+	const databaseIndexes = addTotalBucketCountToDatabaseIndexes({
+		databaseIndexes: uniqueDatabaseIndexesColumns,
+		indexesBucketCount,
+	});
+
+	return {
+		databaseIndexes,
+		databaseMemoryOptimizedTables,
+		databaseCheckConstraints,
+		xmlSchemaCollections,
+		databaseUDT,
+		viewsIndexes,
+		fullTextIndexes,
+		spatialIndexes,
+	};
+};
+
+const processSchemas = async ({ tablesInfo, ...context }) => {
+	const { logger, dbName } = context;
+
+	logger.log('info', { message: `Fetching '${dbName}' database information` }, 'Reverse Engineering');
+	logger.progress({ message: 'Fetching database information', containerName: dbName, entityName: '' });
+
+	const schemaPromises = Object.entries(tablesInfo).map(([schemaName, tableNames]) =>
+		processTables({ schemaName, tableNames, ...context }),
+	);
+
+	const allTables = await Promise.all(schemaPromises);
+	return allTables.flat();
+};
+
+const processTables = async ({ schemaName, tableNames, ...context }) => {
+	const tablePromises = tableNames.map(tableName =>
+		processTable({ schemaName, rawTableName: tableName, ...context }),
+	);
+	const tables = await Promise.all(tablePromises);
+
+	return tables.filter(Boolean);
+};
+
+const processTable = async ({ schemaName, rawTableName, ...context }) => {
+	const { dbName, logger, reverseEngineeringOptions, client } = context;
+	const { recordSamplingSettings, isFieldOrderAlphabetic } = reverseEngineeringOptions;
+	const tableName = rawTableName.replace(/ \(v\)$/, '');

+	logger.log(
+		'info',
+		{ message: `Fetching '${tableName}' table information from '${dbName}' database` },
+		'Reverse Engineering',
+	);
+	logger.progress({
+		message: 'Fetching table information',
+		containerName: dbName,
+		entityName: tableName,
+	});
+
+	const tableInfo = await getTableInfo({ client, dbName, tableName, tableSchema: schemaName, logger });
+	const [tableRows, fieldsKeyConstraints] = await Promise.all([
+		containsJson({ tableInfo })
+			? getTableRow({
+					client,
+					dbName,
+					tableName,
+					tableSchema: schemaName,
+					recordSamplingSettings,
+					logger,
+				})
+			: Promise.resolve([]),
+		getTableKeyConstraints({ client, dbName, tableName, schemaName, logger }),
+	]);
+
+	const isView = isViewTable({ tableInfo });
+	const jsonSchema = await createJsonSchema({
+		...context,
+		tableInfo,
+		tableRows,
+		fieldsKeyConstraints,
+		schemaName,
+		tableName,
+	});
+
+	const reorderedTableRows = reorderTableRows({ tableRows, isFieldOrderAlphabetic });
+	const standardDoc = getStandardDocument({ reorderedTableRows, jsonSchema, isFieldOrderAlphabetic });
+
+	const periodForSystemTime = await getPeriodForSystemTime({
+		client,
+		dbName,
+		tableName,
+		schemaName,
+		logger,
+	});
+
+	let result = createTableResult({
+		...context,
+		tableName,
+		schemaName,
+		jsonSchema,
+		standardDoc,
+		reorderedTableRows,
+		periodForSystemTime,
+		tableInfo,
+		fieldsKeyConstraints,
+	});
+
+	if (isView) {
+		result = await processView({ processedTableResult: result, tableName, schemaName, jsonSchema, ...context });
+	}
+
+	return result;
+};
+
+function isViewTable({ tableInfo }) {
+	const tableType = tableInfo[0]?.['TABLE_TYPE'];
+	return tableType && tableType.trim() === 'V';
+}
+
+const createJsonSchema = async ({
+	tableInfo,
+	tableRows,
+	fieldsKeyConstraints,
+	schemaName,
+	tableName,
+	xmlSchemaCollections,
+	client,
+	dbName,
+	logger,
+}) => {
+	const commonContext = { client, dbName, tableName, schemaName, logger };
+	return pipe(
+		transformDatabaseTableInfoToJSON(tableInfo),
+		defineRequiredFields,
+		defineFieldsDescription(await getTableColumnsDescription(commonContext)),
+		defineFieldsKeyConstraints(fieldsKeyConstraints),
+		defineMaskedColumns(await getTableMaskedColumns(commonContext)),
+		defineJSONTypes(tableRows),
+		defineXmlFieldsCollections(
+			xmlSchemaCollections.filter(
+				collection => collection.tableName === tableName && collection.schemaName === schemaName,
+			),
+		),
+		defineFieldsDefaultConstraintNames(await getTableDefaultConstraintNames(commonContext)),
+	)({ required: [], properties: {} });
+};
+
+const getStandardDocument = ({ reorderedTableRows, jsonSchema, isFieldOrderAlphabetic }) =>
+	Array.isArray(reorderedTableRows) && reorderedTableRows.length
+		? reorderedTableRows
+		: reorderTableRows({ tableRows: [getStandardDocumentByJsonSchema({ jsonSchema })], isFieldOrderAlphabetic });
+
+const getAllUniqueSchemasAndTables = ({ tablesInfo }) =>
+	Object.keys(tablesInfo).reduce(
+		(acc, schemaName) => {
+			acc.schemas.add(`'${schemaName}'`);
+			tablesInfo[schemaName].forEach(tableName => acc.tables.add(`'${tableName}'`));
+			return acc;
+		},
+		{ schemas: new Set(), tables: new Set() },
+	);
+
+const createTableResult = ({
+	dbName,
+	databaseUDT,
+	tableName,
+	schemaName,
+	jsonSchema,
+	standardDoc,
+	reorderedTableRows,
+	periodForSystemTime,
+	tableInfo,
+	fieldsKeyConstraints,
+	databaseIndexes,
+	fullTextIndexes,
+	spatialIndexes,
+	databaseCheckConstraints,
+	databaseMemoryOptimizedTables,
+}) => {
+	const tableIndexes = [...databaseIndexes, ...fullTextIndexes, ...spatialIndexes].filter(
+		index => index.TableName === tableName && index.schemaName === schemaName,
+	);
+
+	const tableCheckConstraints = databaseCheckConstraints.filter(cc => cc.table === tableName);
+
+	return {
+		collectionName: tableName,
+		dbName: schemaName,
+		entityLevel: {
+			Indxs: reverseTableIndexes({ tableIndexes }),
+			chkConstr: reverseTableCheckConstraints(tableCheckConstraints),
+			periodForSystemTime,
+			...getMemoryOptimizedOptions(databaseMemoryOptimizedTables.find(item => item.name === tableName)),
+			...defineFieldsCompositeKeyConstraints({ keyConstraintsInfo: fieldsKeyConstraints }),
+		},
+		standardDoc,
+		documentTemplate: standardDoc,
+		collectionDocs: reorderedTableRows,
+		documents: cleanDocuments(reorderedTableRows),
+		bucketInfo: { databaseName: dbName },
+		modelDefinitions: { definitions: getUserDefinedTypes(tableInfo, databaseUDT) },
+		emptyBucket: false,
+		validation: { jsonSchema },
+		views: [],
+	};
+};
+
+const processView = async ({ processedTableResult, tableName, schemaName, jsonSchema, ...context }) => {
+	const { client, dbName, logger } = context;
+
+	const viewData = await prepareViewJSON({ client, dbName, viewName: tableName, schemaName, logger, jsonSchema });
+	const indexes = context.viewsIndexes.filter(
+		index => index.TableName === tableName && index.schemaName === schemaName,
+	);
+
+	return {
+		...processedTableResult,
+		...viewData,
+		data: {
+			...(viewData.data || {}),
+			Indxs: reverseTableIndexes({ tableIndexes: indexes }),
+		},
+	};
+};
+
+const reverseCollectionsToJSON = async ({ client, tablesInfo, reverseEngineeringOptions, logger }) => {
+	const dbName = client.config.database;
+
+	const {
+		databaseIndexes,
+		databaseMemoryOptimizedTables,
+		databaseCheckConstraints,
+		xmlSchemaCollections,
+		databaseUDT,
+		viewsIndexes,
+		fullTextIndexes,
+		spatialIndexes,
+	} = await fetchDatabaseMetadata({ client, dbName, tablesInfo, logger });
+
+	return processSchemas({
+		tablesInfo,
+		client,
+		dbName,
+		logger,
+		reverseEngineeringOptions,
+		databaseIndexes,
+		databaseMemoryOptimizedTables,
+		databaseCheckConstraints,
+		xmlSchemaCollections,
+		databaseUDT,
+		viewsIndexes,
+		fullTextIndexes,
+		spatialIndexes,
+	});
 };

-const logDatabaseVersion = async (dbConnectionClient, logger) => {
-	const versionInfo = await getVersionInfo(dbConnectionClient, dbConnectionClient.config.database, logger);
+const logDatabaseVersion = async ({ client, logger }) => {
+	const versionInfo = await getVersionInfo({ client, dbName: client.config.database, logger });

 	logger.log('info', { dbVersion: versionInfo }, 'Database version');
 };
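
For context on the metadata prefetch above: getAllUniqueSchemasAndTables pre-quotes every schema and table name so the resulting sets can be joined straight into the IN (...) clauses built by getWhereClauseForUniqueSchemasAndTables. A hypothetical input/output sketch:

	getAllUniqueSchemasAndTables({ tablesInfo: { dbo: ['Orders', 'Customers'], sales: ['Orders'] } });
	// => { schemas: Set { "'dbo'", "'sales'" }, tables: Set { "'Orders'", "'Customers'" } }
	// note: Set membership dedupes the repeated 'Orders' across schemas
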
From d29f95e7459aa7db40dc295e2cc853326b193243 Mon Sep 17 00:00:00 2001
From: Teamcity
Date: Fri, 6 Dec 2024 19:29:54 +0000
Subject: [PATCH 26/26] Bump plugin version to 0.2.15 for next development track

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index 171737c..112ffc1 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "SQLServer",
-	"version": "0.2.14",
+	"version": "0.2.15",
 	"author": "hackolade",
 	"engines": {
 		"hackolade": "7.7.10",